import java.io.File;
import java.util.List;
import javax.swing.JOptionPane;
import javax.swing.table.TableColumnModel;
import javafx.scene.media.Media;
import javafx.scene.media.MediaPlayer;
import javafx.scene.media.MediaPlayer.Status;
// Album, Song, SongModel, DBDriver, AddSong and Dashboard are assumed to be
// project classes in the same package.
/**
 *
 * @author Priyansh Rastogi
 */
public class AlbumDetails extends javax.swing.JFrame {
/**
* Creates new form AlbumDetails
*/
public static Album currentAlbum = new Album();
MediaPlayer mPlayer;
public AlbumDetails() {
initComponents();
albumName.setText(currentAlbum.getAlbumName());
artistName.setText(currentAlbum.getArtist());
List<Song> songs = null;
try {
DBDriver driver = new DBDriver();
songs = driver.getAllSongs(currentAlbum.getAlbumID());
}catch(Exception e) {
e.printStackTrace();
}
SongModel model = new SongModel(songs);
table.setModel(model);
TableColumnModel tcm = table.getColumnModel();
tcm.getColumn(2).setMaxWidth(100);
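// Column layout assumed by the handlers below: column 1 holds the song's
// file path, column 2 a delete action, and column 4 the song ID.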
table.addMouseListener(new java.awt.event.MouseAdapter() {
@Override
public void mouseClicked(java.awt.event.MouseEvent evt) {
int row = table.rowAtPoint(evt.getPoint());
int column = table.columnAtPoint(evt.getPoint());
if(row < 0 || column < 0)
return;
if(column==2) {
if (JOptionPane.showConfirmDialog(null, "Are you sure about that? Once you delete it, there is no going back.", "Warning", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) {
try {
DBDriver driver = new DBDriver();
driver.deleteSong(Integer.parseInt(table.getModel().getValueAt(row, 4).toString()));
if(mPlayer!=null) {
mPlayer.stop();
}
List<Song> songlist = driver.getAllSongs(currentAlbum.getAlbumID());
SongModel model2 = new SongModel(songlist);
table.setModel(model2);
TableColumnModel tcm = table.getColumnModel();
tcm.getColumn(2).setMaxWidth(100);
}catch(Exception e) {
e.printStackTrace();
}
}
}
else {
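// Instantiating a JFXPanel initializes the JavaFX runtime so that
// Media/MediaPlayer can be used from this Swing application.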
new javafx.embed.swing.JFXPanel();
String uriString = new File(table.getModel().getValueAt(row, 1).toString()).toURI().toString();
Media media = new Media(uriString);
if(mPlayer!=null)
mPlayer.stop();
mPlayer = new MediaPlayer(media);
mPlayer.play();
}
}
});
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
albumName = new javax.swing.JLabel();
jScrollPane1 = new javax.swing.JScrollPane();
table = new javax.swing.JTable();
addSongs = new javax.swing.JButton();
artistName = new javax.swing.JLabel();
delete = new javax.swing.JButton();
Back = new javax.swing.JButton();
pause = new javax.swing.JButton();
stop = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
albumName.setFont(new java.awt.Font("Tahoma", 0, 18)); // NOI18N
albumName.setText("Album Name");
table.setModel(new javax.swing.table.DefaultTableModel(
new Object [][] {
},
new String [] {
}
));
jScrollPane1.setViewportView(table);
addSongs.setText("Add Songs");
addSongs.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
addSongsActionPerformed(evt);
}
});
artistName.setText("Artist Name");
delete.setText("Delete Album");
delete.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
deleteActionPerformed(evt);
}
});
Back.setText("Back");
Back.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
BackActionPerformed(evt);
}
});
pause.setText("Pause");
pause.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
pauseActionPerformed(evt);
}
});
stop.setText("Stop");
stop.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
stopActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.addComponent(Back)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 237, Short.MAX_VALUE)
.addComponent(albumName)
.addGap(200, 200, 200)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(delete, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(addSongs, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addGap(32, 32, 32))
.addComponent(jScrollPane1, javax.swing.GroupLayout.Alignment.TRAILING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(328, 328, 328)
.addComponent(artistName))
.addGroup(layout.createSequentialGroup()
.addGap(304, 304, 304)
.addComponent(pause)
.addGap(41, 41, 41)
.addComponent(stop)))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(25, 25, 25)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(albumName)
.addComponent(addSongs)))
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(Back)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(artistName)
.addGap(1, 1, 1)
.addComponent(delete)
.addGap(18, 18, 18)
.addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 300, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 44, Short.MAX_VALUE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(pause)
.addComponent(stop))
.addContainerGap())
);
pack();
setLocationRelativeTo(null);
}// </editor-fold>//GEN-END:initComponents
private void addSongsActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_addSongsActionPerformed
AddSong.album = currentAlbum;
AddSong as = new AddSong();
if(mPlayer!=null)
mPlayer.stop();
this.dispose();
as.setVisible(true);
}//GEN-LAST:event_addSongsActionPerformed
private void deleteActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_deleteActionPerformed
if (JOptionPane.showConfirmDialog(null, "Are you sure about that? Once you delete it, there is no going back.", "Warning", JOptionPane.YES_NO_OPTION) == JOptionPane.YES_OPTION) {
try {
int albumID = currentAlbum.getAlbumID();
DBDriver driver = new DBDriver();
driver.deleteAlbum(albumID);
}catch(Exception e) {
e.printStackTrace();
}
Dashboard db = new Dashboard();
if(mPlayer!=null)
mPlayer.stop();
this.dispose();
db.setVisible(true);
}
else {
}
}//GEN-LAST:event_deleteActionPerformed
private void BackActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BackActionPerformed
Dashboard db = new Dashboard();
if(mPlayer!=null)
mPlayer.stop();
this.dispose();
db.setVisible(true);
}//GEN-LAST:event_BackActionPerformed
private void pauseActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_pauseActionPerformed
if(mPlayer == null)
return;
if(mPlayer.getStatus().equals(Status.PLAYING))
mPlayer.pause();
else
mPlayer.play();
}//GEN-LAST:event_pauseActionPerformed
private void stopActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_stopActionPerformed
if(mPlayer != null)
mPlayer.stop();
}//GEN-LAST:event_stopActionPerformed
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Windows".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(AlbumDetails.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(AlbumDetails.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(AlbumDetails.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(AlbumDetails.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
//</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new AlbumDetails().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton Back;
private javax.swing.JButton addSongs;
private javax.swing.JLabel albumName;
private javax.swing.JLabel artistName;
private javax.swing.JButton delete;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JButton pause;
private javax.swing.JButton stop;
private javax.swing.JTable table;
// End of variables declaration//GEN-END:variables
}
def _make_meta(self, img: MulImage, channel_num: int) -> bytes:
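# GWY container layout (an assumption based on the Gwyddion file format):
# each component is a NUL-terminated name, a one-byte type code ('o' = object,
# 's' = string), then the value; an object body is prefixed with its
# little-endian int32 size, which is what pack('<i', ...) below writes.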
meta_name = bytes(f'/{channel_num}/meta\0o', 'utf-8')
meta_name += b'GwyContainer\0'
meta_data = b''.join([
bytes(f'ID\0s{img.img_id}\0', 'utf-8'),
bytes(f'Bias\0s{img.bias * 1e-3:.5f} V\0', 'utf-8'),
bytes(f'Current\0s{img.current:.2f} nA\0', 'utf-8'),
bytes(f'Current factor\0s{img.currfac}\0', 'utf-8'),
bytes(f'Scan Size\0s{img.xsize} nm, {img.ysize} nm\0', 'utf-8'),
bytes(f'Resolution\0s{img.xres}, {img.yres}\0', 'utf-8'),
bytes(f'Datetime\0s{img.datetime}\0', 'utf-8'),
bytes(f'Gain\0s{img.gain}\0', 'utf-8'),
bytes(f'Mode\0s{img.mode}\0', 'utf-8'),
bytes(f'Postprocessing\0s{img.postpr}\0', 'utf-8'),
bytes(f'Scan Duration\0s{img.speed:.2f} s\0', 'utf-8'),
bytes(f'Line Time\0s{img.line_time:.2f} ms\0', 'utf-8'),
bytes(f'Tilt\0s{img.tilt} deg\0', 'utf-8'),
bytes(f'X-Offset\0s{img.xoffset:.2f} nm\0', 'utf-8'),
bytes(f'Y-Offset\0s{img.yoffset:.2f} nm\0', 'utf-8'),
bytes(f'Z-Scale\0s{img.zscale}\0', 'utf-8'),
])
meta_data_size = pack('<i', len(meta_data))
return meta_name + meta_data_size + meta_data
Num = int(input())
Num_Array = []
count = 0
result = 0
for i in range(Num):
Num_Array.append(int(input()))
Num_Array.sort()
while count < Num:
if Num_Array.count(Num_Array[count]) == 1:
result += 1
else:
Num_Array.pop(count)
count -= 1
Num -= 1
count += 1
print(result)
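# Note: the loop above ends up counting the distinct values in the sorted
# list (duplicates are popped until a single copy remains). A minimal
# equivalent over the original list would be: print(len(set(Num_Array)))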
pub mod Opcode {
pub const LoadI: u8 = 0x1;
pub const LoadF: u8 = 0x2;
pub const LoadL: u8 = 0x3;
pub const LoadD: u8 = 0x4;
pub const LoadG: u8 = 0xa1;
pub const LoadAt: u8 = 0xa2;
pub const StoreAt: u8 = 0xa3;
pub const Ret: u8 = 0xa4;
pub const Ret0: u8 = 0xa5;
pub const Call: u8 = 0xa6;
pub const StoreG: u8 = 0xa7;
pub const Move: u8 = 0xa8;
pub const Label: u8 = 0xa9;
pub const Goto: u8 = 0xe1;
pub const GotoT: u8 = 0xe2;
pub const GotoF: u8 = 0xe3;
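// Only LoadI and Move are mapped to names below; every other opcode falls
// through to the empty string.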
pub fn to_string(op: u8) -> &'static str {
match op {
LoadI => "LoadI",
Move => "Move",
_ => "",
}
}
}
pub mod Size {
pub const Float: u32 = ::std::mem::size_of::<f32>() as u32;
pub const Double: u32 = ::std::mem::size_of::<f64>() as u32;
pub const Int: u32 = ::std::mem::size_of::<i32>() as u32;
pub const Long: u32 = ::std::mem::size_of::<i64>() as u32;
pub const Bool: u32 = ::std::mem::size_of::<bool>() as u32;
}
Paulus Le Son, a blogger detained in Vietnam since August 2011
Arrests of Dissident Bloggers Continue in Vietnam
As we have previously covered, the Vietnamese government continues to crack down on bloggers and writers who have spoken out against the Communist regime. The alternative news site Vietnam Redemptorist News has been targeted by the state, and several of its active contributors have been arrested. Paulus Le Son, 26, one of its most active bloggers, was arrested without a warrant.
Vietnam is increasingly applying vague national security laws to silence free speech and political opposition. Le Son is one of 17 bloggers who have been arrested since August 2011. He has been charged with “subversion” and “activities aimed at overthrowing the people’s administration”, and there is a campaign to release him and the others who have been detained.
EFF stands with the Committee to Protect Journalists, Reporters Without Borders, and Front Line in calling for the immediate release of all arrested bloggers.
Google Quietly Releases Country-by-Country Take Downs For Blogger
Most of the blogosphere’s attention has been focused on Twitter’s new censorship policies released last week, but Google has quietly unveiled its new policies for its blogging interface, Blogger. The changes reflect a compromise similar to Twitter's, allowing them to target their response to content removal requests by certain states. Over the coming weeks, Google will redirect users to a country-code top-level domain, or “ccTLD”, which corresponds to the user’s current location based upon their IP address. Google also provides users a way to get around these blocks by entering a formatted No Country Redirect or “NCR” URL; for example, a reader redirected to a blog's local ccTLD version can reach the global version by appending “/ncr” to the blogspot.com address (e.g., example.blogspot.com/ncr, for a hypothetical blog).
These moves come after pressure from countries like India that are cracking down on social media sites for content deemed “inappropriate”. On Blogger’s FAQ they explain why it has come to this:
Migrating to localized domains will allow us to continue promoting free expression and responsible publishing while providing greater flexibility in complying with valid removal requests pursuant to local law. By utilizing ccTLDs, content removals can be managed on a per country basis, which will limit their impact to the smallest number of readers. Content removed due to a specific country’s law will only be removed from the relevant ccTLD.
As these companies enter new countries, they become subject to local laws. Given that they say they already respond to valid and applicable court orders that could affect global access to certain content, it is in some ways an improvement to limit censorship to the region in which it applies. Google’s policy changes are similar to Twitter’s, which we reacted to last week:
For now, the overall effect is less censorship rather than more censorship, since they used to take things down for all users. But people have voiced concerns that "if you build it, they will come": if you build a tool for state-by-state censorship, states will start to use it. We should remain vigilant against this outcome.
The lasting consequences of this new policy cannot be foreseen; in the meantime, we will be keeping a close eye on Chilling Effects to track government requests to censor content on Blogger.
China Shuts Down Tibetan Blogs
The Chinese government shut down several independent Tibetan-language blogs on Wednesday. This occurred amid heightened tensions in the decades-long conflict between the minority group and the government. While some of the take-downs leave no explanation, there was one notice from the Chinese state on AmdoTibet, where the blog has been the only page of the site taken down. It reads:
Due to some of the blog users not publishing in accordance with the goal of this site, the blog has temporarily been shut down, we hope that blog users will have understanding!
We condemn the Chinese government’s heavy-handed censorship policies, and demand that it stop silencing the Tibetan voice in the country.
#include<bits/stdc++.h>
using namespace std;
#define fastio ios_base::sync_with_stdio(0); cin.tie(0); cout.tie(0);
#define debug cout << "\there\t";
#define deci(n) cout << std::fixed << std::setprecision(n);
#define all(a) a.begin(), a.end()
#define sz(a) (int)a.size()
#define len(a) (int)a.length()
#define M 1000000007
#define pb push_back
#define fr first
#define sc second
typedef long long ll;
const int mod = 1e9 +7;
void solve(){
int n;
cin >> n;
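// Check whether n can be written as 2^i * x for some i in [1, 31] with x a
// perfect square.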
for(int i=31; i>0; i--){
long long p = 1LL << i; // 1LL avoids signed overflow when i == 31
if(n % p)
continue;
long long x = n / p;
long long ro = sqrt((double)x);
if(ro*ro == x){
cout << "YES\n";
return;
}
}
cout << "NO\n";
return;
}
int32_t main(){
fastio;
int t; cin>>t;
//int i=1;
while(t--){
//cout <<"Case #" << i << ": ";
solve();
//i++;
}
return 0;
}
use crossbeam::deque::{Steal, Stealer as CBStealer, Worker as CBWorker};
use crate::{Evented, Interest, Reaction, Reactor, Result, System};
// -----------------------------------------------------------------------------
// - Worker -
// -----------------------------------------------------------------------------
pub struct Worker<T> {
inner: CBWorker<T>,
stealers: Vec<Evented>,
// evented: Evented,
current_stealer_id: usize,
}
impl<T> Worker<T> {
pub fn new() -> Result<Self> {
let inner = CBWorker::new_fifo();
// let evented = Evented::new()?;
let inst = Self {
inner,
stealers: Vec::new(),
current_stealer_id: 0,
};
Ok(inst)
}
pub fn dequeue(&mut self) -> Result<Stealer<T>> {
self.current_stealer_id += 1;
let evented = Evented::new()?;
self.stealers.push(evented.clone());
let inst = Stealer::new(
self.inner.stealer(),
evented,
self.current_stealer_id,
);
Ok(inst)
}
pub fn send(&mut self, val: T) {
self.stealers.iter_mut().for_each(|s| { s.poke(); });
self.inner.push(val)
}
}
impl<T> Reactor for Worker<T> {
type Input = T;
type Output = ();
fn react(&mut self, reaction: Reaction<Self::Input>) -> Reaction<Self::Output> {
match reaction {
Reaction::Event(ev) => Reaction::Event(ev),
Reaction::Value(val) => Reaction::Value(self.send(val)),
Reaction::Continue => Reaction::Continue,
}
}
}
// -----------------------------------------------------------------------------
// - Stealer -
// -----------------------------------------------------------------------------
pub struct Stealer<T> {
inner: CBStealer<T>,
id: usize,
pub evented: Evented,
}
impl<T> Stealer<T> {
fn new(inner: CBStealer<T>, evented: Evented, id: usize) -> Self {
Self { inner, evented, id }
}
pub fn arm(&mut self) -> Result<()> {
self.evented.reactor_id = System::reserve();
System::arm(&self.evented.fd, Interest::Read, self.evented.reactor_id)
}
}
impl<T> Reactor for Stealer<T> {
type Input = ();
type Output = Result<T>;
fn react(&mut self, reaction: Reaction<Self::Input>) -> Reaction<Self::Output> {
match reaction {
Reaction::Event(ev) if ev.owner != self.evented.reactor_id => Reaction::Event(ev),
Reaction::Event(ev) => {
if let Err(e) = self.evented.consume_event() {
return Reaction::Value(Err(e));
}
loop {
match self.inner.steal() {
Steal::Success(val) => break Reaction::Value(Ok(val)),
Steal::Retry => continue,
Steal::Empty => break Reaction::Continue,
}
}
}
Reaction::Value(()) | Reaction::Continue => Reaction::Continue,
}
}
}
def _logout():
if 'SID' in session:
sid = session['SID']
if sid != "" and sid is not None:
flag, ret = core.Disconnect(session['SID'])
session.clear()
def restore_state(self):
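# Tear down the rescue thread even if the cr50 reset cleanup raises.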
try:
self._cr50_reset.cleanup()
finally:
self.cleanup_rescue_thread()
/**
* Start an event to find the fourth click object action.
*/
public static void clickObjectType4Event(final Player player) {
if (player.doingClickObjectType4Event) {
return;
}
player.doingClickObjectType4Event = true;
CycleEventHandler.getSingleton().addEvent(player, new CycleEvent() {
@Override
public void execute(CycleEventContainer container) {
/*
if (player.playerAssistant.withInDistance(player.getObjectX() + player.objectXOffset, player.getObjectY() + player.objectYOffset, player.getX(), player.getY(), player.objectDistance) && player.clickObjectType == 4 && !player.tempMoving)
{
FourthClickObject.fourthClickObject(player, player.getObjectId(), player.getObjectX(), player.getObjectY());
}
*/
if (player.clickObjectType != 4) {
container.stop();
}
}
@Override
public void stop() {
player.doingClickObjectType4Event = false;
}
}, 1);
}
def construct_optimizer(self, parameters: Sequence[torch.Tensor]):
self.optimizer = self._opt_type(parameters, **self._opt_kwargs)
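# Hypothetical usage sketch: with self._opt_type = torch.optim.SGD and
# self._opt_kwargs = {"lr": 0.1}, this builds torch.optim.SGD(parameters, lr=0.1).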
Prevalence and predictors of underweight and stunting among children under 2 years of age in Eastern Kenya
Abstract Objective: To investigate key risk factors associated with undernutrition in the first few years of life. Design: A cross-sectional household survey was conducted in January 2018 collecting anthropometric data and other information on household, caregiver and child characteristics. Crude and adjusted odds ratios were calculated to assess the association of these characteristics with stunting and underweight outcomes. Setting: Kitui and Machakos counties in south-east Kenya. Participants: Caregivers and their children aged 0–23 months in 967 beneficiary households of the Government of Kenya’s cash for orphans and vulnerable children (CT-OVC) social protection scheme. Results: Twenty-three per cent of the 1004 children with anthropometric data were stunted, 10 % were underweight and 6 % experienced wasting. The strongest predictors of stunting and underweight were being in the second year of life and being born with a low birth weight. Residing in a poor household and having more than one child under 2 years of age in the household were also significant risk factors for being underweight. Although 43 % of children did not receive the minimal acceptable diet, this was not a significant factor associated with undernutrition. When age was removed as a covariate in children aged 12–23 months, being male resulted in a significantly higher risk of being stunted. Conclusions: While only 9 % of children were born with a low birth weight, these children were four to five times more likely to be stunted and underweight, suggesting that preventive measures during pregnancy could have significant nutrition and health benefits for young children in this study area.
#!/usr/local/bin/python3
import sys
import random
import argparse
from pathlib import Path
import numpy as np
from skimage import io
import SimpleITK as sitk
from cinemri.utils import get_patients
def convert_2d_image_file_to_pseudo_3d(input_file_path, spacing=[999, 1, 1], is_seg=False):
"""Reads an image (must be .npy or fromat recognized by skimage) and converts it into a series of niftis.
The input image should be grayscalse.
Parameters
----------
input_file_path : Path
A path to image to convert
spacing : list, default=[999, 1, 1]
is_seg : bool, default=False
Indicates if the specified image is a segmentation mask
Returns
-------
SimpleITK Image
An image converted to pseudo 2d format suitable for nnU-Net
"""
img = np.load(input_file_path) if input_file_path.suffix == ".npy" else io.imread(input_file_path)
return convert_2d_image_to_pseudo_3d(img, spacing, is_seg)
def convert_2d_image_to_pseudo_3d(image, spacing=[999, 1, 1], is_seg=False):
"""
Taken from https://github.com/MIC-DKFZ/nnUNet/blob/master/nnunet/utilities/file_conversions.py and slightly modified
Converts an image into a series of niftis.
The image should be grayscale
!!!2D images are often natural images which do not have a voxel spacing that could be used for resampling. These images
must be resampled by you prior to converting them to nifti!!!
Datasets converted with this utility can only be used with the 2d U-Net configuration of nnU-Net
Segmentations will be converted to np.uint32!
Parameters
----------
image : ndarray
An image to convert
spacing : list, default=[999, 1, 1]
is_seg : bool, default=False
Indicates if the specified image is a segmentation mask
Returns
-------
SimpleITK Image
An image converted to pseudo 2d format suitable for nnU-Net
"""
assert len(image.shape) == 2, 'images should be grayscale'
image = image[None] # add dimension
if is_seg:
image = image.astype(np.uint32)
itk_image = sitk.GetImageFromArray(image)
itk_image.SetSpacing(spacing[::-1])
return itk_image
def subset_to_diag_nnunet(patients,
segmentation_path,
target_path,
images_folder="images",
masks_folder="masks",
is_train=True):
"""Saves images an masks for training or test subset of patients
Parameters
----------
patients : list of Patients
A list of patients in the subset
segmentation_path : Path
A path to the segmentation dataset
target_path : Path
A path to save the subset
images_folder : str, default="images"
An images folder name
masks_folder : str, default="masks"
A masks folder name
is_train : bool, default=True
A boolean flag indicating if it is a training subset
"""
# Create folders of the subset
target_path.mkdir(exist_ok=True)
train_path_images = target_path / images_folder
train_path_images.mkdir(exist_ok=True)
train_path_masks = target_path / masks_folder
train_path_masks.mkdir(exist_ok=True)
# Extract and save files related to the specified patients list
for patient in patients:
for slice in patient.cinemri_slices:
extension = ".mha" if is_train else ".nii.gz"
image_id = slice.full_id if is_train else (slice.full_id + "_0000")
image_stem = train_path_images / image_id
slice_image_path = slice.build_path(segmentation_path / images_folder, extension=".npy")
img_pseudo_3d = convert_2d_image_file_to_pseudo_3d(slice_image_path)
sitk.WriteImage(img_pseudo_3d, str(image_stem) + extension)
mask_stem = train_path_masks / slice.full_id
slice_mask_path = slice.build_path(segmentation_path / masks_folder, extension=".npy")
mask_pseudo_3d = convert_2d_image_file_to_pseudo_3d(slice_mask_path, is_seg=True)
sitk.WriteImage(mask_pseudo_3d, str(mask_stem) + extension)
def convert_to_diag_nnunet(segmentation_path,
target_path,
train_folder="train",
images_folder="images",
masks_folder="masks"):
"""Converts the segmentation data subset to a diag nnU-Net input format
This format is expected by prepare method of a diag nnU-Net that
converts it to the nnU-Net input format
Parameters
----------
segmentation_path : Path
A path to a segmentation subset of cine-MRI data
target_path : Path
A destination path to save converted files
images_folder : str, default="images"
A name of a folder that contains scans inside the archive
masks_folder : str, default="masks"
A name of a folder that contains masks inside the archive
train_folder : str, default="train"
A name of a folder with training data
"""
# Make directories to save converted images
target_path.mkdir(exist_ok=True)
patients = get_patients(segmentation_path / images_folder, slice_extension=".npy")
# Convert training data
subset_to_diag_nnunet(patients,
segmentation_path,
target_path / train_folder,
images_folder,
masks_folder)
def to_diag_nnunet(argv):
"""A command line wrapper of convert_to_diag_nnunet
Parameters
----------
argv : list of str
"""
parser = argparse.ArgumentParser()
parser.add_argument('segmentation_path', type=str, help="path to a segmentation subset of cine-MRI data")
parser.add_argument('target_path', type=str, help="a destination path to save converted files")
parser.add_argument('--images', type=str, default="images", help="a folder inside the archive, which contains scans")
parser.add_argument('--masks', type=str, default="masks", help="a folder inside the archive, which contains masks")
parser.add_argument('--train', type=str, default="train", help="a name of a folder with training data")
args = parser.parse_args(argv)
segmentation_path = Path(args.segmentation_path)
target_path = Path(args.target_path)
images_folder = args.images
masks_folder = args.masks
train_folder = args.train
convert_to_diag_nnunet(segmentation_path,
target_path,
train_folder,
images_folder,
masks_folder)
if __name__ == '__main__':
np.random.seed(99)
random.seed(99)
# Very first argument determines action
actions = {
"to_diag_nnunet": to_diag_nnunet
}
try:
action = actions[sys.argv[1]]
except (IndexError, KeyError):
print('Usage: data_conversion ' + '/'.join(actions.keys()) + ' ...')
else:
action(sys.argv[2:])
Coronary artery bypass grafting in acute myocardial infarction: analysis of predictors of in-hospital mortality.
OBJECTIVE
Coronary artery bypass grafting (CABG) during the acute phase of infarction (AMI) is associated with increased operative risk. The aim of this study was to determine predictors of in-hospital mortality in patients undergoing CABG in AMI.
METHODS
During three years, all patients undergoing CABG in AMI were retrospectively analyzed from the institutional database. Sixty variables per patient were evaluated: 49 preoperative variables from the 2000 Bernstein-Parsonnet and EuroSCORE models, 4 preoperative variables not considered in these models (time between AMI and CABG, maximum CKMB, maximum Troponin and ST-segment elevation) and 7 intraoperative variables. Univariate and multivariate analyses for the outcome of in-hospital mortality were performed.
RESULTS
The mean time between AMI and CABG was 3.8 ± 3 days. The overall mortality was 19%. In the multivariate analysis, age > 65 years, CPB > 108 minutes, creatinine > 2 mg/dl and systolic pulmonary pressure > 60 mmHg were predictors of in-hospital mortality.
CONCLUSION
Conventional preoperative variables such as age > 65 years, creatinine > 2 mg/dl and systolic pulmonary pressure > 60 mmHg were predictive of in-hospital mortality in patients who underwent CABG in AMI.
#ifndef __ICM20948_H__
#define __ICM20948_H__
#include "i2cw.h"
// ICM20948MCM (multi-chip-module)
// ICM20948Module/Die
// Interface I2C
// Interface SPI
// Accelerometer
// Gyroscope
// Digital Motion Processor/DMP
// Temperature Sensor
// FIFO
// FSYNC
// Interrupts
// AK09916Module/Die
// Interface I2c
// Magnetometer
// https://github.com/kriswiner/MPU9250/issues/86 - reading magnetometer
class AK09916 : public I2CDevice {
public:
enum MagDataRate { MDR_10HZ=0x02, MDR_20HZ=0x04, MDR_50HZ=0x06, MDR_100HZ=0x08 };
AK09916(I2CMaster &i2c);
const float sensitivity = 0.15f;
static const uint8_t I2C_ADDR = 0x0c;
static const uint8_t CHIP_ID = 0x09;
static const uint8_t WIA1 = 0x00;
static const uint8_t WIA2 = 0x01;
static const uint8_t ST1 = 0x10;
static const uint8_t HXL = 0x11;
static const uint8_t ST2 = 0x18;
static const uint8_t CNTL2 = 0x31;
static const uint8_t CNTL3 = 0x32;
};
class ICM20948 : public I2CDevice {
public:
enum GyroRange { FSR_250DPS, FSR_500DPS, FSR_1000DPS, FSR_2000DPS };
enum AccelRange { FSR_2G, FSR_4G, FSR_8G, FSR_16G };
enum GyroDataRate{ GDR_1250HZ=0, GDR_550HZ=1, GDR_275HZ=3, GDR_220HZ=4, GDR_110HZ=9, GDR_100HZ=10, GDR_50HZ=50, GDR_44HZ=24, GDR_20HZ=54, GDR_10HZ=109, GDR_5HZ=219 };
enum AccelDataRate { ADR_1250HZ=0, ADR_625HZ=1, ADR_250HZ=4, ADR_125HZ=9, ADR_50HZ=24, ADR_25HZ=49, ADR_10HZ=124, ADR_5HZ=249, ADR_1HZ=1249 };
enum AUXI2C { MASTER_MODE, PASSTHROUGH_MODE };
public:
ICM20948(I2CMaster &i2c);
void setBank(uint8_t value);
void configI2CAuxilary(AUXI2C mode);
void triggerMagIO();
void readSensors(float *data);
float readTemperature();
void setGyroRange(GyroRange range=FSR_250DPS);
void setGyroLowpass(bool enabled=true, uint8_t mode=5);
void setGyroDataRate(GyroDataRate rate=GDR_1250HZ);
void setAccelRange(AccelRange range=FSR_2G);
void setAccelLowpass(bool enabled=true, uint8_t mode=5);
void setAccelDataRate(AccelDataRate rate=ADR_1250HZ);
void writeMagRegister(uint8_t register_address, uint8_t data);
void readMagRegister(uint8_t register_address, uint8_t* value);
void readMagRegisters(uint8_t register_address, uint8_t* data, uint length);
public:
const float accel_sensitvity[4] = {16384.f, 8192.f, 4096.f, 2048.f};
const float gyro_sensitvity[4] = {131.f, 65.5f, 32.8f, 16.4f};
const float mag_sensitivity = 0.15;
static const uint8_t CHIP_ID = 0xEA;
static const uint8_t I2C_ADDR = 0x68;
static const uint8_t I2C_ADDR_ALT = 0x69;
static const uint8_t BANKSEL = 0x7f;
static const uint8_t I2C_MST_ODR_CONFIG = 0x00;
static const uint8_t I2C_MST_CTRL = 0x01;
static const uint8_t I2C_MST_DELAY_CTRL = 0x02;
static const uint8_t I2C_SLV0_ADDR = 0x03;
static const uint8_t I2C_SLV0_REG = 0x04;
static const uint8_t I2C_SLV0_CTRL = 0x05;
static const uint8_t I2C_SLV0_DO = 0x06;
static const uint8_t EXT_SLV_SENS_DATA_00 = 0x3B;
static const uint8_t GYRO_SMPLRT_DIV = 0x00;
static const uint8_t GYRO_CONFIG_1 = 0x01;
static const uint8_t GYRO_CONFIG_2 = 0x02;
// setBank 0
static const uint8_t WHO_AM_I = 0x00;
static const uint8_t USER_CTRL = 0x03;
static const uint8_t PWR_MGMT_1 = 0x06;
static const uint8_t PWR_MGMT_2 = 0x07;
static const uint8_t INT_PIN_CFG = 0x0F;
// setBank 2
static const uint8_t ACCEL_SMPLRT_DIV_1 = 0x10;
static const uint8_t ACCEL_SMPLRT_DIV_2 = 0x11;
static const uint8_t ACCEL_INTEL_CTRL = 0x12;
static const uint8_t ACCEL_WOM_THR = 0x13;
static const uint8_t ACCEL_CONFIG = 0x14;
static const uint8_t ACCEL_XOUT_H = 0x2D;
static const uint8_t GRYO_XOUT_H = 0x33;
static const uint8_t TEMP_OUT_H = 0x39;
static const uint8_t TEMP_OUT_L = 0x3A;
};
#endif // __ICM20948_H__
Ascending from the Ashes: Images of Plato in Bradbury’s Fahrenheit 451
The pleasure of burning books consumed Montag, consumed him until the day the books burned back, their possibilities enflaming his curiosity to the point of existential immolation. Yet from these ashes, he rises. Fahrenheit 451 is a novel of ascent, an ascent to freedom that can be found only in knowledge. Superficially, the relationship between freedom and knowledge seems antagonistic; however, examining Bradbury’s novel in Plato’s light—particularly focusing on the images of the Cave and Line—can provide piercing insights into the inherent harmony uniting knowledge and freedom, rendering this antagonism illusory.
import netCDF4
import numpy
from numpy import pi, cos, sin, arctan2, arctan
import math
import defopt
import geo
# precision with which data will be saved in the netCDF file
REAL = 'float64'
class DataGen(object):
def __init__(self, prefix=''):
self.prefix = prefix
def setBoundingBox(self, xmin, xmax, ymin, ymax, zmin, zmax):
self.xmin = xmin
self.xmax = xmax
self.ymin = ymin
self.ymax = ymax
self.zmin = zmin
self.zmax = zmax
def setSizes(self, nx, ny, nz, nt):
# number of cells in x, y, z, time
self.nx = nx
self.ny = ny
self.nz = nz
self.nt = nt
def build(self):
self.buildVertical()
self.buildUniformHorizontal()
def buildVertical(self):
dz = (self.zmax - self.zmin)/float(self.nz)
print(f'zmin/zmax = {self.zmin}/{self.zmax}')
self.zhalf = numpy.array([self.zmin + (k + 0.5)*dz for k in range(self.nz)])
# top/bottom bounds of cell k, bracketing zhalf
self.ztop = numpy.array([self.zmin + k*dz for k in range(self.nz)])
self.zbot = numpy.array([self.zmin + (k + 1)*dz for k in range(self.nz)])
def buildUniformHorizontal(self):
ny1, nx1 = self.ny + 1, self.nx + 1
dy, dx = (self.ymax - self.ymin)/float(self.ny), (self.xmax - self.xmin)/float(self.nx)
# xx and yy are the lon and lats
x = numpy.array([self.xmin + i*dx for i in range(nx1)])
y = numpy.array([self.ymin + j*dy for j in range(ny1)])
self.xx, self.yy = numpy.meshgrid(x, y, indexing='xy')
# cell bounds
self.bounds_lon = numpy.zeros((self.ny, self.nx, 4), numpy.float64)
self.bounds_lat = numpy.zeros((self.ny, self.nx, 4), numpy.float64)
self.bounds_lon[..., 0] = self.xx[:-1, :-1]
self.bounds_lat[..., 0] = self.yy[:-1, :-1]
self.bounds_lon[..., 1] = self.xx[:-1, 1:]
self.bounds_lat[..., 1] = self.yy[:-1, 1:]
self.bounds_lon[..., 2] = self.xx[1:, 1:]
self.bounds_lat[..., 2] = self.yy[1:, 1:]
self.bounds_lon[..., 3] = self.xx[1:, :-1]
self.bounds_lat[..., 3] = self.yy[1:, :-1]
def applyStreamFunction(self, streamFunction):
zmin, zmax = self.zmin, self.zmax
self.potential = numpy.zeros((self.nt, self.nz, self.ny, self.nx, 4), numpy.float64)
A = geo.EARTH_RADIUS
nt = self.nt
for t in range(self.nt):
for k in range(self.nz):
z = self.zhalf[k]
x, y = self.xx, self.yy
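# The stream function expression is evaluated with x (lon), y (lat),
# z (depth), t (time index), A (earth radius) and nt in scope.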
pot = eval(streamFunction)
self.potential[t, k, ..., 0] = pot[:-1, :-1]
self.potential[t, k, ..., 1] = pot[:-1, 1:]
self.potential[t, k, ..., 2] = pot[1:, 1:]
self.potential[t, k, ..., 3] = pot[1:, :-1]
def computeUVFromPotential(self):
self.u = numpy.zeros((self.nt, self.nz, self.ny, self.nx), numpy.float64)
self.v = numpy.zeros((self.nt, self.nz, self.ny, self.nx), numpy.float64)
pp1 = numpy.zeros((self.ny, self.nx, 3), numpy.float64)
pp2 = numpy.zeros((self.ny, self.nx, 3), numpy.float64)
pp3 = numpy.zeros((self.ny, self.nx, 3), numpy.float64)
pp1[..., 0] = self.xx[:-1, 1:]
pp1[..., 1] = self.yy[:-1, 1:]
pp2[..., 0] = self.xx[1:, 1:]
pp2[..., 1] = self.yy[1:, 1:]
pp3[..., 0] = self.xx[1:, :-1]
pp3[..., 1] = self.yy[1:, :-1]
xyz1 = geo.lonLat2XYZArray(pp1, radius=geo.EARTH_RADIUS)
xyz2 = geo.lonLat2XYZArray(pp2, radius=geo.EARTH_RADIUS)
xyz3 = geo.lonLat2XYZArray(pp3, radius=geo.EARTH_RADIUS)
ds21 = geo.getArcLengthArray(xyz2, xyz1, radius=geo.EARTH_RADIUS)
ds23 = geo.getArcLengthArray(xyz2, xyz3, radius=geo.EARTH_RADIUS)
# cannot be zero (which is the case at the north pole)
numpy.clip(ds23, a_min=1.e-12, a_max=None, out=ds23)
for t in range(self.nt):
for k in range(self.nz):
dPhi21 = self.potential[t, k, ..., 2] - self.potential[t, k, ..., 1]
dPhi23 = self.potential[t, k, ..., 2] - self.potential[t, k, ..., 3]
# east, d phi/ d eta, divide by ds to get vector component
self.u[t, k, ...] = dPhi21 / ds21
# north, d phi/ d xi, divide by ds to get vector component. Note that
# the surface element ds x dz points down, hence negative sign
self.v[t, k, ...] = -dPhi23 / ds23
def rotatePole(self, deltaDeg=(0., 0.)):
lats = numpy.zeros((self.ny, self.nx,), numpy.float64)
lons = numpy.zeros((self.ny, self.nx,), numpy.float64)
alpha = numpy.pi * deltaDeg[1] / 180.
beta = numpy.pi * deltaDeg[0] / 180.
cos_alp = numpy.cos(alpha)
sin_alp = numpy.sin(alpha)
cos_bet = numpy.cos(beta)
sin_bet = numpy.sin(beta)
# http://gis.stackexchange.com/questions/10808/lon-lat-transformation
rot_alp = numpy.array([[ cos_alp, 0., sin_alp],
[ 0., 1., 0. ],
[-sin_alp, 0., cos_alp]])
rot_bet = numpy.array([[ cos_bet, sin_bet, 0.],
[-sin_bet, cos_bet, 0.],
[ 0. , 0., 1.]])
transfMatrix = numpy.dot(rot_bet, rot_alp)
# original position
xyzOld = numpy.zeros((3,), numpy.float64)
# transformed position
xyzNew = numpy.zeros((3,), numpy.float64)
for j in range(self.ny):
for i in range(self.nx):
for vertex in range(4):
the = numpy.pi * self.bounds_lat[j, i, vertex] / 180.
lam = numpy.pi * self.bounds_lon[j, i, vertex] / 180.
cos_the = numpy.cos(the)
sin_the = numpy.sin(the)
rho = cos_the
cos_lam = numpy.cos(lam)
sin_lam = numpy.sin(lam)
xyzOld[:] = rho * cos_lam, rho * sin_lam, sin_the
xyzNew[:] = numpy.dot(transfMatrix, xyzOld)
self.bounds_lat[j, i, vertex] = 180. * math.asin(xyzNew[2]) / numpy.pi
self.bounds_lon[j, i, vertex] = 180. * math.atan2(xyzNew[1], xyzNew[0]) / numpy.pi
# use the convention 0 <= lon < 360
# self.bounds_lon[j, i, vertex] %= 360.
# date line fix
dLon = self.bounds_lon[j, i, vertex] - self.bounds_lon[j, i, 0]
if dLon > +270.:
self.bounds_lon[j, i, vertex] -= 360.
if dLon < -270.:
self.bounds_lon[j, i, vertex] += 360.
def save(self):
ncT = netCDF4.Dataset(self.prefix + 'T.nc', 'w')
ncT.createDimension('z', self.nz)
ncT.createDimension('y', self.ny)
ncT.createDimension('x', self.nx)
ncT.createDimension('nvertex', 4)
ncT.createDimension('axis_nbounds', 2)
deptht_bounds = ncT.createVariable('deptht_bounds', REAL, ('z', 'axis_nbounds'))
deptht_bounds[:, 0] = self.ztop
deptht_bounds[:, 1] = self.zbot
bounds_lat = ncT.createVariable('bounds_lat', REAL, ('y', 'x', 'nvertex'))
bounds_lat[:] = self.bounds_lat
bounds_lon = ncT.createVariable('bounds_lon', REAL, ('y', 'x', 'nvertex'))
bounds_lon[:] = self.bounds_lon
ncT.close()
ncU = netCDF4.Dataset(self.prefix + 'U.nc', 'w')
ncU.createDimension('t', self.nt)
ncU.createDimension('z', self.nz)
ncU.createDimension('y', self.ny)
ncU.createDimension('x', self.nx)
ncU.createDimension('axis_nbounds', 2)
uo = ncU.createVariable('uo', REAL, ('t', 'z', 'y', 'x'), fill_value=1.e20)
uo.standard_name = 'sea_water_x_velocity'
uo.units = 'm/s'
uo[:] = self.u
ncU.earthRadius = f'earth radius = {geo.EARTH_RADIUS} in metres'
ncU.close()
ncV = netCDF4.Dataset(self.prefix + 'V.nc', 'w')
ncV.createDimension('t', self.nt)
ncV.createDimension('z', self.nz)
ncV.createDimension('y', self.ny)
ncV.createDimension('x', self.nx)
ncV.createDimension('axis_nbounds', 2)
vo = ncV.createVariable('vo', REAL, ('t', 'z', 'y', 'x'), fill_value=1.e20)
vo.standard_name = 'sea_water_y_velocity'
vo.units = 'm/s'
vo[:] = self.v
ncV.close()
def main(*, streamFunction: str="(cos(t*2*pi/nt)+2)*(0.5*(y/180)**2 + sin(2*pi*x/360))", prefix: str='',
xmin: float=-180., xmax: float=180.,
ymin: float=-90., ymax: float=90.,
zmin: float=0., zmax: float=1.0,
nx: int=36, ny: int=18, nz: int=1, nt: int=1,
deltaDeg: str="(0.,0.)"):
"""Generate data
:param streamFunction: stream function expression of x (logical lon), y (logical lat), z (depth) and t (time index)
:param prefix: file prefix, data will be saved as <prefix>T.nc, <prefix>U.nc and <prefix>V.nc
:param xmin: min longitude
:param xmax: max longitude
:param ymin: min latitude
:param ymax: max latitude
:param zmin: min depth
:param zmax: max depth
:param nx: number of cells in longitude
:param ny: number of cells in latitude
:param nz: number of vertical cells
:param nt: number of time steps
:param deltaDeg: longitude, latitude pole displacement
"""
lldg = DataGen(prefix)
lldg.setSizes(nx, ny, nz, nt)
lldg.setBoundingBox(xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax)
lldg.build()
deltaDeg = eval(deltaDeg)
if deltaDeg[0] != 0 or deltaDeg[1] != 0:
# the rotatePole function does not work at lon = 0 if there is no displacement
lldg.rotatePole(deltaDeg=deltaDeg)
lldg.applyStreamFunction(streamFunction)
lldg.computeUVFromPotential()
lldg.save()
if __name__ == '__main__':
defopt.run(main)
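# defopt maps the keyword-only parameters of main() onto command-line flags,
# e.g. (hypothetical invocation): python datagen.py --nx 72 --ny 36 --prefix test_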
/**
* @file DecodeBase64Stream.cpp
*
* @brief File for class DecodeBase64Stream
* @date 05.08.2016
* @author duke
*/
#include "DecodeBase64Stream.h"
#include "DecodeBase64StreamBuf.h"
DecodeBase64Stream::DecodeBase64Stream(ostream &a_out) :
Base64Stream(new DecodeBase64StreamBuf(a_out))
{
exceptions(ios::badbit);
}
DecodeBase64Stream::~DecodeBase64Stream()
{
delete rdbuf();
}
// Transition from search to non-search and make sure instant isn't displayable.
// See bug http://crbug.com/100368 for details.
IN_PROC_BROWSER_TEST_F(InstantTest, MAYBE(SearchToNonSearch)) {
ASSERT_TRUE(test_server()->Start());
EnableInstant();
SetupInstantProvider("instant.html");
content::WindowedNotificationObserver instant_support_observer(
chrome::NOTIFICATION_INSTANT_SUPPORT_DETERMINED,
content::NotificationService::AllSources());
omnibox()->SetUserText(ASCIIToUTF16("def"));
GURL url(test_server()->GetURL("files/empty.html"));
omnibox()->SetUserText(UTF8ToUTF16(url.spec()));
instant_support_observer.Wait();
EXPECT_TRUE(preview());
EXPECT_FALSE(loader()->ready());
EXPECT_FALSE(instant()->is_displayable());
EXPECT_FALSE(instant()->IsCurrent());
ASSERT_TRUE(content::ExecuteJavaScript(
preview()->web_contents()->GetRenderViewHost(), std::wstring(),
L"window.chrome.searchBox.onchange();"));
ASSERT_TRUE(WaitForMessageToBeProcessedByRenderer());
EXPECT_TRUE(preview());
EXPECT_TRUE(loader()->ready());
EXPECT_FALSE(instant()->is_displayable());
EXPECT_FALSE(instant()->IsCurrent());
}
Q: Which dinosaur is the largest?
A: The biggest dinosaur is probably ultrasauros. We only have a few bones of this late Jurassic (140 million years ago) plant-eater from Colorado, but the bones show an animal that was six stories high and may have weighed more than 50 tons. A four-legged plant-eating dinosaur found recently in Argentina, argentinosaurus, may have been even heavier than ultrasauros. If it was a brachiosaur like ultrasauros then it probably was the biggest, but if it was a titanosaur, another kind of big plant-eater common in South America, it wouldn't have been so bulky.
Q: Which dinosaurs were bigger — plant-eaters or meat-eaters?
A: Plant-eaters by far. T. rex and giganotosaurus, the biggest meat-eaters, were 7 or 8 tons and 45 feet long. The biggest plant-eaters were 100 tons and 110 feet long!
Q: What was the smallest dinosaur?
A: The smallest dinosaur that we know of was a meat-eater 200 million years old from Nova Scotia which was the size of a little robin. But we know it only from footprints, and maybe that was only a baby. As for full-grown fossil dinosaurs, the smallest are the little bird-hipped plant-eaters like lesothosaurus, which were only the size of big chickens. Compsognathus, a meat-eater the size of a turkey, is often called the smallest, but that was information based on a skeleton that belonged to a young, not yet full-grown animal.
Q: Which dinosaur was the strongest?
A: The strongest was probably the biggest, ultrasauros, which was six stories high. Or, among meat-eaters, T. rex.
Q: In the book The Biggest Dinosaurs by Michael Berenstain, it says that the seismosaurus, found in Mexico, might be larger than the ultrasaurus. Do you know if it is?
A: Ultrasauros was the biggest we know, but we only know it from a few bones. It appears to be a brachiosaur, which was giraffe-like and heavily built. Seismosaurus was indeed longer, the longest we know, and we know it from a whole back end. It was a diplodocus relative, a longer-bodied animal with a whip tail but not so heavily built. It comes from New Mexico.
Q: How heavy and how tall was gigantosaurus?
A: I figure you are asking about giganotosaurus, the newly named biggest of all meat-eaters. The name means giant of the south. From most of the skeleton it is estimated to be about 46 feet long and 8 tons in weight. We don't measure dinosaurs by height since they didn't stand tall like people. Meat-eaters leaned forward with their tails held high, so they were nearly horizontal over their hips. There is also a gigantosaurus. It is a big plant-eater with four legs, like brachiosaurus. It is known from just a few bones from England found more than a century ago.
Q: What is the largest dinosaur bone ever found?
A: The biggest single dinosaur bone, by weight, is one of the backbones of argentinosaurus. One backbone was five feet by five feet and more than one ton in fossil weight. Of course it was lighter when the animal was alive before minerals entered into it to preserve it. The largest bone group ever found was the hip area of the supersaurus, another four-legged plant-eater. Brigham Young University researchers dug it up in Colorado about five years ago. It is about six feet wide and up to eight feet tall. The biggest skull ever found was the eight foot long skull of torosaurus, a horned dinosaur which ate plants near the end of dinosaur time in the American West. It was discovered by the Milwaukee Public Museum crews about eight years ago.
Q: What is the smartest dinosaur? What dinosaur had the largest brain? The smallest brain?
A: The smartest dinosaur, if you compare body size to brain size (a rough measurement of intelligence), is troodon. Troodon was a meat-eater the size of a man, with a brain as big as an avocado pit. It was not only the smartest dinosaur, but the smartest animal of dinosaur times, including our ancestors — the mammals of the Mesozoic Era. The biggest-brained dinosaur of all was probably T. rex, since it was such a huge animal. Its brain was about as big as ours, but its body was many times bigger than ours. Stegosaurus was a tiny-brained dinosaur compared to its size. Its brain wasn't much bigger than a ping pong ball and its body was the size of a truck.
Q: How old is the oldest dinosaur?
A: The oldest dinosaurs known are 228 million years old, such as eoraptor, a dog sized meat-eater found in Argentina. Eoraptor means "dawn thief."
Q: What was the most ferocious dinosaur?
A: The scariest dinosaur was probably T. rex, since it was the most powerful meat-eater. Or perhaps Utahraptor, a giant raptor 20 feet long, with a huge killer toe and hand claws and sharp teeth.
Q: What was the speed of the fastest dinosaur?
A: The fastest dinosaurs were probably the ostrich mimic ornithomimids, toothless meat-eaters with long limbs like ostriches. They ran at least 25 miles per hour from our estimates based on footprints in mud. But that's just a guess and you don't run your fastest in mud.
Q: What were the smallest and biggest eggs ever found?
A: The biggest dinosaur eggs we know are shaped like giant footballs and are about 19 inches long. They belong to a meat-eater from Asia called segnosaurus. The smallest dinosaur eggs are just a few inches across and more tennis ball-shaped, and we don't know what dinosaur made them.
Q: Which dinosaur has the longest neck?
A: The longest necked dinosaur was the 90-foot mamenchisaurus, a four-legged plant-eater from China. Its neck alone was 32 feet long — nearly as long as a school bus.
Q: Which dinosaur was nicknamed "long neck"? And which one really had the longest neck?
A: "Long neck" is a term used in the movie The Land Before Time to refer to what they loosely drew as apatosaurus, also known as brontosaurus, by the looks of it. The longest necked dinosaurs were the giant plant-eaters, the sauropods which included apatosaurus and particularly mamenchisaurus. That dinosaur, from China, had the longest neck of any animal ever, more than 33 feet long on a body about 85 feet long. This giant's neck was held up by 19 neck vertebrae and neck ribs the size of poster tubes.
Q: Which dinosaur had the biggest head?
A: The biggest-headed dinosaur was torosaurus, "bull lizard," a horned dinosaur related to triceratops which lived in the American West 65 million years ago. It had a skull eight feet long, longer than any animal on land ever.
Q: Which Mesozoic period had the smallest dinosaur?
A: The smallest dinosaurs were probably from the late Triassic and early Jurassic. That's where we find the smallest ornithischian dinosaurs so far. Dinosaurs got biggest in the late Jurassic and Cretaceous.
Q: Did the ankylosaurus have the hardest shell?
A: Ankylosaurs sure had hard shells, even on their eyelids! Hard to say which was hardest of any dinosaur though. Same for sharpest teeth.
Q: What is the ugliest dinosaur?
A: I don't know about the ugliest dinosaur — they probably all were good looking to their own kind. I think the ankylosaurs were really ugly — squat and covered with armor.
Q: How do we know that the Tyrannosaurus rex was the meanest dinosaur?
A: T. rex may not have been the meanest; he might just have been a garbage eater, a scavenger. We don't know. But we do know he had the strongest jaws and the biggest teeth of any meat-eater and that he was the biggest, so we guess that he might have been the meanest. One scientist says Utahraptor was meaner, even though it was just 20 feet long. But it had huge claws on its toes and fingers, weapons T. rex didn't have.
#!/usr/bin/python3
"""
This module prints a text with 2 new lines after each of
these characters: `.`, `?`, `:`
Example:
Lorem ipsum dolor sit amet, consectetur adipiscing elit.$
$
Quonam modo?$
$
Utrum igitur tibi litteram videor an totas paginas commovere?$
$
Non autem hoc:$
$
* text must be a string
* There should be no space at the beginning or
at the end of each printed line
"""
def text_indentation(text):
"""
Prints a text with indentation
Args:
text (str): The text to prints.
Raises:
TypeError: If `text` isn't string.
"""
if type(text) is not str:
raise TypeError('text must be a string')
text_length = len(text)
idx = 0
new_string = ''
starting = True
while idx < text_length:
if text[idx] == ' ' and starting is True:
idx += 1
continue
starting = False
if text[idx] in '.?:':
new_string += text[idx]
new_string += '\n'
new_string += '\n'
idx += 1
while idx < text_length and text[idx] == ' ':
idx += 1
continue
if idx < text_length:
new_string += text[idx]
idx += 1
print(new_string, end='')
#include<cstdio>
using namespace std;
int p1,p2;
double f1,f2,t1,t2;
double maxx(double x,double y)
{
if (x>y) return x;
else return y;
}
int main()
{
scanf("%d%d%lf%lf",&p1,&p2,&t1,&t2);
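// Each player's score is max(3*p/10, p - (p/250)*t) for initial points p
// and time t; the higher final score wins, equal scores tie.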
f1=maxx(3*p1/10.0,p1-p1/250.0*t1);
f2=maxx(3*p2/10.0,p2-p2/250.0*t2);
if(f1>f2) printf("Misha");
else if(f1<f2) printf("Vasya");
else printf("Tie");
return 0;
}
// Make sure that interjecting logs updates the "current" log correctly.
TEST_F(MetricsLogManagerTest, InterjectedLog) {
MetricsLogManager log_manager;
MetricsLog* ongoing_log = CreateLog(MetricsLog::ONGOING_LOG);
MetricsLog* temp_log = CreateLog(MetricsLog::INITIAL_STABILITY_LOG);
log_manager.BeginLoggingWithLog(base::WrapUnique(ongoing_log));
EXPECT_EQ(ongoing_log, log_manager.current_log());
log_manager.PauseCurrentLog();
EXPECT_EQ(nullptr, log_manager.current_log());
log_manager.BeginLoggingWithLog(base::WrapUnique(temp_log));
EXPECT_EQ(temp_log, log_manager.current_log());
log_manager.FinishCurrentLog(log_store());
EXPECT_EQ(nullptr, log_manager.current_log());
log_manager.ResumePausedLog();
EXPECT_EQ(ongoing_log, log_manager.current_log());
}
package org.vaadin.addons.tatu.longpressextension.client;
public class LongPressExtensionState
extends com.vaadin.shared.AbstractComponentState {
public int timeOut = 2000;
}
/*
* Copyright (C) 2018 Red Hat, Inc. and others.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*/
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// enum copied from monaco.d.ts
/**
* The style in which the editor's cursor should be rendered.
*/
export enum TextEditorCursorStyle {
/**
* As a vertical line
*/
Line = 1,
/**
* As a block
*/
Block = 2,
/**
* As a horizontal line, under character
*/
Underline = 3,
/**
* As a thin vertical line
*/
LineThin = 4,
/**
* As an outlined block, on top of a character
*/
BlockOutline = 5,
/**
* As a thin horizontal line, under a character
*/
UnderlineThin = 6
}
export function cursorStyleToString(cursorStyle: TextEditorCursorStyle): string {
switch (cursorStyle) {
case TextEditorCursorStyle.Line:
return 'line';
case TextEditorCursorStyle.Block:
return 'block';
case TextEditorCursorStyle.Underline:
return 'underline';
case TextEditorCursorStyle.LineThin:
return 'line-thin';
case TextEditorCursorStyle.BlockOutline:
return 'block-outline';
case TextEditorCursorStyle.UnderlineThin:
return 'underline-thin';
default:
throw new Error('cursorStyleToString: Unknown cursorStyle');
}
}
package ast
type Node interface {
}
//-----------------------------------------------------------------------------
// Description:
// Insert a point, assuming the point is unique and does not require a
// locator. It does not mean it does not use a locator. It just means that
// some implementations may skip the use of a locator.
void vtkHyperOctreeClipCutPointsGrabber::InsertPoint(vtkIdType ptId,
double pt[3],
double pcoords[3],
int vtkNotUsed(ijk)[3])
{
this->Triangulator->InsertPoint(ptId,pt,pcoords,0);
}
package cmd
import (
"bytes"
"errors"
"testing"
"github.com/ytakahashi/gecco/aws"
"github.com/ytakahashi/gecco/config"
"github.com/ytakahashi/gecco/ext"
)
type mockedListCommand1 struct{}
func (c mockedListCommand1) runCommand() error {
return nil
}
func (c mockedListCommand1) initListCommand(o config.FilterOption, client aws.Ec2Client) {
}
func TestNewListCmd(t *testing.T) {
command := newListCmd(mockedListCommand1{})
validate := func(name string, actual string, expected string) {
if actual != expected {
t.Errorf("Result of %v was '%v', ecpected: '%v'", name, actual, expected)
}
}
name := "Use"
expectedUse := "list"
actualUse := command.Use
validate(name, actualUse, expectedUse)
name = "Short"
expectedShort := "lists EC2 instances"
actualShort := command.Short
validate(name, actualShort, expectedShort)
actualFlags := command.Flags()
statusFlag := actualFlags.Lookup("status")
tagKeyFlag := actualFlags.Lookup("tagKey")
tagValueFlag := actualFlags.Lookup("tagValue")
name = "Flags.status.Name"
expectedStatusFlagName := "status"
actualStatusFlagName := statusFlag.Name
validate(name, actualStatusFlagName, expectedStatusFlagName)
name = "Flags.status.Usage"
expectedStatusFlagUsage := "filters instances by status"
actualStatusFlagUsage := statusFlag.Usage
validate(name, actualStatusFlagUsage, expectedStatusFlagUsage)
name = "Flags.tagKey.Name"
expectedTagKeyFlagName := "tagKey"
actualTagKeyFlagName := tagKeyFlag.Name
validate(name, actualTagKeyFlagName, expectedTagKeyFlagName)
name = "Flags.tagKey.Usage"
expectedTagKeyFlagUsage := "filters instances by tag key"
actualTagKeyFlagUsage := tagKeyFlag.Usage
validate(name, actualTagKeyFlagUsage, expectedTagKeyFlagUsage)
name = "Flags.tagValue.Name"
expectedTagValueFlagName := "tagValue"
actualTagValueFlagName := tagValueFlag.Name
validate(name, actualTagValueFlagName, expectedTagValueFlagName)
name = "Flags.tagValue.Usage"
expectedTagValueFlagUsage := "filters instances by tag value"
actualTagValueFlagUsage := tagValueFlag.Usage
validate(name, actualTagValueFlagUsage, expectedTagValueFlagUsage)
err := command.RunE(nil, nil)
if err != nil {
t.Errorf("Error")
}
}
type mockedEc2_1 struct{}
func (e mockedEc2_1) GetInstances(o config.FilterOption, s aws.IEc2Service) (instances aws.Ec2Instances, err error) {
return aws.Ec2Instances{}, nil
}
func (e mockedEc2_1) StartInstance(target string, s aws.IEc2Service) error {
return nil
}
func (e mockedEc2_1) StopInstance(target string, s aws.IEc2Service) error {
return nil
}
type mockedEc2_2 struct{}
func (e mockedEc2_2) GetInstances(o config.FilterOption, s aws.IEc2Service) (instances aws.Ec2Instances, err error) {
return nil, errors.New("error")
}
func (e mockedEc2_2) StartInstance(target string, s aws.IEc2Service) error {
return nil
}
func (e mockedEc2_2) StopInstance(target string, s aws.IEc2Service) error {
return nil
}
func Test_ListCommand_InitListCommand(t *testing.T) {
o := config.FilterOption{
Status: "status",
}
sut := listCommand{}
sut.initListCommand(o, mockedEc2_1{})
if sut.options != o {
t.Errorf("Error %v", sut.options)
}
if sut.ec2Client == nil {
t.Error("Error")
}
}
func Test_ListCommand_RunCommand1(t *testing.T) {
sut := listCommand{
options: config.FilterOption{Status: "foo"},
}
err := sut.runCommand()
if err == nil {
t.Error("Error should be thrown")
}
}
func Test_ListCommand_RunCommand2(t *testing.T) {
sut := listCommand{
options: config.FilterOption{},
ec2Client: mockedEc2_2{},
}
err := sut.runCommand()
if err == nil {
t.Error("Error should be thrown")
}
}
func Test_ListCommand_RunCommand3(t *testing.T) {
sut := listCommand{
options: config.FilterOption{
OutputFormat: "text",
},
ec2Client: mockedEc2_1{},
}
err := sut.runCommand()
if err != nil {
t.Errorf("%v", err)
}
}
type mockedEc2Instance struct{}
func (instances mockedEc2Instance) GetFilteredInstances(filter ext.ICommand) (selected string, err error) {
return
}
func (instances mockedEc2Instance) ToString(outputFormat config.OutputFormat) (string, error) {
return "", errors.New("")
}
func Test_PrintInstances3(t *testing.T) {
instanceList := mockedEc2Instance{}
buf := &bytes.Buffer{}
err := printInstances(instanceList, buf, config.Unknown)
if err == nil {
t.Errorf("ERR: %v", err)
}
}
|
package test.expression;
import java.io.Serializable;
public class Conditional {
public void m() {
a((true ? new Double(0) : new String()));
b((true ? new Double(0) : new String()));
c((true ? new Double(0) : new String()));
}
public void a(Object x) {
}
public void b(Comparable<?> y) {
}
public void c(Serializable z) {
}
} |
export const environment = {
production: true,
globalSpinner: true,
modificationFlag: false,
serverAPIURL: 'https://cartella.provincia.tn.it/cartella-asl/api',
defaultPosition: { latitude: 46.1025748, longitude: 10.927261 },
aacClientId: '7175611b-cb56-4d76-b516-ee2df4461711',
redirectUrl: 'https://cartella.provincia.tn.it/asl-ruoli/',
logout_redirect: 'asl-login/',
scope: 'profile openid email profile.accountprofile.me profile.adc.me edit.all',
aacUrl: 'https://aac.platform.smartcommunitylab.it'
};
|
/*
* edac_device.c
* (C) 2007 www.douglaskthompson.com
*
* This file may be distributed under the terms of the
* GNU General Public License.
*
* Written by Doug Thompson <[email protected]>
*
* edac_device API implementation
* 19 Jan 2007
*/
#include <asm/page.h>
#include <linux/uaccess.h>
#include <linux/ctype.h>
#include <linux/highmem.h>
#include <linux/init.h>
#include <linux/jiffies.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/smp.h>
#include <linux/spinlock.h>
#include <linux/sysctl.h>
#include <linux/timer.h>
#include "edac_device.h"
#include "edac_module.h"
/* lock for the list: 'edac_device_list', manipulation of this list
* is protected by the 'device_ctls_mutex' lock
*/
static DEFINE_MUTEX(device_ctls_mutex);
static LIST_HEAD(edac_device_list);
#ifdef CONFIG_EDAC_DEBUG
static void edac_device_dump_device(struct edac_device_ctl_info *edac_dev)
{
edac_dbg(3, "\tedac_dev = %p dev_idx=%d\n",
edac_dev, edac_dev->dev_idx);
edac_dbg(4, "\tedac_dev->edac_check = %p\n", edac_dev->edac_check);
edac_dbg(3, "\tdev = %p\n", edac_dev->dev);
edac_dbg(3, "\tmod_name:ctl_name = %s:%s\n",
edac_dev->mod_name, edac_dev->ctl_name);
edac_dbg(3, "\tpvt_info = %p\n\n", edac_dev->pvt_info);
}
#endif /* CONFIG_EDAC_DEBUG */
struct edac_device_ctl_info *edac_device_alloc_ctl_info(
unsigned sz_private,
char *edac_device_name, unsigned nr_instances,
char *edac_block_name, unsigned nr_blocks,
unsigned offset_value, /* zero, 1, or other based offset */
struct edac_dev_sysfs_block_attribute *attrib_spec, unsigned nr_attrib,
int device_index)
{
struct edac_device_ctl_info *dev_ctl;
struct edac_device_instance *dev_inst, *inst;
struct edac_device_block *dev_blk, *blk_p, *blk;
struct edac_dev_sysfs_block_attribute *dev_attrib, *attrib_p, *attrib;
unsigned total_size;
unsigned count;
unsigned instance, block, attr;
void *pvt, *p;
int err;
edac_dbg(4, "instances=%d blocks=%d\n", nr_instances, nr_blocks);
/* Calculate the size of memory we need to allocate AND
* determine the offsets of the various item arrays
* (instance,block,attrib) from the start of an allocated structure.
* We want the alignment of each item (instance,block,attrib)
* to be at least as stringent as what the compiler would
* provide if we could simply hardcode everything into a single struct.
*/
p = NULL;
dev_ctl = edac_align_ptr(&p, sizeof(*dev_ctl), 1);
/* Calc the 'end' offset past end of ONE ctl_info structure
* which will become the start of the 'instance' array
*/
dev_inst = edac_align_ptr(&p, sizeof(*dev_inst), nr_instances);
/* Calc the 'end' offset past the instance array within the ctl_info
* which will become the start of the block array
*/
count = nr_instances * nr_blocks;
dev_blk = edac_align_ptr(&p, sizeof(*dev_blk), count);
/* Calc the 'end' offset past the dev_blk array
* which will become the start of the attrib array, if any.
*/
/* calc how many nr_attrib we need */
if (nr_attrib > 0)
count *= nr_attrib;
dev_attrib = edac_align_ptr(&p, sizeof(*dev_attrib), count);
/* Calc the 'end' offset past the attributes array */
pvt = edac_align_ptr(&p, sz_private, 1);
/* 'pvt' now points to where the private data area is.
* At this point 'pvt' (like dev_inst,dev_blk and dev_attrib)
* is baselined at ZERO
*/
total_size = ((unsigned long)pvt) + sz_private;
/* Allocate the amount of memory for the set of control structures */
dev_ctl = kzalloc(total_size, GFP_KERNEL);
if (dev_ctl == NULL)
return NULL;
/* Adjust pointers so they point within the actual memory we
* just allocated rather than an imaginary chunk of memory
* located at address 0.
* 'dev_ctl' points to REAL memory, while the others are
* ZERO based and thus need to be adjusted to point within
* the allocated memory.
*/
dev_inst = (struct edac_device_instance *)
(((char *)dev_ctl) + ((unsigned long)dev_inst));
dev_blk = (struct edac_device_block *)
(((char *)dev_ctl) + ((unsigned long)dev_blk));
dev_attrib = (struct edac_dev_sysfs_block_attribute *)
(((char *)dev_ctl) + ((unsigned long)dev_attrib));
pvt = sz_private ? (((char *)dev_ctl) + ((unsigned long)pvt)) : NULL;
/* Begin storing the information into the control info structure */
dev_ctl->dev_idx = device_index;
dev_ctl->nr_instances = nr_instances;
dev_ctl->instances = dev_inst;
dev_ctl->pvt_info = pvt;
/* Default logging of CEs and UEs */
dev_ctl->log_ce = 1;
dev_ctl->log_ue = 1;
/* Name of this edac device */
snprintf(dev_ctl->name, sizeof(dev_ctl->name), "%s", edac_device_name);
edac_dbg(4, "edac_dev=%p next after end=%p\n",
dev_ctl, pvt + sz_private);
/* Initialize every Instance */
for (instance = 0; instance < nr_instances; instance++) {
inst = &dev_inst[instance];
inst->ctl = dev_ctl;
inst->nr_blocks = nr_blocks;
blk_p = &dev_blk[instance * nr_blocks];
inst->blocks = blk_p;
/* name of this instance */
snprintf(inst->name, sizeof(inst->name),
"%s%u", edac_device_name, instance);
/* Initialize every block in each instance */
for (block = 0; block < nr_blocks; block++) {
blk = &blk_p[block];
blk->instance = inst;
snprintf(blk->name, sizeof(blk->name),
"%s%d", edac_block_name, block+offset_value);
edac_dbg(4, "instance=%d inst_p=%p block=#%d block_p=%p name='%s'\n",
instance, inst, block, blk, blk->name);
/* if there are NO attributes OR no attribute pointer
* then continue on to next block iteration
*/
if ((nr_attrib == 0) || (attrib_spec == NULL))
continue;
/* setup the attribute array for this block */
blk->nr_attribs = nr_attrib;
attrib_p = &dev_attrib[block*nr_instances*nr_attrib];
blk->block_attributes = attrib_p;
edac_dbg(4, "THIS BLOCK_ATTRIB=%p\n",
blk->block_attributes);
/* Initialize every user specified attribute in this
* block with the data the caller passed in
* Each block gets its own copy of pointers,
* and its unique 'value'
*/
for (attr = 0; attr < nr_attrib; attr++) {
attrib = &attrib_p[attr];
/* populate the unique per attrib
* with the code pointers and info
*/
attrib->attr = attrib_spec[attr].attr;
attrib->show = attrib_spec[attr].show;
attrib->store = attrib_spec[attr].store;
attrib->block = blk; /* up link */
edac_dbg(4, "alloc-attrib=%p attrib_name='%s' attrib-spec=%p spec-name=%s\n",
attrib, attrib->attr.name,
&attrib_spec[attr],
attrib_spec[attr].attr.name
);
}
}
}
/* Mark this instance as merely ALLOCATED */
dev_ctl->op_state = OP_ALLOC;
/*
* Initialize the 'root' kobj for the edac_device controller
*/
err = edac_device_register_sysfs_main_kobj(dev_ctl);
if (err) {
kfree(dev_ctl);
return NULL;
}
/* at this point, the root kobj is valid, and in order to
* 'free' the object, then the function:
* edac_device_unregister_sysfs_main_kobj() must be called
* which will perform kobj unregistration and the actual free
* will occur during the kobject callback operation
*/
return dev_ctl;
}
EXPORT_SYMBOL_GPL(edac_device_alloc_ctl_info);
void edac_device_free_ctl_info(struct edac_device_ctl_info *ctl_info)
{
edac_device_unregister_sysfs_main_kobj(ctl_info);
}
EXPORT_SYMBOL_GPL(edac_device_free_ctl_info);
/*
* find_edac_device_by_dev
* scans the edac_device list for a specific 'struct device *'
*
* lock to be held prior to call: device_ctls_mutex
*
* Return:
* pointer to control structure managing 'dev'
* NULL if not found on list
*/
static struct edac_device_ctl_info *find_edac_device_by_dev(struct device *dev)
{
struct edac_device_ctl_info *edac_dev;
struct list_head *item;
edac_dbg(0, "\n");
list_for_each(item, &edac_device_list) {
edac_dev = list_entry(item, struct edac_device_ctl_info, link);
if (edac_dev->dev == dev)
return edac_dev;
}
return NULL;
}
/*
* add_edac_dev_to_global_list
* Before calling this function, caller must
* assign a unique value to edac_dev->dev_idx.
*
* lock to be held prior to call: device_ctls_mutex
*
* Return:
* 0 on success
* 1 on failure.
*/
static int add_edac_dev_to_global_list(struct edac_device_ctl_info *edac_dev)
{
struct list_head *item, *insert_before;
struct edac_device_ctl_info *rover;
insert_before = &edac_device_list;
/* Determine if already on the list */
rover = find_edac_device_by_dev(edac_dev->dev);
if (unlikely(rover != NULL))
goto fail0;
/* Insert in ascending order by 'dev_idx', so find position */
list_for_each(item, &edac_device_list) {
rover = list_entry(item, struct edac_device_ctl_info, link);
if (rover->dev_idx >= edac_dev->dev_idx) {
if (unlikely(rover->dev_idx == edac_dev->dev_idx))
goto fail1;
insert_before = item;
break;
}
}
list_add_tail_rcu(&edac_dev->link, insert_before);
return 0;
fail0:
edac_printk(KERN_WARNING, EDAC_MC,
"%s (%s) %s %s already assigned %d\n",
dev_name(rover->dev), edac_dev_name(rover),
rover->mod_name, rover->ctl_name, rover->dev_idx);
return 1;
fail1:
edac_printk(KERN_WARNING, EDAC_MC,
"bug in low-level driver: attempt to assign\n"
" duplicate dev_idx %d in %s()\n", rover->dev_idx,
__func__);
return 1;
}
/*
* del_edac_device_from_global_list
*/
static void del_edac_device_from_global_list(struct edac_device_ctl_info
*edac_device)
{
list_del_rcu(&edac_device->link);
/* these are for safe removal of devices from global list while
* NMI handlers may be traversing list
*/
synchronize_rcu();
INIT_LIST_HEAD(&edac_device->link);
}
/*
* edac_device_workq_function
* performs the operation scheduled by a workq request
*
* this workq is embedded within an edac_device_ctl_info
* structure, that needs to be polled for possible error events.
*
* This operation is to acquire the list mutex lock
* (thus preventing insertion or deletion)
* and then call the device's poll function IFF this device is
* running polled and there is a poll function defined.
*/
static void edac_device_workq_function(struct work_struct *work_req)
{
struct delayed_work *d_work = to_delayed_work(work_req);
struct edac_device_ctl_info *edac_dev = to_edac_device_ctl_work(d_work);
mutex_lock(&device_ctls_mutex);
/* If we are being removed, bail out immediately */
if (edac_dev->op_state == OP_OFFLINE) {
mutex_unlock(&device_ctls_mutex);
return;
}
/* Only poll controllers that are running polled and have a check */
if ((edac_dev->op_state == OP_RUNNING_POLL) &&
(edac_dev->edac_check != NULL)) {
edac_dev->edac_check(edac_dev);
}
mutex_unlock(&device_ctls_mutex);
/* Reschedule the workq for the next time period to start again
* if the number of msec is for 1 sec, then adjust to the next
* whole one second to save timers firing all over the period
* between integral seconds
*/
if (edac_dev->poll_msec == 1000)
edac_queue_work(&edac_dev->work, round_jiffies_relative(edac_dev->delay));
else
edac_queue_work(&edac_dev->work, edac_dev->delay);
}
/*
* edac_device_workq_setup
* initialize a workq item for this edac_device instance
* passing in the new delay period in msec
*/
static void edac_device_workq_setup(struct edac_device_ctl_info *edac_dev,
unsigned msec)
{
edac_dbg(0, "\n");
/* take the arg 'msec' and set it into the control structure
* to be used in the time period calculation
* then calculate the number of jiffies that it represents
*/
edac_dev->poll_msec = msec;
edac_dev->delay = msecs_to_jiffies(msec);
INIT_DELAYED_WORK(&edac_dev->work, edac_device_workq_function);
/* optimize here for the 1 second case, which will be normal value, to
* fire ON the 1 second time event. This helps reduce all sorts of
* timers firing on sub-second basis, while they are happy
* to fire together on the 1 second exactly
*/
if (edac_dev->poll_msec == 1000)
edac_queue_work(&edac_dev->work, round_jiffies_relative(edac_dev->delay));
else
edac_queue_work(&edac_dev->work, edac_dev->delay);
}
/*
* edac_device_workq_teardown
* stop the workq processing on this edac_dev
*/
static void edac_device_workq_teardown(struct edac_device_ctl_info *edac_dev)
{
if (!edac_dev->edac_check)
return;
edac_dev->op_state = OP_OFFLINE;
edac_stop_work(&edac_dev->work);
}
/*
* edac_device_reset_delay_period
*
* need to stop any outstanding workq queued up at this time
* because we will be resetting the sleep time.
* Then restart the workq on the new delay
*/
void edac_device_reset_delay_period(struct edac_device_ctl_info *edac_dev,
unsigned long value)
{
unsigned long jiffs = msecs_to_jiffies(value);
if (value == 1000)
jiffs = round_jiffies_relative(jiffs);
edac_dev->poll_msec = value;
edac_dev->delay = jiffs;
edac_mod_work(&edac_dev->work, jiffs);
}
int edac_device_alloc_index(void)
{
static atomic_t device_indexes = ATOMIC_INIT(0);
return atomic_inc_return(&device_indexes) - 1;
}
EXPORT_SYMBOL_GPL(edac_device_alloc_index);
int edac_device_add_device(struct edac_device_ctl_info *edac_dev)
{
edac_dbg(0, "\n");
#ifdef CONFIG_EDAC_DEBUG
if (edac_debug_level >= 3)
edac_device_dump_device(edac_dev);
#endif
mutex_lock(&device_ctls_mutex);
if (add_edac_dev_to_global_list(edac_dev))
goto fail0;
/* set load time so that error rate can be tracked */
edac_dev->start_time = jiffies;
/* create this instance's sysfs entries */
if (edac_device_create_sysfs(edac_dev)) {
edac_device_printk(edac_dev, KERN_WARNING,
"failed to create sysfs device\n");
goto fail1;
}
/* If there IS a check routine, then we are running POLLED */
if (edac_dev->edac_check != NULL) {
/* This instance is NOW RUNNING */
edac_dev->op_state = OP_RUNNING_POLL;
/*
* enable workq processing on this instance,
* default = 1000 msec
*/
edac_device_workq_setup(edac_dev, 1000);
} else {
edac_dev->op_state = OP_RUNNING_INTERRUPT;
}
/* Report action taken */
edac_device_printk(edac_dev, KERN_INFO,
"Giving out device to module %s controller %s: DEV %s (%s)\n",
edac_dev->mod_name, edac_dev->ctl_name, edac_dev->dev_name,
edac_op_state_to_string(edac_dev->op_state));
mutex_unlock(&device_ctls_mutex);
return 0;
fail1:
/* Some error, so remove the entry from the list */
del_edac_device_from_global_list(edac_dev);
fail0:
mutex_unlock(&device_ctls_mutex);
return 1;
}
EXPORT_SYMBOL_GPL(edac_device_add_device);
struct edac_device_ctl_info *edac_device_del_device(struct device *dev)
{
struct edac_device_ctl_info *edac_dev;
edac_dbg(0, "\n");
mutex_lock(&device_ctls_mutex);
/* Find the structure on the list, if not there, then leave */
edac_dev = find_edac_device_by_dev(dev);
if (edac_dev == NULL) {
mutex_unlock(&device_ctls_mutex);
return NULL;
}
/* mark this instance as OFFLINE */
edac_dev->op_state = OP_OFFLINE;
/* deregister from global list */
del_edac_device_from_global_list(edac_dev);
mutex_unlock(&device_ctls_mutex);
/* clear workq processing on this instance */
edac_device_workq_teardown(edac_dev);
/* Tear down the sysfs entries for this instance */
edac_device_remove_sysfs(edac_dev);
edac_printk(KERN_INFO, EDAC_MC,
"Removed device %d for %s %s: DEV %s\n",
edac_dev->dev_idx,
edac_dev->mod_name, edac_dev->ctl_name, edac_dev_name(edac_dev));
return edac_dev;
}
EXPORT_SYMBOL_GPL(edac_device_del_device);
static inline int edac_device_get_log_ce(struct edac_device_ctl_info *edac_dev)
{
return edac_dev->log_ce;
}
static inline int edac_device_get_log_ue(struct edac_device_ctl_info *edac_dev)
{
return edac_dev->log_ue;
}
static inline int edac_device_get_panic_on_ue(struct edac_device_ctl_info
*edac_dev)
{
return edac_dev->panic_on_ue;
}
void edac_device_handle_ce_count(struct edac_device_ctl_info *edac_dev,
unsigned int count, int inst_nr, int block_nr,
const char *msg)
{
struct edac_device_instance *instance;
struct edac_device_block *block = NULL;
if (!count)
return;
if ((inst_nr >= edac_dev->nr_instances) || (inst_nr < 0)) {
edac_device_printk(edac_dev, KERN_ERR,
"INTERNAL ERROR: 'instance' out of range "
"(%d >= %d)\n", inst_nr,
edac_dev->nr_instances);
return;
}
instance = edac_dev->instances + inst_nr;
if ((block_nr >= instance->nr_blocks) || (block_nr < 0)) {
edac_device_printk(edac_dev, KERN_ERR,
"INTERNAL ERROR: instance %d 'block' "
"out of range (%d >= %d)\n",
inst_nr, block_nr,
instance->nr_blocks);
return;
}
if (instance->nr_blocks > 0) {
block = instance->blocks + block_nr;
block->counters.ce_count += count;
}
/* Propagate the count up the 'totals' tree */
instance->counters.ce_count += count;
edac_dev->counters.ce_count += count;
if (edac_device_get_log_ce(edac_dev))
edac_device_printk(edac_dev, KERN_WARNING,
"CE: %s instance: %s block: %s count: %d '%s'\n",
edac_dev->ctl_name, instance->name,
block ? block->name : "N/A", count, msg);
}
EXPORT_SYMBOL_GPL(edac_device_handle_ce_count);
void edac_device_handle_ue_count(struct edac_device_ctl_info *edac_dev,
unsigned int count, int inst_nr, int block_nr,
const char *msg)
{
struct edac_device_instance *instance;
struct edac_device_block *block = NULL;
if (!count)
return;
if ((inst_nr >= edac_dev->nr_instances) || (inst_nr < 0)) {
edac_device_printk(edac_dev, KERN_ERR,
"INTERNAL ERROR: 'instance' out of range "
"(%d >= %d)\n", inst_nr,
edac_dev->nr_instances);
return;
}
instance = edac_dev->instances + inst_nr;
if ((block_nr >= instance->nr_blocks) || (block_nr < 0)) {
edac_device_printk(edac_dev, KERN_ERR,
"INTERNAL ERROR: instance %d 'block' "
"out of range (%d >= %d)\n",
inst_nr, block_nr,
instance->nr_blocks);
return;
}
if (instance->nr_blocks > 0) {
block = instance->blocks + block_nr;
block->counters.ue_count += count;
}
/* Propagate the count up the 'totals' tree */
instance->counters.ue_count += count;
edac_dev->counters.ue_count += count;
if (edac_device_get_log_ue(edac_dev))
edac_device_printk(edac_dev, KERN_EMERG,
"UE: %s instance: %s block: %s count: %d '%s'\n",
edac_dev->ctl_name, instance->name,
block ? block->name : "N/A", count, msg);
if (edac_device_get_panic_on_ue(edac_dev))
panic("EDAC %s: UE instance: %s block %s count: %d '%s'\n",
edac_dev->ctl_name, instance->name,
block ? block->name : "N/A", count, msg);
}
EXPORT_SYMBOL_GPL(edac_device_handle_ue_count);
|
import React, { useEffect, useState } from 'react';
import { Helmet } from 'react-helmet-async';
import { useTranslation } from 'react-i18next';
import { translations } from 'locales/i18n';
import { useInjectReducer, useInjectSaga } from 'utils/redux-injectors';
import CurrencyContainer from './components/CurrencyContainer';
import CurrencyDetails from './components/CurrencyDetails';
import { Header } from 'app/components/Header';
import { lendBorrowSovrynSaga } from './saga';
import { actions, reducer, sliceKey } from './slice';
import { useDispatch, useSelector } from 'react-redux';
import { selectLendBorrowSovryn } from './selectors';
import { Footer } from '../../components/Footer';
import { RepayPositionHandler } from 'app/containers/RepayPositionHandler/Loadable';
import { BorrowActivity } from '../../components/BorrowActivity/Loadable';
import { useHistory, useLocation } from 'react-router-dom';
import { IPromotionLinkState } from '../LandingPage/components/Promotions/components/PromotionCard/types';
const BorrowPage: React.FC = () => {
const location = useLocation<IPromotionLinkState>();
const history = useHistory<IPromotionLinkState>();
const [linkAsset] = useState(location.state?.asset);
const { t } = useTranslation();
useInjectReducer({ key: sliceKey, reducer: reducer });
useInjectSaga({ key: sliceKey, saga: lendBorrowSovrynSaga });
const state = useSelector(selectLendBorrowSovryn);
const dispatch = useDispatch();
useEffect(() => linkAsset && history.replace(location.pathname), [
history,
linkAsset,
location.pathname,
location.state,
]);
return (
<>
<Header />
<Helmet>
<title>{t(translations.borrow.meta.title)}</title>
</Helmet>
<main className="tw-container tw-mx-auto tw-mt-4 tw-px-4">
<div className="tw-grid lg:tw-gap-8 tw-grid-cols-1 lg:tw-grid-cols-2">
<div>
<CurrencyContainer
state={linkAsset || state.asset}
setState={asset => dispatch(actions.changeAsset(asset))}
/>
</div>
<div className="tw-mt-4 lg:tw-mt-0">
<CurrencyDetails />
</div>
</div>
</main>
<div className="tw-container tw-mx-auto tw-px-4 tw-mt-6">
<BorrowActivity />
<RepayPositionHandler />
</div>
<Footer />
</>
);
};
export default BorrowPage;
|
// tests/unit/PeopleAndRoles.spec.ts
// Libraries
import Vue from 'vue'
import Vuelidate from 'vuelidate'
import Vuetify from 'vuetify'
import VueRouter from 'vue-router'
import mockRouter from './MockRouter'
// Store
import { getVuexStore } from '@/store'
// Utils
import { createLocalVue, mount } from '@vue/test-utils'
// Components
import { PeopleAndRoles } from '@/components/AddPeopleAndRoles'
Vue.use(Vuetify)
Vue.use(Vuelidate)
const vuetify = new Vuetify({})
const store = getVuexStore()
// Input field selectors to test changes to the DOM elements.
const btnStartAddCompletingParty: string = '#btn-start-add-cp'
const btnAddPerson: string = '#btn-add-person'
const btnAddCompletingParty: string = '#btn-add-cp'
const btnAddCorp: string = '#btn-add-corp'
const btnPanel: string = '.btn-panel'
const appointForm: string = '.appoint-form'
const checkCompletingParty: string = '.cp-valid'
const checkDirector: string = '.dir-valid'
const checkIncorporator: string = '.incorp-valid'
const completingPartyRole = { 'roleType': 'Completing Party', 'appointmentDate': '2020-03-30' }
const directorRole: string = 'Director'
const incorporatorRole: string = 'Incorporator'
function resetStore (): void {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = []
}
function getPersonList (roles = [completingPartyRole]): any {
const mockPersonList = [
{
'officer': {
'id': 0,
'firstName': 'Adam',
'lastName': 'Smith',
'middleName': 'D',
'orgName': '',
'type': 'Person'
},
'roles': roles,
'address': {
'mailingAddress': {
'streetAddress': '123 Fake Street',
'streetAddressAdditional': '',
'addressCity': 'Victoria',
'addressRegion': 'BC',
'postalCode': 'V8Z 5C6',
'addressCountry': 'CA'
},
'deliveryAddress': {
'streetAddress': '123 Fake Street',
'streetAddressAdditional': '',
'addressCity': 'Victoria',
'addressRegion': 'BC',
'postalCode': 'V8Z 5C6',
'addressCountry': 'CA'
}
}
}
]
return mockPersonList
}
describe('People And Roles component', () => {
let wrapperFactory: any
beforeEach(() => {
const localVue = createLocalVue()
localVue.use(VueRouter)
const router = mockRouter.mock()
wrapperFactory = (propsData) => {
return mount(PeopleAndRoles, {
localVue,
router,
store,
vuetify,
propsData // forward the factory's parameter so callers can actually pass props
})
}
})
it('shows Start by Adding Completing Party Button when people list is empty', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = []
const wrapper = wrapperFactory()
expect(wrapper.find(btnStartAddCompletingParty).exists()).toBeTruthy()
expect(wrapper.find(btnStartAddCompletingParty).text()).toContain('Start by Adding the Completing Party')
wrapper.destroy()
})
it('Does not show other add buttons when people list is empty', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = []
const wrapper = wrapperFactory()
expect(wrapper.find(btnAddPerson).exists()).toBeFalsy()
expect(wrapper.find(btnAddCompletingParty).exists()).toBeFalsy()
expect(wrapper.find(btnAddCorp).exists()).toBeFalsy()
wrapper.destroy()
})
it('does not Start by Adding Completing Party Button when people list is not empty', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList()
const wrapper = wrapperFactory()
expect(wrapper.find(btnStartAddCompletingParty).exists()).toBeFalsy()
wrapper.destroy()
resetStore()
})
it('shows Add Person and Add Corporation Button when people list is not empty', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList()
const wrapper = wrapperFactory()
expect(wrapper.find(btnAddCorp).exists()).toBeTruthy()
expect(wrapper.find(btnAddPerson).exists()).toBeTruthy()
wrapper.destroy()
resetStore()
})
it('shows Add Completing Party Button when people list is not empty and has no Completing Party', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList([
{ 'roleType': 'Director', 'appointmentDate': '2020-03-30' }
])
const wrapper = wrapperFactory()
expect(wrapper.find(btnAddCompletingParty).exists()).toBeTruthy()
wrapper.destroy()
resetStore()
})
it('Does not show Add Completing Party Button when people list is not empty and has Completing Party', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList()
const wrapper = wrapperFactory()
expect(wrapper.find(btnAddCompletingParty).exists()).toBeFalsy()
wrapper.destroy()
resetStore()
})
it('Sets the data attributes as expected when add button is clicked', async () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList()
const wrapper = wrapperFactory()
wrapper.find(btnAddPerson).trigger('click')
await wrapper.vm.$nextTick()
expect(wrapper.vm.$data.showOrgPersonForm).toBe(true)
expect(wrapper.vm.$data.addEditInProgress).toBe(true)
expect(wrapper.vm.$data.nextId).toBe(1)
wrapper.destroy()
resetStore()
})
it('Shows the add person form when add person button is clicked', async () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList()
const wrapper = wrapperFactory()
wrapper.find(btnAddPerson).trigger('click')
await wrapper.vm.$nextTick()
expect(wrapper.find(appointForm).exists()).toBeTruthy()
wrapper.destroy()
resetStore()
})
it('Shows check mark next to roles added', () => {
store.state.stateModel.addPeopleAndRoleStep.orgPeople = getPersonList([
{ 'roleType': 'Director', 'appointmentDate': '2020-03-30' },
{ 'roleType': 'Incorporator', 'appointmentDate': '2020-03-30' }
])
const wrapper = wrapperFactory()
expect(wrapper.find(checkIncorporator).exists()).toBeTruthy()
expect(wrapper.find(checkDirector).exists()).toBeTruthy()
expect(wrapper.find(checkCompletingParty).exists()).toBeFalsy()
wrapper.destroy()
resetStore()
})
})
|
package com.idividends.data.dto;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class TaskResultTest {
@Test
public void valuesTest() {
TaskResult result = null;
for (TaskResult value : TaskResult.values()) {
if (value.equals(TaskResult.SUCCESS)) {
result = value;
}
}
assertEquals(TaskResult.SUCCESS, result);
}
@Test
public void valueOfTest() {
assertEquals(TaskResult.valueOf("SUCCESS"), TaskResult.SUCCESS);
}
}
|
Retina-Based Pipe-Like Object Tracking Implemented Through Spiking Neural Network on a Snake Robot
Vision-based target tracking ability is crucial for bio-inspired snake robots exploring unknown environments. However, it is difficult for the traditional vision modules of snake robots to overcome the image blur resulting from periodic swings. A promising approach is to use a neuromorphic vision sensor (NVS), which mimics the biological retina to detect a target at a higher temporal frequency and in a wider dynamic range. In this study, an NVS and a spiking neural network (SNN) were deployed on a snake robot for the first time to achieve pipe-like object tracking. An SNN based on the Hough transform was designed to detect a target from the asynchronous event stream fed by the NVS. Combining the state of snake motion analyzed by the joint position sensors, a tracking framework was proposed. The experimental results obtained from the simulator demonstrated the validity of our framework and the autonomous locomotion ability of our snake robot. Comparing the performance of the SNN model on CPUs and on GPUs, the model ran fastest on a GPU under a simplified, synchronous update rule, while it achieved higher precision on a CPU with asynchronous updates.
INTRODUCTION
Target tracking performed on mobile robots, such as bio-inspired snake robots, remains a challenging research topic. Specifically, when using visual approaches based on a conventional vision sensor rigidly connected to a mobile robot, there are mainly two challenges: (1) The primary issue is how to overcome the image blur resulting from the fast motion and the unpredictable tremble of the robot. Meanwhile, if there were no change in a scene, a traditional camera with a fixed frame rate would produce a large quantity of redundant data, which constrains the design and application of real-time tracking approaches. (2) Another issue is that the relative position of the target cannot be obtained quickly and precisely from the sensors assembled on the robot, including IMU sensors, vision sensors, and time-of-flight sensors. In addition, owing to the limited space and weight of a real snake robot, it is usually difficult to use more sensors of higher precision and larger volume, or stereo vision sensors for gaining depth information.
There have been extensive articles aiming to solve the aforementioned problems in visual target tracking on robots. A natural solution for tracking on a blurred image sequence is to first perform deblurring and then apply a tracking algorithm on the deblurred sequence. An improved method is directly tracking the target without deblurring (Jin et al., 2005). By generating blur templates of the target from blur-free frames, the target is represented by a sparse matrix and tracked by a particle filter (Wu et al., 2011; Ma et al., 2016). Although these frameworks are blur-tolerant, they are still time-consuming. An alternative approach performed on mobile robots is tracking objects of a specific color. Hu et al. (2009) designed a vision-based autonomous robotic fish and implemented red-ball tracking. However, this method cannot be used in a complex environment or for objects with low color contrast. Recently, researchers have attempted various new types of vision sensors in target tracking, such as structured light sensors (Ponte et al., 2014) and neuromorphic vision sensors (NVS) (Schraml et al., 2010; Glover and Bartolozzi, 2016; Liu et al., 2016; Moeys et al., 2016; Seifozzakerini et al., 2016).
Tracking by using neuromorphic vision sensors has become a promising solution. The NVS, typically the Dynamic Vision Sensor (DVS) (Lichtsteiner et al., 2008), mimics the biological retina to generate spikes on the order of microseconds in response to pixel-level changes of brightness caused by motion. An output event (also named a spike) of the DVS carries three kinds of information: the timestamp t when the event occurred, the pixel coordinate (x, y), and the polarity p that represents the trend of the brightness change. The polarity 1 represents increasing brightness, while the polarity -1 means the brightness is decreasing. NVSs offer significant advantages over standard frame-based cameras, namely a very high dynamic range, no motion blur, and a latency on the order of microseconds (Gehrig et al., 2018). Hence, the NVS is suitable for working under bad light conditions and on high-speed mobile platforms. There has been substantial research showing the advantages of using a DVS camera in various vision tasks, such as high-speed target tracking (Drazen et al., 2011; Mueggler et al., 2014; Lagorce et al., 2015), object recognition (Kheradpisheh et al., 2018), and visual odometry (Kueng et al., 2016; Rebecq et al., 2017). Moreover, because a pixel of an NVS is a silicon retinal neuron and an event is a unit impulse with polarity, the asynchronous event train generated by an NVS can be directly fed into Spiking Neural Networks (SNNs) as input spikes for implementing target detecting and tracking in a faster and more brain-like way.
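To make the event representation concrete, here is a minimal Python sketch of the address-event stream described above (the names Event and split_by_polarity are illustrative assumptions, not part of any DVS driver API):

from collections import namedtuple

# One DVS address-event: timestamp (microseconds), pixel coordinate, polarity.
Event = namedtuple('Event', ['t', 'x', 'y', 'p'])  # p is +1 or -1

def split_by_polarity(events):
    # Separate brightness-increase (+1) from brightness-decrease (-1) events.
    on_events = [e for e in events if e.p == +1]
    off_events = [e for e in events if e.p == -1]
    return on_events, off_events

stream = [Event(10, 64, 32, +1), Event(12, 65, 32, -1)]
on_ev, off_ev = split_by_polarity(stream)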
The wheel-less snake robot (Wright et al., 2007) is a kind of typical bio-inspired mobile robot, which is composed of many modules alternately connected in vertical and horizontal planes. Its abundant degrees of freedom help it achieve various three-dimensional gaits, such as rolling, side-winding and slithering. The slithering gait is a forward locomotion gait in which biological snakes use undulations to push their bodies forward (Hu and Shelley, 2012). Under this gait, the snake head can still remain stable to locate the moving direction of quarry or natural enemies. Similarly, the wheel-less snake robot is able to move and look forward under a slithering gait and achieve target tracking.
In this work, we presented a pipe-like object detecting and autonomous tracking framework, which was performed on our wheel-less snake robot with a monocular DVS camera by applying a spiking neural network inspired by the Hough transform (Wiesmann et al., 2012; Seifozzakerini et al., 2016). First, we achieved line detection for a standing pipe and circle detection for a lying pipe on the snake robot in the Neurorobotics Platform (NRP). The fixed connections between the input neurons corresponding to pixels of the DVS and the neurons representing points in the parameter space were created according to the principle of the Hough transform. Secondly, a depth estimation method based on a monocular DVS was proposed to estimate the pose of the snake robot and the relative position of the target pipe from the change of object size. Thirdly, an adaptive tracking strategy which generates a series of control signals of turning left or turning right was adopted to implement real-time tracking. Finally, target tracking experiments were conducted on the wheel-less snake robot modeled in V-REP and in NRP, respectively, and our SNN model was evaluated on CPUs and on GPUs, respectively. This paper is based on our previous work, which we extend in several ways:
• Besides V-REP, we validated our tracking framework in another simulator, the NRP.
• We extended the range of shapes to detect and track so that the snake robot can track a target pipe in various views. We not only detected and tracked the standing pipe (shaped in lines) but also the lying pipe (shaped in a circle).
• We revised the conditions applied in detecting the standing pipe, which is more biologically plausible.
• GPUs were used as accelerators to speed up object detecting on the SNN.
The rest of the paper is organized as follows. In section 2, we describe the proposed tracking framework, including the overview, the model of spiking neural networks for detecting and the relative position estimation algorithm. In section 3, we show and discuss the results of experiments conducted on a wheel-less snake robot. The conclusions are drawn in section 4.
Tracking Framework
Target tracking is a typical instance of the autonomous locomotion control. Therefore, tracking framework consists of three components: sensing, planning and acting, which are also the essential components of an autonomous system (Ponte et al., 2014). More concretely, the proposed framework for pipe-like object tracking on a wheel-less snake robot is composed of 4 constituents, as shown in Figure 1. (1) Sensor. The DVS camera observes the environment and generates asynchronous events as the input of the SNN. For an event e(t, x, y, p), (x, y) indicates which neuron receives this input spike, t is the time when the spiking neuron receives this input spike, and p (±1) defines the voltage of this input spike. Meanwhile, the joint encoder records the position of each joint of the snake robot in a short time period.
(2) Spiking Neural Network. A two-layer SNN was designed for object detecting. A neuron in the input layer fixedly connected to some neurons in the output layer according to the principle of the Hough transform. The asynchronous events were fed into the input layer of the SNN and impacted the neurons in the output layer by propagating spikes along the synapses. Once any output neuron fired, an output spike was generated, which indicated a successful detection of the target.
(3) Decision maker. This is a non-spiking part. The joint position information obtained from the joint encoders as well as the target position obtained from the SNN were fused to estimate the relative position of the target and generate control signals. The control signals we used function essentially like a bang-bang controller. (4) CPG controller. This part is a built-in controller of the wheel-less snake robot that converts the control signals into the parameters of the Central Pattern Generator (CPG) to maintain or adjust the specific locomotion gait.
Model of Spiking Neural Network
Compared to traditional artificial neural networks, spiking neural networks are more similar to biological brains due to incorporating the concepts of spike-driven synaptic dynamics and temporal dynamics. Temporal dynamics mean the membrane potential of spiking neurons changes spontaneously over time, and spike-driven synaptic dynamics describe the information propagation on synapses. For the neuron model, the most popular one is the Leaky Integrate-and-Fire (LIF) model (Burkitt, 2006), which can be described by the following linear differential equation:

dv(t)/dt = −v(t)/τ_m + R·I(t)/τ_m    (1)

where v(t) represents the membrane potential at time t, τ_m is the time constant and R is the membrane resistance. A LIF neuron is a simple resistor-capacitor circuit where the leakage term −v(t)/τ_m is due to the resistor and the integration term R·I(t)/τ_m is due to the capacitor that is parallel to the resistor.
The behavior of a LIF neuron can be depicted as follows.
(1) A spiking input causes an increase of the Membrane Potential (MP) of the neuron. (2) In the meantime, the MP always spontaneously decays at a fixed rate. (3) When the MP exceeds the threshold, a spike is generated as an output. Then, the MP of the fired neuron is reset to zero so that the neuron enters a refractory period, during which the MP remains zero and all input spikes are ignored. Because of the similarity between the dynamics of the LIF neuron and the voting process of shape detection based on the Hough transform, the SNN composed of LIF neurons is particularly well adapted for detecting line and circle.
In this work, we designed two SNNs composed of LIF neurons, for line detection and circle detection, respectively. Both SNNs contain a two-layered topological structure, including an input layer and an output layer, as shown in Figure 1. The input neurons obtained the events from the DVS and duplicated them immediately. The output neurons integrated the spikes and fired when they received enough spikes; they are extended LIF neurons with both a positive threshold and a negative threshold. Each input neuron permanently connected to some output neurons according to the equation of the specific shape. The membrane potential dynamics of the extended LIF neurons are described in Algorithm 1.
Line Equation
According to the Hough transform, we assume n = (sin θ, cos θ) as the normal vector perpendicular to the line L and ρ as the normal distance from the line to the origin. Hence, a point p = (x, y) on the line L can be formulated as the equation

ρ = x · sin θ + y · cos θ    (2)

which maps each point (x, y) from the Cartesian coordinate system into the parameter space of (θ, ρ) as a sinusoidal curve.

[Figure 1 | Tracking framework. The asynchronous events derived from the DVS are processed by an SNN designed for object detecting. A neuron (in red) in the input layer fixedly connects some neurons (in blue) according to the principle of the Hough transform. A neuron (in green) excites when its membrane potential exceeds a threshold. By combining the joint position information obtained from joint encoders and the target position in the camera, the relative position of the target (in red) is estimated to generate the control signals. Finally, the wheel-less snake robot approaches the target under a slithering gait.]

[Figure 2 | The central neuron in red corresponds to the line L in (A), which is connected to all neurons in its neighborhood for local inhibition.]

Algorithm 1: The membrane potential updating of an extended LIF neuron (λ is the fixed decay rate, v is the MP)
1: for each input spike with polarity p arriving at neuron i at time t do
2:   v_i ← v_i · e^(−λ·Δt)   {spontaneous decay since the last update}
3:   v_i ← v_i + p   {integrate the input spike}
4:   if |v_i| ≥ threshold then
5:     Generate an output spike
6:     Reset all connected neurons
7:     v_i ← 0
8:   end if
9: end for
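As a rough illustration of Algorithm 1, the following Python sketch (the parameter values are assumptions, not the authors' implementation) updates one extended LIF neuron per input spike, with exponential decay between spikes and both a positive and a negative firing threshold:

import math

def update_neuron(v, t_last, t_now, p, lam=20.0, v_th=4.0):
    # Spontaneous decay at fixed rate lam since the previous update.
    v *= math.exp(-lam * (t_now - t_last))
    # Integrate the input spike with its polarity (+1 or -1).
    v += p
    # Extended LIF: fire on either the positive or the negative threshold.
    if abs(v) >= v_th:
        return 0.0, True   # reset to zero after firing
    return v, False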
As shown in Figure 2, the SNN corresponding to the parameter space of (θ, ρ) is built up, which consists of 180 × 180 spiking neurons. The first dimension of the SNN represents the angle θ and the second dimension is the distance ρ. In this example, the range of θ is 180 degrees with 1-degree resolution, and the range of ρ is 180 pixels (180 approximately equals the diagonal distance of the view field of a DVS128) with 1-pixel resolution. Each neuron of the SNN represents a line with (θ, ρ) in the parameter space.
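A hedged Python sketch of this connectivity (grid sizes follow the 180 × 180 example above; the function name is an assumption): each input event (x, y) sends a spike to every (θ, ρ) neuron lying on its sinusoid ρ = x·sin θ + y·cos θ.

import math

def line_neurons_for_event(x, y, n_theta=180, n_rho=180):
    # Collect the indices of all output neurons this event votes for.
    targets = []
    for i in range(n_theta):
        theta = math.pi * i / n_theta
        rho = x * math.sin(theta) + y * math.cos(theta)
        j = int(round(rho))
        if 0 <= j < n_rho:
            targets.append((i, j))
    return targets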
Circle Equation
As we all know, a point p = (x, y) on a circle C can be described by the following standard equation:

(x − a)² + (y − b)² = r²    (3)

where (a, b) is the coordinate of the center of the detected circle and r is its radius. An SNN for circle detection is a three-dimensional parameter space of (a, b, r), which consists of 128 × 128 × 64 spiking neurons. The first two dimensions of the SNN represent the position of a circle center in the horizontal direction and the vertical direction, respectively, while the third dimension is the radius of a circle. In this case, the resolutions in all of the three dimensions are 1 pixel. Each neuron of the SNN represents a circle with (a, b, r) in the parameter space.
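An analogous sketch for the circle network (the sampling resolution is an illustrative assumption): an event (x, y) excites every neuron (a, b, r) whose circle passes through that pixel.

import math

def circle_neurons_for_event(x, y, n=128, n_r=64, steps=64):
    # For each candidate radius, vote for the centers (a, b) at distance r.
    targets = set()
    for r in range(1, n_r):
        for k in range(steps):
            phi = 2 * math.pi * k / steps
            a = int(round(x - r * math.cos(phi)))
            b = int(round(y - r * math.sin(phi)))
            if 0 <= a < n and 0 <= b < n:
                targets.add((a, b, r))
    return targets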
Lateral Inhibition
The local lateral inhibition, which is a natural property of biological neurons, was applied to suppress noise in this work. Every spiking neuron was connected to its adjacent neurons. Once a shape was detected, a spiking neuron would fire and all the spiking neurons connected to it would be inhibited. In order to make a trade-off between the result of noise suppression and the computation cost, we selected a 3 × 3 window as the range of local lateral inhibition. This means that neurons in the 8-adjacency of the fired neuron are reset. When a larger inhibition range is used, the target is detected less often in approximately the same region, but more reset operations need to be done; with a smaller range, the reverse holds.
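The reset step can be sketched as follows (a plain nested-list grid; the real network size and data layout may differ): when the neuron at (i, j) fires, its whole 3 × 3 neighborhood, itself included, is cleared.

def inhibit_neighbourhood(potentials, i, j):
    # potentials is a 2-D grid of membrane potentials, e.g. 180 x 180.
    rows, cols = len(potentials), len(potentials[0])
    for di in (-1, 0, 1):
        for dj in (-1, 0, 1):
            ni, nj = i + di, j + dj
            if 0 <= ni < rows and 0 <= nj < cols:
                potentials[ni][nj] = 0.0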
Pipe Detection
For pipe detection, the different poses of a vision sensor will result in different shapes of the pipe in the image plane: a pair of parallel lines or a circle. The circle can be detected directly by an SNN, while a strategy needs to be proposed for recognizing two parallel lines.
The edges on both sides of the pipe body can be detected as two parallel lines. In an indoor environment, the change of brightness is opposite on the two sides of the pipe while the DVS camera moves perpendicular to the pipe. The polarity of the events on one edge of the pipe is positive while that on the other edge is negative. Once the DVS camera moves in the opposite direction, the polarity of the events on the two edges reverses. Two lines with opposite polarity can be considered as the body of a pipe. Furthermore, this pair of parallel lines should appear at the maximum frequency, which is equivalent to the highest firing rate of the spiking neuron. Therefore, three conditions to judge whether a pipe appears are listed below:
• The polarities of the two lines are opposite.
• The two lines are parallel or the difference of the angle is tiny.
• The pair of parallel lines appears at the maximum frequency in a short time period.
In this work, we tested all the detected lines in each time slot, and only one pair of lines satisfying the above conditions would be found. The target can be detected by Algorithm 2 and represented as a 4-tuple P(t, θ, w, pos), where t is the timestamp, θ is the angle of the pipe, w is the width of the pipe and pos is the offset in pixels relative to the left side of the field of view.

Algorithm 2: Line detection with the SNN
1: initialize all membrane potentials to 0
2: for event e_i = (t_i, x_i, y_i, p_i) do
3:   for every angle θ_j in SNN do
4:     ρ_(θ_j) ← x_i · sin θ_j + y_i · cos θ_j
5:     Update neuron N(θ_j, ρ_(θ_j)) at t_i with polarity p_i
6:   end for
7: end for
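The three pairing conditions can be sketched in Python as below (the angle tolerance and the tuple layout are illustrative assumptions): line spikes collected over a short time window are paired by opposite polarity and near-parallel angle, and the most frequent pair is taken as the pipe body.

from collections import Counter

def find_pipe(line_spikes, angle_tol=3):
    # line_spikes: (theta_deg, rho, polarity) tuples from one time window.
    pairs = Counter()
    for i, (t1, r1, p1) in enumerate(line_spikes):
        for (t2, r2, p2) in line_spikes[i + 1:]:
            if p1 != p2 and abs(t1 - t2) <= angle_tol:
                pairs[(round(t1), round(min(r1, r2)), round(max(r1, r2)))] += 1
    if not pairs:
        return None
    # The pair of parallel lines appearing at maximum frequency wins.
    return pairs.most_common(1)[0][0]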
Motion Analysis of Snake Robot
Due to the fact that the DVS was mounted on the head of the snake robot, the horizontal offset and the orientation of the head should be known to obtain precise tracking performance. Therefore, we collected the data from the joint encoder installed on the head and recorded the pose of the head, which includes the trajectory and the rotation in the simulation environment. Then, the horizontal offset and the orientation were analyzed by using FFT. After applying the head orientation compensation approach, the head of the snake robot always kept looking forward along the moving direction, which helps simplify the model for estimating the relative position of the target. On the other hand, the horizontal offset was estimated based on the head joint position. This idea is derived from the observation that the rotation of the joints is the essential driving force of the wheel-less snake robot.
For tracking tasks, our snake robot moved under a slithering gait, which provides the most stable camera pose. The horizontal offset of the DVS was fitted by FFT. In the meantime, we reconstructed the horizontal trajectory of the head module by integrating the head joint position θ. In fact, the head joint position and the horizontal trajectory are both periodic signals, which can be formulated as Equation (4) and Equation (6), respectively. Wu and Ma (2010) indicated that these two signals have the same form but different amplitude and phase; however, that work was unable to give the offset of the head module in real time. Therefore, considering that joint rotation is the main driving force of the wheel-less snake robot's motion, the head joint position was regarded as the argument of the horizontal trajectory. By tuning the phase and the amplitude, we obtained the offset of the DVS from the head joint position according to Equation (6). Further, the situation of turning was approximately treated as that of moving straight.

θ = a · sin(ω · t + φ)    (4)
θ_tuned = α · a · sin(ω · t + φ + φ_pos) = α · (θ · cos φ_pos + θ′ · sin φ_pos)    (5)
Position = A · sin(θ_tuned)    (6)

where A is the amplitude of the swing which is perpendicular to the direction of motion, α is the ratio of 90 degrees to a, θ is the value of the joint encoder, θ′ is the joint velocity (the first-order derivative of θ) and φ_pos is the phase difference. In our case, the tuple (A, α, φ_pos) is (−0.124, 1.000, 0.000) for V-REP and (1.400, 2.571, 0.908) for NRP, respectively. These parameters differ between V-REP and NRP because the joint controllers and environment parameters in the two simulators are slightly different.
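Equations (4)-(6) translate into a few lines of Python (a sketch; the constants below are the fitted V-REP values quoted above):

import math

def camera_offset(theta, theta_dot, A, alpha, phi_pos):
    # Tune the phase and amplitude of the head joint position (Eq. 5),
    # then map it to the horizontal offset of the DVS (Eq. 6).
    theta_tuned = alpha * (theta * math.cos(phi_pos) + theta_dot * math.sin(phi_pos))
    return A * math.sin(theta_tuned)

# Fitted parameters for V-REP: (A, alpha, phi_pos) = (-0.124, 1.000, 0.000)
offset = camera_offset(theta=0.3, theta_dot=0.0, A=-0.124, alpha=1.000, phi_pos=0.000)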
Relative Position Estimation
By using a distance sensor, such as an ultrasonic sensor, an IR sensor, etc., we can actively measure the relative distance between the snake robot and the target. However, a time-of-flight distance sensor is usually directional, which means more sensors would need to be installed in the limited space of the snake robot, especially when the position of the target is unknown. This holds for the actual snake robot as well. Besides that, when using a DVS along with distance sensors, we must ensure the consistency of different sensor measurements, that is, make sure the measured data represent the same object. Therefore, aiming to use fewer sensors, we detected the target and estimated depth simultaneously by using a single DVS sensor. Once the target is detected by the SNN, we can estimate the offset of the target in the horizontal plane and the forward relative distance between the DVS camera and the target. As shown in Figure 3A, in a time period Δt = t_2 − t_1, there is a functional relation between the decrease of the distance Δd = |d_2 − d_1| on the z-axis and the increase of the visible width of the target Δw = |w_2 − w_1|. Besides that, the distance d is always inversely proportional to the width of the target w; the scale factor is the focal length f multiplied by the actual width l. Moreover, we can reasonably assume that the snake robot moves forward at an approximately constant speed v since Δt is very small. Hence, Δd can be estimated by multiplying the elapsed time by the speed of the snake robot. In summary, we can calculate the distance d_2 depending on the displacement in a time period and the change of the target's visible width according to the following equations:

d_1 · w_1 = d_2 · w_2 = f · l    (7)
Δd = d_1 − d_2 = v · Δt    (8)
d_2 = Δd · w_1 / (w_2 − w_1)    (9)
The snake robot moves slowly, so the Δt and the Δw between two consecutive output spikes are tiny; therefore, the error of the distance calculated by Equation (9) would be considerable. To reduce this error, two non-consecutive output spikes are selected for distance estimation, and the interval is 10 spikes in this paper.
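Putting Equations (7)-(9) together gives a small estimator (a sketch under the constant-speed assumption; names are illustrative):

def estimate_distance(w1, w2, v, dt):
    # w1, w2: visible target widths (pixels) at two output spikes 10 apart;
    # v: forward speed of the robot; dt: elapsed time between the spikes.
    if w2 <= w1:
        raise ValueError('target width must grow as the robot approaches')
    delta_d = v * dt                  # Eq. (8): distance travelled
    return delta_d * w1 / (w2 - w1)   # Eq. (9): current distance d2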
Assuming that the positions of the moving snake robot and the target pipe satisfy the relationship shown in Figure 3B, the snake robot swings along the central line while it is moving forward. Hence, in order to obtain the offset between the central line and the target, we calculated two kinds of offset, the pipeOffset and the cameraOffset, respectively. Further, we proposed the method to estimate the pipe position for tracking as shown in Algorithm 3. We calculated the pipeOffset according to the ratio of tan θ to tan(φ/2) (φ is the FOV of the DVS). Then we estimated the cameraOffset according to Equation (6).

Algorithm 3 (final steps):
14:   Generate control parameter C = Sign(offset), where −1 means turning left, +1 means turning right and 0 means going straight
15: end for
GPU Acceleration
For artificial SNNs, the neurons update their states only when input spikes arrive asynchronously. When SNNs are implemented on a CPU or on neuromorphic chips, the neurons can update asynchronously as well. However, a general CPU is unable to deal with the large quantity of communication between neurons and the real-time state updating of neurons, and neuromorphic simulators and chips still have some drawbacks when running a large SNN. Therefore, we tried to accelerate our SNN by using a GPU. By providing a uniform clock, the neurons could update synchronously in short time slots, similar to frame-based image processing.
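A hedged NumPy sketch of this simplified synchronous rule (CuPy could replace NumPy for an actual GPU run; the decay factor and threshold are assumptions): all neurons decay once per time slot and integrate the slot's accumulated votes in one vectorized step.

import numpy as np

def synchronous_step(potentials, votes, decay=0.9, v_th=4.0):
    # potentials: the whole (theta, rho) grid; votes: summed input per slot.
    potentials = potentials * decay + votes
    fired = np.abs(potentials) >= v_th   # both thresholds checked at once
    potentials[fired] = 0.0              # reset fired neurons
    return potentials, fired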
EXPERIMENTS
Our tracking framework was evaluated both in NRP and V-REP, which are a robot-brain simulator and a robotics simulator, respectively. To begin with, we reconstructed the trajectory of the head module of the snake robot to obtain the camera offset perpendicular to the forward direction. Then, two scenes were built: one of them only had a standing pipe while the other contained a lying pipe. Finally, experiments were conducted on the aforementioned scenes. Meanwhile, the SNN was run on a CPU using only a single thread and on a CUDA GPU, respectively.
Simulation Environments
The Neurorobotics Platform (NRP) (Roehrbein et al., 2016; Falotico et al., 2017) is an integrated simulation platform that facilitates a direct link between robotics and neuroscience. In its Gazebo-based world simulator, we built a modular wheel-less snake robot and a simple environment in which there was only one pipe, as shown in Figure 4. We built two SNNs as well, for pipe body detection and pipe entry detection, respectively. By using the Robot Operating System (ROS) as communication middleware, our snake robot and the SNNs could exchange data and commands through ROS topics. V-REP (Rohmer et al., 2013), in contrast, is a simulator only for robotics, in which we implemented the SNNs outside the simulator and connected the snake robot and the SNNs by utilizing its Remote API. Although NRP and V-REP are both robotics simulators, the models of the snake robot differ slightly in the number of modules, the controller parameters and so on.
Results and Discussions
We conducted experiments in V-REP and NRP, respectively, considering four situations, which are (1) tracking pipe body on the left side of the snake robot, (2) tracking pipe body on the right side of the snake robot, (3) tracking pipe entry on the left side of the snake robot and (4) tracking pipe entry on the right side of the snake robot.
First, we fitted the camera offset by using the head joint position. As shown in Figure 5, the blue curve is approximately sinusoidal and represents the actual offset of the DVS camera, the red one is the offset analyzed by FFT and the green one is the reconstructed offset. We not only estimated the offset according to Equation (6), but also applied a mean filter on the timeline to smooth the estimation result. Therefore, the horizontal offset of the DVS camera was directly obtained in real time with a low estimation error.
Then, for the pipe detection, we put a hollow pipe at different perspectives of the DVS camera. The snake robot always started moving from the same initial position for each situation, but the initial position in V-REP is different from that in NRP. During the detection procedure, the event sequences generated by the DVS were fed into the vision SNN. Events were processed asynchronously, but we only detected the pipe once in each time slot according to the method discussed in section 2.3; the image and the position of the pipe were recorded and a part of them is shown in Figure 6. The standing pipe was represented by a blue and a green line and the lying pipe was represented by a green circle. The precision of the standing pipe detection is higher than that of the lying pipe detection because of the limitation of the network size, especially for circle detection. More neurons mean a greater ability to recognize the finer structure of the target. Additionally, detection suffered from worse precision when the snake robot got close to the target, especially for the lying pipe. At the beginning, the circle looks dense and easily distinguished; however, it becomes noisier as more details of the target, which generate noise spikes, come into view. If we increased the firing threshold of the membrane potential to improve the precision of target detection, the firing rate of the output neurons would drop so significantly that not enough output spikes would be generated.
After that, the relative distance between the DVS camera and the target pipe was estimated by using Algorithm 3. As the snake robot moved toward the pipe, the width of the pipe increased in the image plane. As shown in Figures 7A,B, the relative distance decreased when the width of the pipe increased and the average error was around 0.1 m. Nevertheless, the errors in Figures 7C,D were much higher than for standing pipe tracking. The reason is that, when we detected the circle, the firing rate was much higher and the difference in width was smaller than in parallel-line detection. Additionally, in all cases, larger errors also occurred in the early period of the simulation experiments. We only evaluated the precision on the data derived from V-REP because we assumed the size of targets was known in NRP to avoid introducing too much error.
Furthermore, we estimated the actual offset of the target pipe and generated control signals so that our snake robot was able to achieve pipe tracking, as shown in Figure 8. Figures 8A-D show the final trajectories of the snake robot for tracking in V-REP, where the shapes in red mark the actual position of the target pipe. Figures 8E-H show the final trajectories for tracking in the NRP. All the results demonstrated that our SNN-based tracking framework is valid and effective. The snake robot was able to find the target pipe and approach it by performing a series of motions, including turning left, turning right, and going straight. The trajectories shown in Figure 8 do not trace a smooth curve because of the swinging of the snake robot, but the trend of motion is still correct. Another notable feature of the curves is that there are several obvious turning points in V-REP but not in the NRP.
Finally, we compared the performance of the proposed SNNs on a CPU and a GPU, respectively. As shown in Table 1, the GPU was able to accelerate the detection procedure in every case, especially for lying pipe detection. Because lying pipe detection has a three-dimensional parameter space, the GPU achieved a higher speedup there than for standing pipe detection. Moreover, in cases 3 and 4, pipe detection could be performed in real time on the GPU. In addition, the simulation experiments conducted in the NRP showed a higher frame rate than those in V-REP, especially when the GPU was used. A possible reason is that V-REP spent more time transferring data between the snake robot and the SNN.
CONCLUSION
In this work, we proposed a pipe-like object detection and tracking approach combining a DVS and SNNs, and successfully deployed it on a wheel-less snake robot. The target pipe was detected by processing the asynchronous address-event stream obtained from the DVS. Then, an autonomous tracking method was presented based on the relative position between the snake robot and the target. Furthermore, the performance of the proposed SNNs was evaluated on a CPU and a GPU. The experiments demonstrated the efficacy of our SNN-based tracking approach and showed the practicality and accuracy of the autonomous tracking method.
Comparing the performance of our SNN model on CPUs and GPUs, the model ran fastest on a GPU while displaying the highest precision on a CPU. However, there are still some drawbacks to our approach. The primary one is that tracking performance is sensitive to noise and to errors in detection and position estimation. |
# packages/dm_base/blueprints/base/routes.py
from flask import render_template, redirect, request, url_for
from jumpscale import j
from blueprints.base import blueprint
# from blueprints import login_manager
login_manager = j.servers.web.latest.loader.login_manager
# from blueprints import db
# from blueprints.base.models import User
# from flask_login import (
# current_user,
# login_required,
# login_user,
# logout_user
# )
# from .forms import LoginForm, CreateAccountForm
# @blueprint.route('/')
# def route_default():
# return redirect(url_for('base_blueprint.login'))
# @login_required
@blueprint.route('/githook', methods=['POST'])
def route_github():
    d = request.json
    # will be something like 'threefoldfoundation/info_foundation'
    name = d["repository"]["full_name"]
    account, repo = name.split("/", 1)
    print("GITHUB:%s:%s" % (account, repo))
    return "OK"

@blueprint.route('/<template>')
def route_template(template):
    if template == "favicon.ico":
        return url_for('static', filename="images/favicon.png")
    return render_template(template + '.html')
# @blueprint.route('/fixed_<template>')
# @login_required
# def route_fixed_template(template):
# return render_template('fixed/fixed_{}.html'.format(template))
# @blueprint.route('/page_<error>')
# def route_errors(error):
# return render_template('errors/page_{}.html'.format(error))
# ## Login & Registration
# @blueprint.route('/login', methods=['GET', 'POST'])
# def login():
# login_form = LoginForm(request.form)
# create_account_form = CreateAccountForm(request.form)
# if 'login' in request.form:
# username = str(request.form['username'])
# password = str(request.form['password'])
# user = User.query.filter_by(username=username).first()
# if user and password == user.password:
# login_user(user)
# return redirect(url_for('base_blueprint.route_default'))
# return render_template('errors/page_403.html')
# elif 'create_account' in request.form:
# login_form = LoginForm(request.form)
# user = User(**request.form)
# db.session.add(user)
# db.session.commit()
# return redirect(url_for('base_blueprint.login'))
# if not current_user.is_authenticated:
# return render_template(
# 'login/login.html',
# login_form=login_form,
# create_account_form=create_account_form
# )
# return redirect(url_for('home_blueprint.index'))
# @blueprint.route('/logout')
# @login_required
# def logout():
# logout_user()
# return redirect(url_for('base_blueprint.login'))
# @blueprint.route('/shutdown')
# def shutdown():
# func = request.environ.get('werkzeug.server.shutdown')
# if func is None:
# raise RuntimeError('Not running with the Werkzeug Server')
# func()
# return 'Server shutting down...'
# ## Errors
# @login_manager.unauthorized_handler
# def unauthorized_handler():
# return render_template('errors/page_403.html'), 403
@blueprint.errorhandler(403)
def access_forbidden(error):
return render_template('errors/page_403.html'), 403
@blueprint.errorhandler(404)
def not_found_error(error):
return render_template('errors/page_404.html'), 404
|
/**
 * String deserializer that trims leading and trailing whitespace from the
 * parsed text value before returning it.
 */
public class TrimWhitespaceDeserializer extends JsonDeserializer<String> {
@Override
public String deserialize(final JsonParser jsonParser, final DeserializationContext deserializationContext)
throws IOException, JsonProcessingException, JsonMappingException {
return jsonParser.getText().trim();
}
} |
class PyModelState:
"""Represents the state of a model."""
Running = 0
Paused = 1
Resetting = 2
Idle = 3
Stopped = 4
Restoring = 5
Stopping = 6 |
package main
import (
"fmt"
"os"
"regexp"
"github.com/wspace/corpus/tools"
"golang.org/x/sys/execabs"
)
func main() {
projects, err := tools.ReadAllProjects()
try(err)
tools.SortProjectsByID(projects)
var badURLs []*tools.Project
for _, p := range projects {
if p.ID == "" || len(p.Source) == 0 {
continue
}
if _, err := os.Stat(p.ID); err == nil {
continue
}
repo := getGitURL(p.Source[0])
if repo == "" {
badURLs = append(badURLs, p)
continue
}
fmt.Printf("git submodule add %s %s\n", repo, p.ID)
cmd := execabs.Command("git", "submodule", "add", repo, p.ID)
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
try(cmd.Run())
}
if len(badURLs) != 0 {
fmt.Println("First source not a recognized repo for:")
for _, p := range badURLs {
url := p.Source[0]
if label, err := tools.GetURLLabel(p.Source[0]); err == nil && label != "" {
url = label
}
fmt.Printf("- %s: %s\n", p.ID, url)
}
}
}
var (
github = regexp.MustCompile(`^https://(?:gist\.)?github\.com/[^/]+/[^/]+$`)
gitlab = regexp.MustCompile(`^https://gitlab\.com/[^/]+/[^/]+$`)
bitbucket = regexp.MustCompile(`^https://bitbucket\.org/[^/]+/[^/]+$`)
)
func getGitURL(url string) string {
if github.MatchString(url) || bitbucket.MatchString(url) {
return url
} else if gitlab.MatchString(url) {
return url + ".git"
}
return ""
}
func try(err error) {
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
|
/**
* Builds a UIDMeta from the current row in the passed result set
* @param rset The result set to read from
 * @param type The UIDMeta type to build
* @return the built UIDMeta
*/
private UIDMeta buildUIDMeta(final ResultSet rset, final UniqueIdType type) {
try {
UIDMeta meta = new UIDMeta(type, UniqueId.stringToUid(rset.getString("XUID")), rset.getString("NAME"));
meta.setCreated(mstou(rset.getTimestamp("CREATED").getTime()));
String mapStr = rset.getString("CUSTOM");
if(mapStr!=null) {
meta.setCustom((HashMap<String, String>) readMap(mapStr));
}
meta.setDescription(rset.getString("DESCRIPTION"));
meta.setNotes(rset.getString("NOTES"));
meta.setDisplayName(rset.getString("DISPLAY_NAME"));
return meta;
} catch (Exception ex) {
throw new RuntimeException("Failed to build UIDMeta of type [" + type + "]", ex);
}
} |
def _thunkByte(c, mask=0xff, shift=0):
    """Mask out bits of ``c``; shift the result right for a negative
    ``shift`` or left for a positive one."""
    val = c & mask
    if shift < 0:
        val = val >> abs(shift)
    elif shift > 0:
        val = val << shift
    return val
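# A quick usage sketch (hypothetical values): extracting the two nibbles
# of one byte with a mask and a shift.
b = 0xA7
low = _thunkByte(b, mask=0x0F)             # -> 0x07
high = _thunkByte(b, mask=0xF0, shift=-4)  # -> 0x0A |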
// String returns the textual representation of the game phase, or "Unknown" if
// it is not a valid phase.
func (p Phase) String() string {
switch p {
case SetupPhase:
return "Setup"
case MainPhase:
return "Main"
case EndPhase:
return "End"
default:
return "Unknown"
}
} |
// NeuralNet.cpp
#include "NeuralNet.h" |
from os.path import dirname, abspath
ROOT = dirname(dirname(abspath(__file__))) |
/**
* A class to handle arbitrary ad exchange info and write it to file
* @author Ben M. Faul
*
*/
public class Inspector extends BidRequest {
static String fileName = null;
static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd-HH:mm");
public Inspector() {
super();
parseSpecial();
}
/**
* Make bid request using a String.
 * @param in String. The JSON bid request to inspect.
* @throws Exception on JSON errors.
*/
public Inspector(String in) throws Exception {
super(in);
parseSpecial();
}
/**
* Make a bid request using an input stream.
* @param inputStream InputStream. The contents of a HTTP post.
* @throws Exception on JSON errors.
*/
public Inspector(InputStream inputStream) throws Exception {
setExchange( "inspector" );
id = "";
blackListed = true;
if (fileName == null) {
fileName = "inspector-" + sdf.format(new Date());
}
final int bufferSize = 1024;
final char[] buffer = new char[bufferSize];
final StringBuilder out = new StringBuilder();
Reader in = new InputStreamReader(inputStream, "UTF-8");
for (; ; ) {
int rsz = in.read(buffer, 0, buffer.length);
if (rsz < 0)
break;
out.append(buffer, 0, rsz);
}
String content = out.toString();
StringBuilder report = new StringBuilder();
try {
BidRequest br = new BidRequest(new StringBuilder(content));
br.blackListed = true;
br.setExchange( "inspector" );
String good = br.toString();
CampaignSelector.getInstance().getMaxConnections(br);
report.append("GOOD, DATA: ");
report.append(good);
} catch (Exception error) {
error.printStackTrace();
report.append("BAD, REASON: ");
report.append(error.toString());
report.append(", DATA: ");
report.append(content);
}
report.append("\n");
AppendToFile.item(fileName, report);
}
	/**
	 * Processes special AdMedia fields: sets the exchange name and the encoding flag.
	 */
@Override
public boolean parseSpecial() {
setExchange( "inspector" );
usesEncodedAdm = false;
return true;
}
/**
* Create a new object from this class instance.
* @throws JsonProcessingException on parse errors.
* @throws Exception on stream reading errors
*/
@Override
public Inspector copy(InputStream in) throws Exception {
return new Inspector(in);
}
} |
Barrett's Esophagus on Repeat Endoscopy: Should We Look More Than Once?
BACKGROUND: Barrett's esophagus (BE) is the precursor lesion for esophageal adenocarcinoma. The major risk factor for BE is chronic gastroesophageal reflux disease (GERD). Screening patients with longstanding GERD for BE with upper esophagogastroduodenoscopy (EGD) has become the standard practice, and guidelines from national gastrointestinal (GI) societies recommend only a single screening EGD because of limited evidence, suggesting that BE develops early in the course of GERD. We hypothesized that BE may be present in patients in whom initial endoscopy was negative, either due to a missed diagnosis or due to the later development of BE.
AIM: The purpose of the study was to determine how often BE is identified on repeat endoscopy performed after an initial negative examination.
METHODS: The Clinical Outcomes Research Initiative (CORI) National Endoscopic Database was searched for all patients who had more than one EGD during the 5-yr period between January 1, 2000, and December 31, 2004. Patients who had either procedure for an indication of surveillance of BE were excluded. The primary outcome was a finding of newly suspected BE on repeat examination after an initial negative examination.
RESULTS: In total, 24,406 patients underwent more than one endoscopy during the study period. Five hundred sixty-one (2.3%) were found to have suspected BE on repeat EGD following an initial negative examination. More men than women had BE on repeat examination (3.1% vs 1.2%, P < 0.0001). BE on repeat examination was more common in patients with reflux as an indication for endoscopy than in patients with any other indication (5% vs 1.6%, P ≤ 0.0001). In reflux patients with esophagitis on initial examination, 9.9% were found to have suspected BE on repeat examination versus 1.8% of reflux patients with no esophagitis on initial examination (P < 0.0001).
CONCLUSIONS: BE is rarely found on second endoscopy performed less than 5 yr after an initial negative examination except in patients with esophagitis on the first endoscopy. Repeat EGD for Barrett's screening should not be performed out of concern for a missed diagnosis except when BE may have been obscured by overlying esophageal inflammation. |
#include <stdio.h>
#include <ctype.h>
#include <string.h>
int main()
{
    char word[101];
    int i, j, flag = 0;
    scanf("%s", word);
    if (!isupper(word[0]))  /* first letter lowercase, rest uppercase */
    {
        for (i = 1; word[i] != '\0'; i++)
            if (!isupper(word[i]))
                flag++;
        if (flag == 0)
        {
            word[0] = toupper(word[0]);
            for (j = 1; word[j] != '\0'; j++)
                word[j] = tolower(word[j]);
        }
    }
    else  /* first letter uppercase, and the rest as well */
    {
        for (i = 1; word[i] != '\0'; i++)
            if (!isupper(word[i]))
                flag++;
        if (flag == 0)
            for (j = 0; word[j] != '\0'; j++)
                word[j] = tolower(word[j]);
    }
    printf("%s", word);
    return 0;
}
|
/**
 * Creates two lists, one containing the booked tours,
 * the other containing the dates of the bookings.
 *
 * Fetches all tour IDs from the 'booking' table in the database.
 * If the text input exactly matches a tour ID,
 * creates a new Booking object from the information stored in the database,
 * adds it to bookingList, and adds its date to bookingDateList.
 *
 * @throws Exception thrown on PostgreSQL errors
 */
public void createBookingDateList() throws Exception{
if (selectedTour == null) return;
String text = selectedTour.getID().toLowerCase();
this.connection = this.getConnection();
bookingList = new ArrayList<Booking>();
bookingDateList = new ArrayList<String>();
try {
PreparedStatement stmt = connection.prepareStatement(
"SELECT * FROM booking "
);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
String tourID = rs.getString("tourId").toLowerCase();
if (!(text.equals(tourID))) continue;
Booking booking = new Booking(
rs.getString("id"),
selectedTour,
null,
rs.getString("hotel"),
rs.getInt("capacity"),
rs.getInt("miniCustomer"),
rs.getInt("currentCustomer")
);
booking.setDateString(rs.getString("dates"));
booking.getTourGuide().setName(rs.getString("tourGuide"));
booking.getTourGuide().setLineAcc(rs.getString("lineAcc"));
if (!matchByWeekDayOnly(tourID, booking.dateToDay())) continue;
bookingList.add(booking);
bookingDateList.add(rs.getString("dates"));
}
rs.close();
stmt.close();
}
catch (Exception e){
System.out.println("searchTour()" + e);
}
connection.close();
} |
// As a special case, the JVMS passed to this CallGenerator is
// for the method execution already in progress, not just the JVMS
// of the caller. Thus, this CallGenerator cannot be mixed with others!
CallGenerator* CallGenerator::for_osr(ciMethod* m, int osr_bci) {
if (!ParseGenerator::can_parse(m, true)) return NULL;
float past_uses = m->interpreter_invocation_count();
float expected_uses = past_uses;
return new ParseGenerator(m, expected_uses, true);
} |
Remember Matt Miazga? He's the New York Red Bulls' homegrown center back, one of just two players in that position to survive the off-season cull of the 2014 squad. (The other is Damien Perrinelle. Armando is, we must assume, still a Red Bull merely by technicality - he has a contract, but that largely seems to be in the service of an entry visa to the US and a gym membership. RBNY has shown no interest in getting him on the field so far in preseason.)
Miazga has had a busy off-season. From December 3-10, he was in a fitness camp as part of the US U-20 team's preparations for the CONCACAF U-20 Championship. Then he had a week in Honduras with the U-20 MNT before Christmas. In late December, he rolled into the final US U-20 training camp before the big show in Jamaica, and then he was a regular starter for the team at the tournament, which ran from January 9-24.
That's pretty much the entire off-season either in training or in competition. To top it off, Papa Red Bull arranged for Miazga to fly out to Qatar as soon as he was relieved of his national team obligations for a trial training stint (it was a trial) that ended roughly two days after he arrived. By February 3, he had joined RBNY's training camp in Orlando, almost two months to the day since he started his mid-winter world tour.
Meanwhile, the Red Bulls signed a whole bunch of center backs and got on with preseason. The team played a closed-door friendly with the University of South Florida before Miazga returned. On February 5, the squad squeezed in two scrimmages - no Miazga.
No great reason for concern: he basically didn't have an off-season. If anything, it was a surprise Miazga was in camp at all in early February. One would have thought he needed a rest more than another training session.
But now he's been back for around three weeks. RBNY has been back home, and is now back in Florida. The team has played two more friendlies - against HB Koge and Oklahoma City Energy - without Miazga. Still no reason to panic: he could surely use an extended rest.
Except, he's not missing these games because he's resting, it would seem. Miazga's Qatar excursion was cut short because he picked up a knock in a scrimmage between RB Leipzig and Papa's Salzburg. The club went out of its way to make sure we understood the injury was not serious. Of course, evaluating an injury sustained in the Middle East from New Jersey isn't recommended medical practice, but it is said there have been great advances in telemetry.
When Miazga arrived in Florida in early February, that not-at-all-serious knock was described as a "right quad contusion", and the club's report stated it was hoped he'd be back on the field in a week.
It has been more than a week since February 3. On February 20, RBNY's training report included reference to Miazga "beginning to train again". He's not even been training?
He needed a rest, so maybe he should have missed the bulk of the team's preseason work anyway. The Red Bulls have two friendlies left before, in theory, they wrap up training camp and head back to Harrison to get started on the regular season.
If Miazga doesn't play in either of those games, there would seem to be only one conclusion: a couple of days in Qatar wrecked his preseason. Papa Red Bull yanked a young player halfway round the world, broke him, and then sent him back to RBNY to get fixed. The injury may not have been serious, but missing out on the chance to challenge for a starting role right out of the gate surely is for a highly-rated (though presumably exhausted - he's had no off-season) young player.
The start of the regular season may yet be delayed due to MLS's ongoing labor negotiations. That scenario would reduce the significance of RB Leipzig's decision to break but not buy Matt Miazga. But it ought not to be permitted to let the situation escape notice. |
def index(self, word):
    """Return the vocabulary index of ``word``, falling back to the index
    of the unknown token for out-of-vocabulary words."""
    if word in self._vocabulary:
        return self._vocabulary.index(word)
    return self._vocabulary.index(self._unk)
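# Fallback sketch (hypothetical vocabulary, assuming self._unk = '<unk>'):
#   vocabulary = ['the', 'cat', '<unk>']
#   index('cat') -> 1
#   index('dog') -> 2   (out-of-vocabulary words map to '<unk>') |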
I really love PC games. I've been playing them since childhood and I hope I'll keep it up till I'm old and grumpy ☺ One of the games I'm currently playing is Starcraft 2. It is a real-time strategy game where you play one of 3 different races, manage your economy and command vast armies. Some people call it modern-day chess, and they are in some sense right. The game requires a lot of strategy, fast thinking, quick hands and training. Since the release of the first version of the game, tournaments have been held in which professional players fight for glory and sizeable money prizes. For example, in the latest Intel Extreme Masters Katowice tournament, the winner took a staggering $100,000. Esports is becoming a big industry with large teams, managers, sponsors and famous players. Many young people want to become gaming athletes and compete with the best. I would like to explore some of the fascinating data concerning Starcraft 2. If you already play Starcraft 2, you might find the second part of my post interesting.
The most important part is the players. Below we can see a little infographic showing the earnings of the very best progamers. The data is available here. |
// diable201/WEB-development
export const categories = [
{
id: 1,
name: 'Processors'
},
{
id: 2,
name: 'Videocards'
},
{
id: 3,
name: 'HDD, SSD'
},
{
id: 4,
name: 'RAM'
},
{
id: 5,
name: 'Other'
}
];
|
/**
* Performs the second pass of the assembly procedure, which actually assembles
* the instructions and operands in the program.
* @param in Pointer to the file containing the code. It should already
* have been opened in "r" mode and should point to the begin-
* ning of the file.
* @param out Pointer to the output file, where the corresponding machine
* code should be saved. It should have already been opened in
* "w" mode, and should point to the beginning of the file.
* @param idt Instruction data table, created by idtCreate ("idt.h").
* @param sym_table Symbol table obtained in the first pass.
* @return 0 if everything went alright.
*/
int asmReplaceAndSave(FILE *in, FILE *out, Map *idt, Map *sym_table,
output_mode om){
Line l;
Instr ins;
char buf[BUF_LEN + 1];
AsmData ad = {.ilc = 0, .out = out};
while (fgets(buf, BUF_LEN, in)){
parseLine(buf, &l);
if (!l.instr)
continue;
int get_status = mapGet(idt, l.instr, &ins);
if (get_status == 1){
fprintf(stderr, "Invalid instruction: %s "
"(ILC: %d)\n", l.instr, ad.ilc);
exit(1);
}
if (ins.type == ins_t_real){
fprintf(out, "%d\n", ins.data.real.opcode);
ad.ilc++;
assembleOperand(l.op1, ins.data.real.op1, &ad, sym_table, om);
assembleOperand(l.op2, ins.data.real.op2, &ad, sym_table, om);
} else {
int call_status;
if (ins.data.pseudo.num_ops == 0){
call_status = ins.data.pseudo.call.zero(&ad);
} else if (ins.data.pseudo.num_ops == 1){
call_status = ins.data.pseudo.call.one(&ad, l.op1);
} else if (ins.data.pseudo.num_ops == 2){
call_status = ins.data.pseudo.call.two(&ad, l.op1, l.op2);
}
ad.ilc += ins.data.pseudo.ilc_inc;
if (call_status == INTERRUPT_SIGNAL)
break;
}
}
return 0;
} |
-- src/Euler131.hs
{--
https://projecteuler.net/problem=131
I referred to SMQ's comments; the reasoning below follows them.
1)
n ^ 3 + n ^ 2 * p = n * n * (n + p)
(p is prime)
For n * n * (n + p) to be a perfect cube, n and (n + p) must themselves be cubes.
Since (n + p) - n = p, p must be a difference of two cubes.
2)
a ^ 3 - b ^ 3 = (a - b)(a ^ 2 + a * b + b ^ 2)
For a ^ 3 - b ^ 3 to be prime, the difference between a and b must be 1:
if it were larger, (a - b) > 1 would divide (a ^ 3 - b ^ 3), making it composite.
3)
By 1) and 2), a and b must be consecutive, so p = (n + 1) ^ 3 - n ^ 3 = 3 * n ^ 2 + 3 * n + 1.
--}
module Euler131 (e131Solve) where
import MillerRabin (isPrime)
e131Solve :: Int
e131Solve = length . takeWhile (< limit) . filter isPrime . map diffConsecutiveCubes $ [1..]
diffConsecutiveCubes :: Integral a => a -> a
diffConsecutiveCubes n = (n + 1) ^ 3 - n ^ 3
limit :: Integral a => a
limit = 10 ^ 6
|
def IPv4ToID(x):
    """Convert a dotted-quad IPv4 address string to its 32-bit integer form."""
    if six.PY2:
        id = int(ipaddress.IPv4Address(x.decode('utf-8')))
    else:
        id = int(ipaddress.IPv4Address(x))
    return id
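# Round-trip sketch (assuming Python 3); the integer form converts back
# with the stdlib ipaddress module:
#   IPv4ToID('10.0.0.1')                  -> 167772161
#   str(ipaddress.IPv4Address(167772161)) -> '10.0.0.1' |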
# cook your dish here
from math import ceil
for _ in range(int(input())):
    n, k = [int(x) for x in input().split()]
    a = [int(x) for x in input().split()]
    s = a[0]   # running sum of values accepted so far
    res = 0    # total amount that has to be added
    for i in range(1, n):
        eff = a[i] / s
        if eff > (k / 100):
            # Smallest integer addition to the running sum that brings
            # a[i] / s back within k percent.
            change = ceil(((100 * a[i]) - (s * k)) / k)
            s += change
            res += change
        s += a[i]
    print(res) |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static com.google.devtools.build.lib.actions.FilesetTraversalParams.PackageBoundaryMode.CROSS;
import static com.google.devtools.build.lib.actions.FilesetTraversalParams.PackageBoundaryMode.DONT_CROSS;
import static com.google.devtools.build.lib.actions.FilesetTraversalParams.PackageBoundaryMode.REPORT_ERROR;
import static com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFileFactory.danglingSymlink;
import static com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFileFactory.regularFile;
import static com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFileFactory.symlinkToDirectory;
import static com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFileFactory.symlinkToFile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.devtools.build.lib.actions.ActionLookupData;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.Artifact.SpecialArtifact;
import com.google.devtools.build.lib.actions.Artifact.TreeFileArtifact;
import com.google.devtools.build.lib.actions.ArtifactRoot;
import com.google.devtools.build.lib.actions.ArtifactRoot.RootType;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FileContentsProxy;
import com.google.devtools.build.lib.actions.FileStateValue;
import com.google.devtools.build.lib.actions.FileStateValue.RegularFileStateValue;
import com.google.devtools.build.lib.actions.FileValue;
import com.google.devtools.build.lib.actions.FilesetTraversalParams.DirectTraversalRoot;
import com.google.devtools.build.lib.actions.FilesetTraversalParams.PackageBoundaryMode;
import com.google.devtools.build.lib.actions.HasDigest;
import com.google.devtools.build.lib.actions.util.ActionsTestUtil;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ServerDirectories;
import com.google.devtools.build.lib.analysis.util.AnalysisMock;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.events.NullEventHandler;
import com.google.devtools.build.lib.packages.WorkspaceFileValue;
import com.google.devtools.build.lib.pkgcache.PathPackageLocator;
import com.google.devtools.build.lib.skyframe.ExternalFilesHelper.ExternalFileAction;
import com.google.devtools.build.lib.skyframe.PackageLookupFunction.CrossRepositoryLabelViolationStrategy;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalFunction.DanglingSymlinkException;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalFunction.RecursiveFilesystemTraversalException;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.ResolvedFile;
import com.google.devtools.build.lib.skyframe.RecursiveFilesystemTraversalValue.TraversalRequest;
import com.google.devtools.build.lib.testutil.FoundationTestCase;
import com.google.devtools.build.lib.testutil.TimestampGranularityUtils;
import com.google.devtools.build.lib.util.io.OutErr;
import com.google.devtools.build.lib.vfs.FileStatus;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.lib.vfs.RootedPath;
import com.google.devtools.build.lib.vfs.UnixGlob;
import com.google.devtools.build.skyframe.AbstractSkyKey;
import com.google.devtools.build.skyframe.ErrorInfo;
import com.google.devtools.build.skyframe.EvaluationContext;
import com.google.devtools.build.skyframe.EvaluationProgressReceiver;
import com.google.devtools.build.skyframe.EvaluationResult;
import com.google.devtools.build.skyframe.InMemoryMemoizingEvaluator;
import com.google.devtools.build.skyframe.MemoizingEvaluator;
import com.google.devtools.build.skyframe.RecordingDifferencer;
import com.google.devtools.build.skyframe.SequencedRecordingDifferencer;
import com.google.devtools.build.skyframe.SequentialBuildDriver;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyFunctionName;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import javax.annotation.Nullable;
import net.starlark.java.eval.StarlarkSemantics;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link RecursiveFilesystemTraversalFunction}. */
@RunWith(JUnit4.class)
public final class RecursiveFilesystemTraversalFunctionTest extends FoundationTestCase {
private static final HasDigest EMPTY_METADATA = HasDigest.EMPTY;
private RecordingEvaluationProgressReceiver progressReceiver;
private MemoizingEvaluator evaluator;
private SequentialBuildDriver driver;
private RecordingDifferencer differencer;
private AtomicReference<PathPackageLocator> pkgLocator;
private NonHermeticArtifactFakeFunction artifactFunction;
private List<Artifact.DerivedArtifact> artifacts;
@Before
public final void setUp() {
artifacts = new ArrayList<>();
AnalysisMock analysisMock = AnalysisMock.get();
pkgLocator =
new AtomicReference<>(
new PathPackageLocator(
outputBase,
ImmutableList.of(Root.fromPath(rootDirectory)),
BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY));
AtomicReference<ImmutableSet<PackageIdentifier>> deletedPackages =
new AtomicReference<>(ImmutableSet.<PackageIdentifier>of());
BlazeDirectories directories =
new BlazeDirectories(
new ServerDirectories(rootDirectory, outputBase, rootDirectory),
rootDirectory,
null,
analysisMock.getProductName());
ExternalFilesHelper externalFilesHelper = ExternalFilesHelper.createForTesting(
pkgLocator, ExternalFileAction.DEPEND_ON_EXTERNAL_PKG_FOR_EXTERNAL_REPO_PATHS, directories);
ConfiguredRuleClassProvider ruleClassProvider = analysisMock.createRuleClassProvider();
Map<SkyFunctionName, SkyFunction> skyFunctions = new HashMap<>();
skyFunctions.put(
FileStateValue.FILE_STATE,
new FileStateFunction(
new AtomicReference<>(),
new AtomicReference<>(UnixGlob.DEFAULT_SYSCALLS),
externalFilesHelper));
skyFunctions.put(FileValue.FILE, new FileFunction(pkgLocator));
skyFunctions.put(SkyFunctions.DIRECTORY_LISTING, new DirectoryListingFunction());
skyFunctions.put(
SkyFunctions.DIRECTORY_LISTING_STATE,
new DirectoryListingStateFunction(
externalFilesHelper, new AtomicReference<>(UnixGlob.DEFAULT_SYSCALLS)));
skyFunctions.put(
SkyFunctions.RECURSIVE_FILESYSTEM_TRAVERSAL, new RecursiveFilesystemTraversalFunction());
skyFunctions.put(
SkyFunctions.PACKAGE_LOOKUP,
new PackageLookupFunction(
deletedPackages,
CrossRepositoryLabelViolationStrategy.ERROR,
BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY,
BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER));
skyFunctions.put(
SkyFunctions.IGNORED_PACKAGE_PREFIXES,
new IgnoredPackagePrefixesFunction(
/*ignoredPackagePrefixesFile=*/ PathFragment.EMPTY_FRAGMENT));
skyFunctions.put(
SkyFunctions.PACKAGE, new PackageFunction(null, null, null, null, null, null, null, null));
skyFunctions.put(
WorkspaceFileValue.WORKSPACE_FILE,
new WorkspaceFileFunction(
ruleClassProvider,
analysisMock
.getPackageFactoryBuilderForTesting(directories)
.build(ruleClassProvider, fileSystem),
directories,
/*bzlLoadFunctionForInlining=*/ null));
skyFunctions.put(
SkyFunctions.EXTERNAL_PACKAGE,
new ExternalPackageFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER));
skyFunctions.put(
SkyFunctions.LOCAL_REPOSITORY_LOOKUP,
new LocalRepositoryLookupFunction(BazelSkyframeExecutorConstants.EXTERNAL_PACKAGE_HELPER));
skyFunctions.put(
SkyFunctions.FILE_SYMLINK_INFINITE_EXPANSION_UNIQUENESS,
new FileSymlinkInfiniteExpansionUniquenessFunction());
skyFunctions.put(
SkyFunctions.FILE_SYMLINK_CYCLE_UNIQUENESS, new FileSymlinkCycleUniquenessFunction());
// We use a non-hermetic key to allow us to invalidate the proper artifacts on rebuilds. We
// could have the artifact depend on the corresponding FileValue, but that would not cover the
// case of a generated directory, which we have test coverage for.
skyFunctions.put(Artifact.ARTIFACT, new ArtifactFakeFunction());
artifactFunction = new NonHermeticArtifactFakeFunction();
skyFunctions.put(SkyFunctions.ACTION_EXECUTION, new ActionFakeFunction());
skyFunctions.put(NONHERMETIC_ARTIFACT, artifactFunction);
progressReceiver = new RecordingEvaluationProgressReceiver();
differencer = new SequencedRecordingDifferencer();
evaluator = new InMemoryMemoizingEvaluator(skyFunctions, differencer, progressReceiver);
driver = new SequentialBuildDriver(evaluator);
PrecomputedValue.BUILD_ID.set(differencer, UUID.randomUUID());
PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get());
PrecomputedValue.STARLARK_SEMANTICS.set(differencer, StarlarkSemantics.DEFAULT);
}
private Artifact sourceArtifact(String path) {
return ActionsTestUtil.createArtifact(
ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory)), path);
}
private Artifact sourceArtifactUnderPackagePath(String path, String packagePath) {
return ActionsTestUtil.createArtifact(
ArtifactRoot.asSourceRoot(Root.fromPath(rootDirectory.getRelative(packagePath))), path);
}
private SpecialArtifact treeArtifact(String path) {
return ActionsTestUtil.createTreeArtifactWithGeneratingAction(
ArtifactRoot.asDerivedRoot(rootDirectory, RootType.Output, "out"),
PathFragment.create("out/" + path));
}
private void addNewTreeFileArtifact(SpecialArtifact parent, String relatedPath)
throws IOException {
TreeFileArtifact treeFileArtifact = TreeFileArtifact.createTreeOutput(parent, relatedPath);
artifactFunction.addNewTreeFileArtifact(treeFileArtifact);
}
private Artifact derivedArtifact(String path) {
PathFragment execPath = PathFragment.create("out").getRelative(path);
Artifact.DerivedArtifact result =
(Artifact.DerivedArtifact)
ActionsTestUtil.createArtifactWithExecPath(
ArtifactRoot.asDerivedRoot(rootDirectory, RootType.Output, "out"), execPath);
result.setGeneratingActionKey(
ActionLookupData.create(ActionsTestUtil.NULL_ARTIFACT_OWNER, artifacts.size()));
artifacts.add(result);
return result;
}
private static RootedPath rootedPath(Artifact artifact) {
return RootedPath.toRootedPath(artifact.getRoot().getRoot(), artifact.getRootRelativePath());
}
private RootedPath rootedPath(String path, String packagePath) {
return RootedPath.toRootedPath(
Root.fromPath(rootDirectory.getRelative(packagePath)), PathFragment.create(path));
}
private static RootedPath childOf(Artifact artifact, String relative) {
return RootedPath.toRootedPath(
artifact.getRoot().getRoot(), artifact.getRootRelativePath().getRelative(relative));
}
private static RootedPath childOf(RootedPath path, String relative) {
return RootedPath.toRootedPath(
path.getRoot(), path.getRootRelativePath().getRelative(relative));
}
private static RootedPath parentOf(RootedPath path) {
return Preconditions.checkNotNull(path.getParentDirectory());
}
private static RootedPath siblingOf(RootedPath path, String relative) {
PathFragment parent =
Preconditions.checkNotNull(path.getRootRelativePath().getParentDirectory());
return RootedPath.toRootedPath(path.getRoot(), parent.getRelative(relative));
}
private static RootedPath siblingOf(Artifact artifact, String relative) {
PathFragment parent =
Preconditions.checkNotNull(artifact.getRootRelativePath().getParentDirectory());
return RootedPath.toRootedPath(artifact.getRoot().getRoot(), parent.getRelative(relative));
}
private void createFile(Path path, String... contents) throws Exception {
if (!path.getParentDirectory().exists()) {
scratch.dir(path.getParentDirectory().getPathString());
}
scratch.file(path.getPathString(), contents);
}
private void createFile(Artifact artifact, String... contents) throws Exception {
createFile(artifact.getPath(), contents);
}
private RootedPath createFile(RootedPath path, String... contents) throws Exception {
scratch.dir(parentOf(path).asPath().getPathString());
createFile(path.asPath(), contents);
return path;
}
private static TraversalRequest fileLikeRoot(Artifact file, PackageBoundaryMode pkgBoundaryMode,
boolean strictOutput) {
return TraversalRequest.create(
DirectTraversalRoot.forFileOrDirectory(file),
!file.isSourceArtifact(),
pkgBoundaryMode,
strictOutput, false,
null);
}
private static TraversalRequest fileLikeRoot(Artifact file, PackageBoundaryMode pkgBoundaryMode) {
return fileLikeRoot(file, pkgBoundaryMode, false);
}
private static TraversalRequest pkgRoot(
RootedPath pkgDirectory, PackageBoundaryMode pkgBoundaryMode) {
return TraversalRequest.create(
DirectTraversalRoot.forRootedPath(pkgDirectory), false, pkgBoundaryMode,
false, true, null);
}
private <T extends SkyValue> EvaluationResult<T> eval(SkyKey key) throws Exception {
EvaluationContext evaluationContext =
EvaluationContext.newBuilder()
.setKeepGoing(false)
.setNumThreads(SkyframeExecutor.DEFAULT_THREAD_COUNT)
.setEventHandler(NullEventHandler.INSTANCE)
.build();
return driver.evaluate(ImmutableList.of(key), evaluationContext);
}
private RecursiveFilesystemTraversalValue evalTraversalRequest(TraversalRequest params)
throws Exception {
EvaluationResult<RecursiveFilesystemTraversalValue> result = eval(params);
assertThat(result.hasError()).isFalse();
return result.get(params);
}
/**
* Asserts that the requested SkyValue can be built and results in the expected set of files.
*
* <p>The metadata of files is ignored in comparing the actual results with the expected ones.
* The returned object however contains the actual metadata.
*/
@SafeVarargs
private final RecursiveFilesystemTraversalValue traverseAndAssertFiles(
TraversalRequest params, ResolvedFile... expectedFilesIgnoringMetadata) throws Exception {
RecursiveFilesystemTraversalValue result = evalTraversalRequest(params);
Map<PathFragment, ResolvedFile> nameToActualResolvedFiles = new HashMap<>();
for (ResolvedFile act : result.getTransitiveFiles().toList()) {
      // We can't compare directly, since the metadata would differ, so we
      // compare the results of public method calls instead.
nameToActualResolvedFiles.put(act.getNameInSymlinkTree(), act);
}
assertExpectedResolvedFilesPresent(nameToActualResolvedFiles, expectedFilesIgnoringMetadata);
return result;
}
@SafeVarargs
private static final void assertExpectedResolvedFilesPresent(
Map<PathFragment, ResolvedFile> nameToActualResolvedFiles,
ResolvedFile... expectedFilesIgnoringMetadata)
throws Exception {
assertWithMessage("Expected files " + Arrays.toString(expectedFilesIgnoringMetadata))
.that(nameToActualResolvedFiles)
.hasSize(expectedFilesIgnoringMetadata.length);
assertEquals(
"Unequal number of ResolvedFiles in Actual and expected.",
expectedFilesIgnoringMetadata.length,
nameToActualResolvedFiles.size());
for (ResolvedFile expected : expectedFilesIgnoringMetadata) {
ResolvedFile actual = nameToActualResolvedFiles.get(expected.getNameInSymlinkTree());
assertEquals(expected.getType(), actual.getType());
assertEquals(expected.getPath(), actual.getPath());
assertEquals(expected.getTargetInSymlinkTree(false), actual.getTargetInSymlinkTree(false));
try {
expected.getTargetInSymlinkTree(true);
// No exception thrown, let's safely compare results.
assertEquals(expected.getTargetInSymlinkTree(true), actual.getTargetInSymlinkTree(true));
} catch (DanglingSymlinkException e) {
assertThrows(
"Expected exception not thrown while requesting resolved symlink.",
DanglingSymlinkException.class,
() -> actual.getTargetInSymlinkTree(true));
}
}
}
private void appendToFile(RootedPath rootedPath, SkyKey toInvalidate, String content)
throws Exception {
Path path = rootedPath.asPath();
if (path.exists()) {
try (OutputStream os = path.getOutputStream(/*append=*/ true)) {
os.write(content.getBytes(StandardCharsets.UTF_8));
}
differencer.invalidate(ImmutableList.of(toInvalidate));
} else {
createFile(path, content);
}
}
private void appendToFile(RootedPath rootedPath, String content) throws Exception {
appendToFile(rootedPath, FileStateValue.key(rootedPath), content);
}
private void appendToFile(Artifact file, String content) throws Exception {
SkyKey key =
file.isSourceArtifact()
? FileStateValue.key(rootedPath(file))
: new NonHermeticArtifactSkyKey(file);
appendToFile(rootedPath(file), key, content);
}
private void invalidateDirectory(RootedPath path) {
differencer.invalidate(ImmutableList.of(DirectoryListingStateValue.key(path)));
}
private void invalidateDirectory(Artifact directoryArtifact) {
invalidateDirectory(rootedPath(directoryArtifact));
}
private void invalidateOutputArtifact(Artifact output) {
assertThat(output.isSourceArtifact()).isFalse();
differencer.invalidate(ImmutableList.of(new NonHermeticArtifactSkyKey(output)));
}
private static final class RecordingEvaluationProgressReceiver
extends EvaluationProgressReceiver.NullEvaluationProgressReceiver {
Set<SkyKey> invalidations;
Set<SkyKey> evaluations;
RecordingEvaluationProgressReceiver() {
clear();
}
void clear() {
invalidations = Sets.newConcurrentHashSet();
evaluations = Sets.newConcurrentHashSet();
}
@Override
public void invalidated(SkyKey skyKey, InvalidationState state) {
invalidations.add(skyKey);
}
@Override
public void evaluated(
SkyKey skyKey,
@Nullable SkyValue value,
Supplier<EvaluationSuccessState> evaluationSuccessState,
EvaluationState state) {
if (evaluationSuccessState.get().succeeded()) {
evaluations.add(skyKey);
}
}
}
private static void assertTraversalRootHashesAre(
boolean equal, RecursiveFilesystemTraversalValue a, RecursiveFilesystemTraversalValue b)
throws Exception {
if (equal) {
assertThat(a.getResolvedRoot().get().hashCode())
.isEqualTo(b.getResolvedRoot().get().hashCode());
} else {
assertThat(a.getResolvedRoot().get().hashCode())
.isNotEqualTo(b.getResolvedRoot().get().hashCode());
}
}
private static void assertTraversalRootHashesAreEqual(
RecursiveFilesystemTraversalValue a, RecursiveFilesystemTraversalValue b) throws Exception {
assertTraversalRootHashesAre(true, a, b);
}
private static void assertTraversalRootHashesAreNotEqual(
RecursiveFilesystemTraversalValue a, RecursiveFilesystemTraversalValue b) throws Exception {
assertTraversalRootHashesAre(false, a, b);
}
private void assertTraversalOfFile(Artifact rootArtifact, boolean strictOutput) throws Exception {
TraversalRequest traversalRoot = fileLikeRoot(rootArtifact, DONT_CROSS, strictOutput);
RootedPath rootedPath = createFile(rootedPath(rootArtifact), "foo");
// Assert that the SkyValue is built and looks right.
ResolvedFile expected = regularFile(rootedPath, EMPTY_METADATA);
RecursiveFilesystemTraversalValue v1 = traverseAndAssertFiles(traversalRoot, expected);
assertThat(progressReceiver.invalidations).isEmpty();
assertThat(progressReceiver.evaluations).contains(traversalRoot);
progressReceiver.clear();
// Edit the file and verify that the value is rebuilt.
appendToFile(rootArtifact, "bar");
RecursiveFilesystemTraversalValue v2 = traverseAndAssertFiles(traversalRoot, expected);
assertThat(progressReceiver.invalidations).contains(traversalRoot);
assertThat(progressReceiver.evaluations).contains(traversalRoot);
assertThat(v2).isNotEqualTo(v1);
assertTraversalRootHashesAreNotEqual(v1, v2);
progressReceiver.clear();
}
@Test
public void testTraversalOfSourceFile() throws Exception {
assertTraversalOfFile(sourceArtifact("foo/bar.txt"), false);
}
@Test
public void testTraversalOfGeneratedFile() throws Exception {
assertTraversalOfFile(derivedArtifact("foo/bar.txt"), false);
}
@Test
public void testTraversalOfGeneratedFileWithStrictOutput() throws Exception {
assertTraversalOfFile(derivedArtifact("foo/bar.txt"), true);
}
@Test
public void testTraversalOfSymlinkToFile() throws Exception {
Artifact linkNameArtifact = sourceArtifact("foo/baz/qux.sym");
Artifact linkTargetArtifact = sourceArtifact("foo/bar/baz.txt");
PathFragment linkValue = PathFragment.create("../bar/baz.txt");
TraversalRequest traversalRoot = fileLikeRoot(linkNameArtifact, DONT_CROSS);
createFile(linkTargetArtifact);
scratch.dir(linkNameArtifact.getExecPath().getParentDirectory().getPathString());
rootDirectory.getRelative(linkNameArtifact.getExecPath()).createSymbolicLink(linkValue);
// Assert that the SkyValue is built and looks right.
RootedPath symlinkNamePath = rootedPath(linkNameArtifact);
RootedPath symlinkTargetPath = rootedPath(linkTargetArtifact);
ResolvedFile expected =
symlinkToFile(symlinkTargetPath, symlinkNamePath, linkValue, EMPTY_METADATA);
RecursiveFilesystemTraversalValue v1 = traverseAndAssertFiles(traversalRoot, expected);
assertThat(progressReceiver.invalidations).isEmpty();
assertThat(progressReceiver.evaluations).contains(traversalRoot);
progressReceiver.clear();
// Edit the target of the symlink and verify that the value is rebuilt.
appendToFile(linkTargetArtifact, "bar");
RecursiveFilesystemTraversalValue v2 = traverseAndAssertFiles(traversalRoot, expected);
assertThat(progressReceiver.invalidations).contains(traversalRoot);
assertThat(progressReceiver.evaluations).contains(traversalRoot);
assertThat(v2).isNotEqualTo(v1);
assertTraversalRootHashesAreNotEqual(v1, v2);
}
@Test
public void testTraversalOfTransitiveSymlinkToFile() throws Exception {
Artifact directLinkArtifact = sourceArtifact("direct/file.sym");
Artifact transitiveLinkArtifact = sourceArtifact("transitive/sym.sym");
RootedPath fileA = createFile(rootedPath(sourceArtifact("a/file.a")));
RootedPath directLink = rootedPath(directLinkArtifact);
RootedPath transitiveLink = rootedPath(transitiveLinkArtifact);
PathFragment directLinkPath = PathFragment.create("../a/file.a");
PathFragment transitiveLinkPath = PathFragment.create("../direct/file.sym");
parentOf(directLink).asPath().createDirectory();
parentOf(transitiveLink).asPath().createDirectory();
directLink.asPath().createSymbolicLink(directLinkPath);
transitiveLink.asPath().createSymbolicLink(transitiveLinkPath);
traverseAndAssertFiles(
fileLikeRoot(directLinkArtifact, DONT_CROSS),
symlinkToFile(fileA, directLink, directLinkPath, EMPTY_METADATA));
traverseAndAssertFiles(
fileLikeRoot(transitiveLinkArtifact, DONT_CROSS),
symlinkToFile(fileA, transitiveLink, transitiveLinkPath, EMPTY_METADATA));
}
private void assertTraversalOfDirectory(Artifact directoryArtifact) throws Exception {
// Create files under the directory.
// Use the root + root-relative path of the rootArtifact to create these files, rather than
// using the rootDirectory + execpath of the rootArtifact. The resulting paths are the same
// but the RootedPaths are different:
    // in the 1st case, it is: RootedPath(/root/execroot, relative);
    // in the 2nd case, it is: RootedPath(/root, execroot/relative).
// Creating the files will also create the parent directories.
RootedPath file1 = createFile(childOf(directoryArtifact, "bar.txt"));
RootedPath file2;
if (directoryArtifact.isTreeArtifact()) {
file2 = createFile(childOf(directoryArtifact, "qux.txt"));
addNewTreeFileArtifact((SpecialArtifact) directoryArtifact, "bar.txt");
addNewTreeFileArtifact((SpecialArtifact) directoryArtifact, "qux.txt");
} else {
file2 = createFile(childOf(directoryArtifact, "baz/qux.txt"));
}
TraversalRequest traversalRoot = fileLikeRoot(directoryArtifact, DONT_CROSS);
// Assert that the SkyValue is built and looks right.
ResolvedFile expected1 = regularFile(file1, EMPTY_METADATA);
ResolvedFile expected2 = regularFile(file2, EMPTY_METADATA);
RecursiveFilesystemTraversalValue v1 =
traverseAndAssertFiles(traversalRoot, expected1, expected2);
assertThat(progressReceiver.invalidations).isEmpty();
assertThat(progressReceiver.evaluations).contains(traversalRoot);
progressReceiver.clear();
// Add a new file to the directory and see that the value is rebuilt.
TimestampGranularityUtils.waitForTimestampGranularity(
directoryArtifact.getPath().stat().getLastChangeTime(), OutErr.SYSTEM_OUT_ERR);
RootedPath file3 = createFile(childOf(directoryArtifact, "foo.txt"));
if (directoryArtifact.isTreeArtifact()) {
addNewTreeFileArtifact((SpecialArtifact) directoryArtifact, "foo.txt");
}
if (directoryArtifact.isSourceArtifact()) {
invalidateDirectory(directoryArtifact);
} else {
invalidateOutputArtifact(directoryArtifact);
}
ResolvedFile expected3 = regularFile(file3, EMPTY_METADATA);
RecursiveFilesystemTraversalValue v2 =
traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3);
assertThat(progressReceiver.invalidations).contains(traversalRoot);
assertThat(progressReceiver.evaluations).contains(traversalRoot);
// Directories always have the same hash code, but that is fine because their contents are also
// part of the RecursiveFilesystemTraversalValue, so v1 and v2 are unequal.
assertThat(v2).isNotEqualTo(v1);
assertTraversalRootHashesAreEqual(v1, v2);
progressReceiver.clear();
// Edit a file in the directory and see that the value is rebuilt.
RecursiveFilesystemTraversalValue v3;
if (directoryArtifact.isSourceArtifact()) {
SkyKey toInvalidate = FileStateValue.key(file1);
appendToFile(file1, toInvalidate, "bar");
v3 = traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3);
assertThat(progressReceiver.invalidations).contains(traversalRoot);
assertThat(progressReceiver.evaluations).contains(traversalRoot);
assertThat(v3).isNotEqualTo(v2);
// Directories always have the same hash code, but that is fine because their contents are
// also part of the RecursiveFilesystemTraversalValue, so v2 and v3 are unequal.
assertTraversalRootHashesAreEqual(v2, v3);
progressReceiver.clear();
} else {
// Dependency checking of output directories is unsound. Specifically, the directory mtime
// is not changed when a contained file is modified.
v3 = v2;
}
// Add a new file *outside* of the directory and see that the value is *not* rebuilt.
Artifact someFile = sourceArtifact("somewhere/else/a.file");
createFile(someFile, "new file");
appendToFile(someFile, "not all changes are treated equal");
RecursiveFilesystemTraversalValue v4 =
traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3);
assertThat(v4).isEqualTo(v3);
assertTraversalRootHashesAreEqual(v3, v4);
assertThat(progressReceiver.invalidations).doesNotContain(traversalRoot);
}
@Test
public void testTraversalOfSourceDirectory() throws Exception {
assertTraversalOfDirectory(sourceArtifact("dir"));
}
@Test
public void testTraversalOfSourceTreeArtifact() throws Exception {
assertTraversalOfDirectory(treeArtifact("dir"));
}
// Note that in actual Bazel derived artifact directories are not checked for modifications on
// incremental builds, so this test is testing a feature that Bazel does not have. It's included
// aspirationally.
@Test
public void testTraversalOfGeneratedDirectory() throws Exception {
assertTraversalOfDirectory(derivedArtifact("dir"));
}
@Test
public void testTraversalOfTransitiveSymlinkToDirectory() throws Exception {
Artifact directLinkArtifact = sourceArtifact("direct/dir.sym");
Artifact transitiveLinkArtifact = sourceArtifact("transitive/sym.sym");
RootedPath fileA = createFile(rootedPath(sourceArtifact("a/file.a")));
RootedPath directLink = rootedPath(directLinkArtifact);
RootedPath transitiveLink = rootedPath(transitiveLinkArtifact);
PathFragment directLinkPath = PathFragment.create("../a");
PathFragment transitiveLinkPath = PathFragment.create("../direct/dir.sym");
parentOf(directLink).asPath().createDirectory();
parentOf(transitiveLink).asPath().createDirectory();
directLink.asPath().createSymbolicLink(directLinkPath);
transitiveLink.asPath().createSymbolicLink(transitiveLinkPath);
    // Expect the file as if it were a child of the direct symlink, not of the actual directory.
traverseAndAssertFiles(
fileLikeRoot(directLinkArtifact, DONT_CROSS),
symlinkToDirectory(parentOf(fileA), directLink, directLinkPath, EMPTY_METADATA),
regularFile(childOf(directLinkArtifact, "file.a"), EMPTY_METADATA));
    // Expect the file as if it were a child of the transitive symlink, not of the actual directory.
traverseAndAssertFiles(
fileLikeRoot(transitiveLinkArtifact, DONT_CROSS),
symlinkToDirectory(parentOf(fileA), transitiveLink, transitiveLinkPath, EMPTY_METADATA),
regularFile(childOf(transitiveLinkArtifact, "file.a"), EMPTY_METADATA));
}
@Test
public void testTraversePackage() throws Exception {
Artifact buildFile = sourceArtifact("pkg/BUILD");
RootedPath buildFilePath = createFile(rootedPath(buildFile));
RootedPath file1 = createFile(siblingOf(buildFile, "subdir/file.a"));
traverseAndAssertFiles(
pkgRoot(parentOf(buildFilePath), DONT_CROSS),
regularFile(buildFilePath, EMPTY_METADATA),
regularFile(file1, EMPTY_METADATA));
}
@Test
public void testTraversalOfSymlinkToDirectory() throws Exception {
Artifact linkNameArtifact = sourceArtifact("link/foo.sym");
Artifact linkTargetArtifact = sourceArtifact("dir");
RootedPath linkName = rootedPath(linkNameArtifact);
PathFragment linkValue = PathFragment.create("../dir");
RootedPath file1 = createFile(childOf(linkTargetArtifact, "file.1"));
createFile(childOf(linkTargetArtifact, "sub/file.2"));
scratch.dir(parentOf(linkName).asPath().getPathString());
linkName.asPath().createSymbolicLink(linkValue);
// Assert that the SkyValue is built and looks right.
TraversalRequest traversalRoot = fileLikeRoot(linkNameArtifact, DONT_CROSS);
ResolvedFile expected1 =
symlinkToDirectory(rootedPath(linkTargetArtifact), linkName, linkValue, EMPTY_METADATA);
ResolvedFile expected2 = regularFile(childOf(linkNameArtifact, "file.1"), EMPTY_METADATA);
ResolvedFile expected3 = regularFile(childOf(linkNameArtifact, "sub/file.2"), EMPTY_METADATA);
// We expect to see all the files from the symlink'd directory, under the symlink's path, not
// under the symlink target's path.
RecursiveFilesystemTraversalValue v1 =
traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3);
assertThat(progressReceiver.invalidations).isEmpty();
assertThat(progressReceiver.evaluations).contains(traversalRoot);
progressReceiver.clear();
// Add a new file to the directory and see that the value is rebuilt.
createFile(childOf(linkTargetArtifact, "file.3"));
invalidateDirectory(linkTargetArtifact);
ResolvedFile expected4 = regularFile(childOf(linkNameArtifact, "file.3"), EMPTY_METADATA);
RecursiveFilesystemTraversalValue v2 =
traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3, expected4);
assertThat(progressReceiver.invalidations).contains(traversalRoot);
assertThat(progressReceiver.evaluations).contains(traversalRoot);
assertThat(v2).isNotEqualTo(v1);
assertTraversalRootHashesAreNotEqual(v1, v2);
progressReceiver.clear();
// Edit a file in the directory and see that the value is rebuilt.
appendToFile(file1, "bar");
RecursiveFilesystemTraversalValue v3 =
traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3, expected4);
assertThat(progressReceiver.invalidations).contains(traversalRoot);
assertThat(progressReceiver.evaluations).contains(traversalRoot);
assertThat(v3).isNotEqualTo(v2);
assertTraversalRootHashesAreNotEqual(v2, v3);
progressReceiver.clear();
// Add a new file *outside* of the directory and see that the value is *not* rebuilt.
Artifact someFile = sourceArtifact("somewhere/else/a.file");
createFile(someFile, "new file");
appendToFile(someFile, "not all changes are treated equal");
RecursiveFilesystemTraversalValue v4 =
traverseAndAssertFiles(traversalRoot, expected1, expected2, expected3, expected4);
assertThat(v4).isEqualTo(v3);
assertTraversalRootHashesAreEqual(v3, v4);
assertThat(progressReceiver.invalidations).doesNotContain(traversalRoot);
}
@Test
public void testTraversalOfDanglingSymlink() throws Exception {
Artifact linkArtifact = sourceArtifact("a/dangling.sym");
RootedPath link = rootedPath(linkArtifact);
PathFragment linkTarget = PathFragment.create("non_existent");
parentOf(link).asPath().createDirectory();
link.asPath().createSymbolicLink(linkTarget);
traverseAndAssertFiles(
fileLikeRoot(linkArtifact, DONT_CROSS), danglingSymlink(link, linkTarget, EMPTY_METADATA));
}
@Test
public void testTraversalOfDanglingSymlinkInADirectory() throws Exception {
Artifact dirArtifact = sourceArtifact("a");
RootedPath file = createFile(childOf(dirArtifact, "file.txt"));
RootedPath link = rootedPath(sourceArtifact("a/dangling.sym"));
PathFragment linkTarget = PathFragment.create("non_existent");
parentOf(link).asPath().createDirectory();
link.asPath().createSymbolicLink(linkTarget);
traverseAndAssertFiles(
fileLikeRoot(dirArtifact, DONT_CROSS),
regularFile(file, EMPTY_METADATA),
danglingSymlink(link, linkTarget, EMPTY_METADATA));
}
private void assertTraverseSubpackages(PackageBoundaryMode traverseSubpackages) throws Exception {
Artifact pkgDirArtifact = sourceArtifact("pkg1/foo");
Artifact subpkgDirArtifact = sourceArtifact("pkg1/foo/subdir/subpkg");
RootedPath pkgBuildFile = childOf(pkgDirArtifact, "BUILD");
RootedPath subpkgBuildFile = childOf(subpkgDirArtifact, "BUILD");
scratch.dir(rootedPath(pkgDirArtifact).asPath().getPathString());
scratch.dir(rootedPath(subpkgDirArtifact).asPath().getPathString());
createFile(pkgBuildFile);
createFile(subpkgBuildFile);
TraversalRequest traversalRoot = pkgRoot(parentOf(pkgBuildFile), traverseSubpackages);
ResolvedFile expected1 = regularFile(pkgBuildFile, EMPTY_METADATA);
ResolvedFile expected2 = regularFile(subpkgBuildFile, EMPTY_METADATA);
switch (traverseSubpackages) {
case CROSS:
traverseAndAssertFiles(traversalRoot, expected1, expected2);
break;
case DONT_CROSS:
traverseAndAssertFiles(traversalRoot, expected1);
break;
case REPORT_ERROR:
SkyKey key = traversalRoot;
EvaluationResult<SkyValue> result = eval(key);
assertThat(result.hasError()).isTrue();
assertThat(result.getError().getException())
.hasMessageThat()
.contains("crosses package boundary into package rooted at");
break;
default:
throw new IllegalStateException(traverseSubpackages.toString());
}
}
@Test
public void testTraverseSubpackages() throws Exception {
assertTraverseSubpackages(CROSS);
}
@Test
public void testDoNotTraverseSubpackages() throws Exception {
assertTraverseSubpackages(DONT_CROSS);
}
@Test
public void testReportErrorWhenTraversingSubpackages() throws Exception {
assertTraverseSubpackages(REPORT_ERROR);
}
@Test
public void testSwitchPackageRootsWhenUsingMultiplePackagePaths() throws Exception {
// Layout:
// pp1://a/BUILD
// pp1://a/file.a
// pp1://a/b.sym -> b/ (only created later)
// pp1://a/b/
// pp1://a/b/file.fake
// pp1://a/subdir/file.b
//
// pp2://a/BUILD
// pp2://a/b/
// pp2://a/b/BUILD
// pp2://a/b/file.a
// pp2://a/subdir.fake/
// pp2://a/subdir.fake/file.fake
//
// Notice that pp1://a/b will be overlaid by pp2://a/b as the latter has a BUILD file and that
// takes precedence. On the other hand the package definition pp2://a/BUILD will be ignored
// since package //a is already defined under pp1.
//
// Notice also that pp1://a/b.sym is a relative symlink pointing to b/. This should be resolved
// to the definition of //a/b/ under pp1, not under pp2.
// Set the package paths.
pkgLocator.set(
new PathPackageLocator(
outputBase,
ImmutableList.of(
Root.fromPath(rootDirectory.getRelative("pp1")),
Root.fromPath(rootDirectory.getRelative("pp2"))),
BazelSkyframeExecutorConstants.BUILD_FILES_BY_PRIORITY));
PrecomputedValue.PATH_PACKAGE_LOCATOR.set(differencer, pkgLocator.get());
Artifact aBuildArtifact = sourceArtifactUnderPackagePath("a/BUILD", "pp1");
Artifact bBuildArtifact = sourceArtifactUnderPackagePath("a/b/BUILD", "pp2");
RootedPath pp1aBuild = createFile(rootedPath(aBuildArtifact));
RootedPath pp1aFileA = createFile(siblingOf(pp1aBuild, "file.a"));
RootedPath pp1bFileFake = createFile(siblingOf(pp1aBuild, "b/file.fake"));
RootedPath pp1aSubdirFileB = createFile(siblingOf(pp1aBuild, "subdir/file.b"));
RootedPath pp2aBuild = createFile(rootedPath("a/BUILD", "pp2"));
RootedPath pp2bBuild = createFile(rootedPath(bBuildArtifact));
RootedPath pp2bFileA = createFile(siblingOf(pp2bBuild, "file.a"));
createFile(siblingOf(pp2aBuild, "subdir.fake/file.fake"));
// Traverse //a including subpackages. The result should contain the pp1-definition of //a and
// the pp2-definition of //a/b.
traverseAndAssertFiles(
pkgRoot(parentOf(rootedPath(aBuildArtifact)), CROSS),
regularFile(pp1aBuild, EMPTY_METADATA),
regularFile(pp1aFileA, EMPTY_METADATA),
regularFile(pp1aSubdirFileB, EMPTY_METADATA),
regularFile(pp2bBuild, EMPTY_METADATA),
regularFile(pp2bFileA, EMPTY_METADATA));
// Traverse //a excluding subpackages. The result should only contain files from //a and not
// from //a/b.
traverseAndAssertFiles(
pkgRoot(parentOf(rootedPath(aBuildArtifact)), DONT_CROSS),
regularFile(pp1aBuild, EMPTY_METADATA),
regularFile(pp1aFileA, EMPTY_METADATA),
regularFile(pp1aSubdirFileB, EMPTY_METADATA));
// Create a relative symlink pp1://a/b.sym -> b/. It will be resolved to the subdirectory
// pp1://a/b, even though a package definition pp2://a/b exists.
RootedPath pp1aBsym = siblingOf(pp1aFileA, "b.sym");
pp1aBsym.asPath().createSymbolicLink(PathFragment.create("b"));
invalidateDirectory(parentOf(pp1aBsym));
// Traverse //a excluding subpackages. The relative symlink //a/b.sym points to the subdirectory
// a/b, i.e. the pp1-definition, even though there is a pp2-defined package //a/b, so we expect
// to see b.sym/file.fake (not b/file.fake).
traverseAndAssertFiles(
pkgRoot(parentOf(rootedPath(aBuildArtifact)), DONT_CROSS),
regularFile(pp1aBuild, EMPTY_METADATA),
regularFile(pp1aFileA, EMPTY_METADATA),
regularFile(childOf(pp1aBsym, "file.fake"), EMPTY_METADATA),
symlinkToDirectory(
parentOf(pp1bFileFake), pp1aBsym, PathFragment.create("b"), EMPTY_METADATA),
regularFile(pp1aSubdirFileB, EMPTY_METADATA));
}
@Test
public void testFileDigestChangeCausesRebuild() throws Exception {
Artifact artifact = sourceArtifact("foo/bar.txt");
RootedPath path = rootedPath(artifact);
createFile(path, "hello");
// Assert that the SkyValue is built and looks right.
TraversalRequest params = fileLikeRoot(artifact, DONT_CROSS);
ResolvedFile expected = regularFile(path, EMPTY_METADATA);
RecursiveFilesystemTraversalValue v1 = traverseAndAssertFiles(params, expected);
assertThat(progressReceiver.evaluations).contains(params);
progressReceiver.clear();
// Change the digest of the file. See that the value is rebuilt.
appendToFile(path, "world");
RecursiveFilesystemTraversalValue v2 = traverseAndAssertFiles(params, expected);
assertThat(progressReceiver.invalidations).contains(params);
assertThat(v2).isNotEqualTo(v1);
assertTraversalRootHashesAreNotEqual(v1, v2);
}
@Test
public void testFileMtimeChangeDoesNotCauseRebuildIfDigestIsUnchanged() throws Exception {
Artifact artifact = sourceArtifact("foo/bar.txt");
RootedPath path = rootedPath(artifact);
createFile(path, "hello");
// Assert that the SkyValue is built and looks right.
TraversalRequest params = fileLikeRoot(artifact, DONT_CROSS);
ResolvedFile expected = regularFile(path, EMPTY_METADATA);
RecursiveFilesystemTraversalValue v1 = traverseAndAssertFiles(params, expected);
assertThat(progressReceiver.evaluations).contains(params);
progressReceiver.clear();
// Change the mtime of the file but not the digest. See that the value is *not* rebuilt.
TimestampGranularityUtils.waitForTimestampGranularity(
path.asPath().stat().getLastChangeTime(), OutErr.SYSTEM_OUT_ERR);
path.asPath().setLastModifiedTime(System.currentTimeMillis());
RecursiveFilesystemTraversalValue v2 = traverseAndAssertFiles(params, expected);
assertThat(v2).isEqualTo(v1);
assertTraversalRootHashesAreEqual(v1, v2);
}
@Test
public void testGeneratedDirectoryConflictsWithPackage() throws Exception {
Artifact genDir = derivedArtifact("a/b");
createFile(rootedPath(sourceArtifact("a/b/c/file.real")));
createFile(rootedPath(derivedArtifact("a/b/c/file.fake")));
createFile(sourceArtifact("a/b/c/BUILD"));
SkyKey key = fileLikeRoot(genDir, CROSS);
EvaluationResult<SkyValue> result = eval(key);
assertThat(result.hasError()).isTrue();
ErrorInfo error = result.getError(key);
assertThat(error.isTransitivelyTransient()).isFalse();
assertThat(error.getException())
.hasMessageThat()
.contains("Generated directory a/b/c conflicts with package under the same path.");
}
@Test
public void unboundedSymlinkExpansionError() throws Exception {
Artifact bazLink = sourceArtifact("foo/baz.sym");
Path parentDir = scratch.dir("foo");
bazLink.getPath().createSymbolicLink(parentDir);
SkyKey key = pkgRoot(parentOf(rootedPath(bazLink)), DONT_CROSS);
EvaluationResult<SkyValue> result = eval(key);
assertThat(result.hasError()).isTrue();
ErrorInfo error = result.getError(key);
assertThat(error.getException()).isInstanceOf(RecursiveFilesystemTraversalException.class);
assertThat(((RecursiveFilesystemTraversalException) error.getException()).getType())
.isEqualTo(RecursiveFilesystemTraversalException.Type.FILE_OPERATION_FAILURE);
assertThat(error.getException()).hasMessageThat().contains("Infinite symlink expansion");
}
@Test
public void symlinkChainError() throws Exception {
scratch.dir("a");
Artifact fooLink = sourceArtifact("a/foo.sym");
Artifact barLink = sourceArtifact("a/bar.sym");
Artifact bazLink = sourceArtifact("a/baz.sym");
fooLink.getPath().createSymbolicLink(barLink.getPath());
barLink.getPath().createSymbolicLink(bazLink.getPath());
bazLink.getPath().createSymbolicLink(fooLink.getPath());
SkyKey key = pkgRoot(parentOf(rootedPath(bazLink)), DONT_CROSS);
EvaluationResult<SkyValue> result = eval(key);
assertThat(result.hasError()).isTrue();
ErrorInfo error = result.getError(key);
assertThat(error.getException()).isInstanceOf(RecursiveFilesystemTraversalException.class);
assertThat(((RecursiveFilesystemTraversalException) error.getException()).getType())
.isEqualTo(RecursiveFilesystemTraversalException.Type.FILE_OPERATION_FAILURE);
assertThat(error.getException()).hasMessageThat().contains("Symlink cycle");
}
private static final class NonHermeticArtifactFakeFunction implements SkyFunction {
private TreeArtifactValue.Builder tree;
@Override
public SkyValue compute(SkyKey skyKey, Environment env) throws SkyFunctionException {
try {
if (skyKey.argument() instanceof Artifact
&& ((Artifact) skyKey.argument()).isTreeArtifact()) {
return tree.build();
}
return FileArtifactValue.createForTesting(((Artifact) skyKey.argument()).getPath());
} catch (IOException e) {
throw new SkyFunctionException(e, Transience.PERSISTENT){};
}
}
@Nullable
@Override
public String extractTag(SkyKey skyKey) {
return null;
}
void addNewTreeFileArtifact(TreeFileArtifact input) throws IOException {
if (tree == null) {
tree = TreeArtifactValue.newBuilder(input.getParent());
}
tree.putChild(input, FileArtifactValue.createForTesting(input.getPath()));
}
}
private static final class ArtifactFakeFunction implements SkyFunction {
@Override
public SkyValue compute(SkyKey skyKey, Environment env) throws InterruptedException {
return env.getValue(new NonHermeticArtifactSkyKey(skyKey));
}
@Nullable
@Override
public String extractTag(SkyKey skyKey) {
return null;
}
}
private final class ActionFakeFunction implements SkyFunction {
@Nullable
@Override
public SkyValue compute(SkyKey skyKey, Environment env) throws InterruptedException {
return env.getValue(
new NonHermeticArtifactSkyKey(
Preconditions.checkNotNull(
artifacts.get(((ActionLookupData) skyKey).getActionIndex()), skyKey)));
}
@Nullable
@Override
public String extractTag(SkyKey skyKey) {
return null;
}
}
@Test
public void testFileArtifactValueRetainsData() throws Exception {
Artifact artifact = derivedArtifact("foo/fooy.txt");
Artifact strictArtifact = derivedArtifact("goo/gooy.txt");
createFile(rootedPath(artifact), "fooy");
createFile(rootedPath(strictArtifact), "gooy");
TraversalRequest request = fileLikeRoot(artifact, DONT_CROSS, false);
TraversalRequest strictRequest = fileLikeRoot(strictArtifact, DONT_CROSS, true);
EvaluationResult<RecursiveFilesystemTraversalValue> result = eval(request);
EvaluationResult<RecursiveFilesystemTraversalValue> strictResult = eval(strictRequest);
assertThat(result.values()).hasSize(1);
assertThat(strictResult.values()).hasSize(1);
RecursiveFilesystemTraversalValue value = result.values().iterator().next();
RecursiveFilesystemTraversalValue strictValue = strictResult.values().iterator().next();
ResolvedFile resolvedFile = value.getResolvedRoot().get();
ResolvedFile strictResolvedFile = strictValue.getResolvedRoot().get();
assertThat(resolvedFile.getMetadata()).isInstanceOf(FileArtifactValue.class);
assertThat(strictResolvedFile.getMetadata()).isInstanceOf(FileArtifactValue.class);
}
@Test
public void testWithDigestFileArtifactValue() throws Exception {
// File artifacts return the same bytes they were initialized with
byte[] expectedBytes = new byte[] {1, 2, 3};
FileArtifactValue fav = FileArtifactValue.createForVirtualActionInput(expectedBytes, 10L);
HasDigest result = RecursiveFilesystemTraversalFunction.withDigest(fav, null);
assertThat(result).isInstanceOf(FileArtifactValue.class);
assertThat(result.getDigest()).isEqualTo(expectedBytes);
// Directories do not have a digest, but the result will have a fingerprinted digest
FileArtifactValue directoryFav = FileArtifactValue.createForDirectoryWithMtime(10L);
HasDigest directoryResult = RecursiveFilesystemTraversalFunction.withDigest(directoryFav, null);
assertThat(directoryResult).isInstanceOf(HasDigest.ByteStringDigest.class);
assertThat(directoryResult.getDigest()).isNotNull();
}
@Test
public void testWithDigestFileStateValue() throws Exception {
// A RegularFileStateValue with an actual digest is transformed into a FileArtifactValue with the same digest
byte[] expectedBytes = new byte[] {1, 2, 3};
RegularFileStateValue withDigest =
new RegularFileStateValue(10L, expectedBytes, /* contentsProxy */ null);
HasDigest result = RecursiveFilesystemTraversalFunction.withDigest(withDigest, null);
assertThat(result).isInstanceOf(FileArtifactValue.class);
assertThat(result.getDigest()).isEqualTo(expectedBytes);
// A FileStateValue is transformed into a value with a fingerprinted digest
RootedPath rootedPath = rootedPath("bar", "foo");
FileStateValue fsv = FileStateValue.create(rootedPath, null);
HasDigest fsvResult = RecursiveFilesystemTraversalFunction.withDigest(fsv, null);
assertThat(fsvResult).isInstanceOf(HasDigest.ByteStringDigest.class);
assertThat(fsvResult.getDigest()).isNotNull();
}
@Test
public void testRegularFileStateValueWithoutDigest() throws Exception {
Artifact artifact = derivedArtifact("foo/fooy.txt");
RootedPath rootedPath = rootedPath(artifact);
createFile(rootedPath, "fooy-content");
FileStatus status = rootedPath.asPath().stat();
RegularFileStateValue withoutDigest =
    new RegularFileStateValue(
        status.getSize(), /* digest= */ null, /* contentsProxy= */ FileContentsProxy.create(status));
HasDigest withoutDigestResult =
RecursiveFilesystemTraversalFunction.withDigest(withoutDigest, rootedPath.asPath());
// withDigest will construct a FileArtifactValue using the Path
assertThat(withoutDigestResult).isInstanceOf(FileArtifactValue.class);
assertThat(withoutDigestResult.getDigest()).isNotNull();
}
@Test
public void testWithDigestByteStringDigest() throws Exception {
byte[] expectedBytes = new byte[] {1, 2, 3};
HasDigest.ByteStringDigest byteStringDigest = new HasDigest.ByteStringDigest(expectedBytes);
HasDigest result = RecursiveFilesystemTraversalFunction.withDigest(byteStringDigest, null);
assertThat(result).isInstanceOf(HasDigest.ByteStringDigest.class);
assertThat(result.getDigest()).isEqualTo(expectedBytes);
}
private static class NonHermeticArtifactSkyKey extends AbstractSkyKey<SkyKey> {
private NonHermeticArtifactSkyKey(SkyKey arg) {
super(arg);
}
@Override
public SkyFunctionName functionName() {
return NONHERMETIC_ARTIFACT;
}
}
private static final SkyFunctionName NONHERMETIC_ARTIFACT =
SkyFunctionName.createNonHermetic("NONHERMETIC_ARTIFACT");
}
"""add dem schema
Revision ID: c82f8499050e
Revises: <KEY>
Create Date: 2021-03-16 12:34:20.205965
"""
import os
from alembic import op
import sqlalchemy as sa
from api.config import ENV_DEV, WALLY_ENV
# revision identifiers, used by Alembic.
revision = 'c82f8499050e'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
op.execute("create schema if not exists dem")
if WALLY_ENV == ENV_DEV:
op.execute("""
CREATE TABLE dem.cdem ("rid" serial PRIMARY KEY,"rast" raster);
""")
dirname = os.path.dirname(__file__)
with open(os.path.join(dirname, "sql", "cdem_dev.sql")) as sql_file:
    cdem_insert_dev_data = sql_file.read()
op.execute(cdem_insert_dev_data)
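# A GiST index on the raster convex hulls lets PostGIS answer spatial
# intersection queries against the DEM tiles efficiently.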
op.execute("""
CREATE INDEX ON dem.cdem USING gist (st_convexhull("rast"));
""")
def downgrade():
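    # Drop the schema together with any dev-only objects created in upgrade().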
op.execute("drop schema dem")
/**
* The JavaMail authenticator object. Needed for cases where we need to perform
* authenticated logins to be able to send mails.
*/
private class SMTPAuthenticator extends javax.mail.Authenticator
{
@Override
public PasswordAuthentication getPasswordAuthentication()
{
return new PasswordAuthentication(userName, passWord);
}
}
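A minimal usage sketch, not part of the original class: the authenticator is handed to the JavaMail session so that sends are authenticated. The host field is an assumption, like userName and passWord, of the enclosing mailer class.

Properties props = new Properties();
props.put("mail.smtp.host", host);
props.put("mail.smtp.auth", "true");
// JavaMail consults the Authenticator when the SMTP server asks for credentials.
Session session = Session.getInstance(props, new SMTPAuthenticator());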
import cryptoRandomString from 'crypto-random-string';
import express from 'express';
import { GraphQLBoolean, GraphQLFloat, GraphQLList, GraphQLNonNull, GraphQLObjectType, GraphQLString } from 'graphql';
import { GraphQLJSON } from 'graphql-type-json';
import { cloneDeep, set } from 'lodash';
import plans from '../../../constants/plans';
import cache, { purgeAllCachesForAccount, purgeGQLCacheForCollective } from '../../../lib/cache';
import { purgeCacheForPage } from '../../../lib/cloudflare';
import { invalidateContributorsCache } from '../../../lib/contributors';
import { crypto } from '../../../lib/encryption';
import { mergeAccounts, simulateMergeAccounts } from '../../../lib/merge-accounts';
import { verifyTwoFactorAuthenticatorCode } from '../../../lib/two-factor-authentication';
import models, { sequelize } from '../../../models';
import { Forbidden, NotFound, Unauthorized, ValidationFailed } from '../../errors';
import { AccountCacheType } from '../enum/AccountCacheType';
import { AccountTypeToModelMapping } from '../enum/AccountType';
import { idDecode } from '../identifiers';
import { AccountReferenceInput, fetchAccountWithReference } from '../input/AccountReferenceInput';
import { AccountUpdateInput } from '../input/AccountUpdateInput';
import { Account } from '../interface/Account';
import { Host } from '../object/Host';
import { Individual } from '../object/Individual';
import { MergeAccountsResponse } from '../object/MergeAccountsResponse';
import AccountSettingsKey from '../scalar/AccountSettingsKey';
const AddTwoFactorAuthTokenToIndividualResponse = new GraphQLObjectType({
name: 'AddTwoFactorAuthTokenToIndividualResponse',
description: 'Response for the addTwoFactorAuthTokenToIndividual mutation',
fields: () => ({
account: {
type: new GraphQLNonNull(Individual),
description: 'The Individual that the 2FA has been enabled for',
},
recoveryCodes: {
type: new GraphQLList(GraphQLString),
description: 'The recovery codes for the Individual to write down',
},
}),
});
const accountMutations = {
editAccountSetting: {
type: new GraphQLNonNull(Account),
description: 'Edit the settings for the given account',
args: {
account: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account where the settings will be updated',
},
key: {
type: new GraphQLNonNull(AccountSettingsKey),
description: 'The key that you want to edit in settings',
},
value: {
type: new GraphQLNonNull(GraphQLJSON),
description: 'The value to set for this key',
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser) {
throw new Unauthorized();
}
return sequelize.transaction(async transaction => {
const account = await fetchAccountWithReference(args.account, {
dbTransaction: transaction,
lock: true,
throwIfMissing: true,
});
const isKeyEditableByHostAdmins = ['expenseTypes'].includes(args.key);
const permissionMethod = isKeyEditableByHostAdmins ? 'isAdminOfCollectiveOrHost' : 'isAdminOfCollective';
if (!req.remoteUser[permissionMethod](account)) {
throw new Forbidden();
}
if (
args.key === 'collectivePage' &&
![AccountTypeToModelMapping.FUND, AccountTypeToModelMapping.PROJECT].includes(account.type)
) {
const budgetSection = args.value.sections?.find(s => s.section === 'budget');
if (budgetSection && !budgetSection.isEnabled) {
throw new Forbidden();
}
}
const settings = account.settings ? cloneDeep(account.settings) : {};
set(settings, args.key, args.value);
return account.update({ settings }, { transaction });
});
},
},
editAccountFeeStructure: {
type: new GraphQLNonNull(Account),
    description: 'An endpoint for hosts to edit the fee structure of their hosted accounts',
args: {
account: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account where the settings will be updated',
},
hostFeePercent: {
type: new GraphQLNonNull(GraphQLFloat),
description: 'The host fee percent to apply to this account',
},
isCustomFee: {
type: new GraphQLNonNull(GraphQLBoolean),
description: 'If using a custom fee, set this to true',
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
return sequelize.transaction(async dbTransaction => {
const account = await fetchAccountWithReference(args.account, {
throwIfMissing: true,
dbTransaction,
lock: true,
});
if (!account.HostCollectiveId) {
          throw new ValidationFailed('Fee structure can only be edited for accounts that you are hosting');
} else if (!req.remoteUser?.isAdmin(account.HostCollectiveId)) {
throw new Forbidden(
            'You need to be logged in as a host admin to change the fee structure of the hosted accounts',
);
} else if (!account.approvedAt) {
          throw new ValidationFailed('The collective needs to be approved before you can change the fee structure');
}
return account.update(
{
hostFeePercent: args.hostFeePercent,
data: { ...account.data, useCustomHostFee: args.isCustomFee },
},
{ transaction: dbTransaction },
);
});
},
},
addTwoFactorAuthTokenToIndividual: {
type: new GraphQLNonNull(AddTwoFactorAuthTokenToIndividualResponse),
description: 'Add 2FA to the Individual if it does not have it',
args: {
account: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Individual that will have 2FA added to it',
},
token: {
type: new GraphQLNonNull(GraphQLString),
description: 'The generated secret to save to the Individual',
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser) {
throw new Unauthorized();
}
const account = await fetchAccountWithReference(args.account);
if (!req.remoteUser.isAdminOfCollective(account)) {
throw new Forbidden();
}
const user = await models.User.findOne({ where: { CollectiveId: account.id } });
if (!user) {
throw new NotFound('Account not found.');
}
if (user.twoFactorAuthToken !== null) {
throw new Unauthorized('This account already has 2FA enabled.');
}
      // Check that the base32 secret contains only capital letters and digits 2-7, and is
      // 103 characters long. The secret is 64 ASCII characters, which encode to 104 base32
      // characters (base32 output is padded to a multiple of 8), but the trailing '='
      // padding character is stripped by the speakeasy library, leaving 103.
      const verifyToken = args.token.match(/^[A-Z2-7]{103}$/);
if (!verifyToken) {
throw new ValidationFailed('Invalid 2FA token');
}
const encryptedText = crypto.encrypt(args.token);
/** Generate recovery codes, hash and store them in the table, and return them to the user to write down */
const recoveryCodesArray = Array.from({ length: 6 }, () =>
cryptoRandomString({ length: 16, type: 'distinguishable' }),
);
const hashedRecoveryCodesArray = recoveryCodesArray.map(code => {
return crypto.hash(code);
});
await user.update({ twoFactorAuthToken: encryptedText, twoFactorAuthRecoveryCodes: hashedRecoveryCodesArray });
return { account: account, recoveryCodes: recoveryCodesArray };
},
},
removeTwoFactorAuthTokenFromIndividual: {
type: new GraphQLNonNull(Individual),
description: 'Remove 2FA from the Individual if it has been enabled',
args: {
account: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account that will have 2FA removed from it',
},
code: {
type: new GraphQLNonNull(GraphQLString),
description: 'The 6-digit 2FA code',
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser) {
throw new Unauthorized();
}
const account = await fetchAccountWithReference(args.account);
if (!req.remoteUser.isAdminOfCollective(account)) {
throw new Forbidden();
}
const user = await models.User.findOne({ where: { CollectiveId: account.id } });
if (!user) {
throw new NotFound('Account not found.');
}
if (!user.twoFactorAuthToken) {
throw new Unauthorized('This account already has 2FA disabled.');
}
const verified = verifyTwoFactorAuthenticatorCode(user.twoFactorAuthToken, args.code);
if (!verified) {
throw new Unauthorized('Two-factor authentication code failed. Please try again');
}
await user.update({ twoFactorAuthToken: null, twoFactorAuthRecoveryCodes: null });
return account;
},
},
editHostPlan: {
type: new GraphQLNonNull(Host),
description: 'Update the plan',
args: {
account: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account where the host plan will be edited.',
},
plan: {
type: new GraphQLNonNull(GraphQLString),
description: 'The name of the plan to subscribe to.',
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser) {
throw new Unauthorized();
}
const account = await fetchAccountWithReference(args.account);
if (!req.remoteUser.isAdminOfCollective(account)) {
throw new Forbidden();
}
if (!account.isHostAccount) {
throw new Error(`Only Fiscal Hosts can set their plan.`);
}
const plan = args.plan;
if (!plans[plan]) {
throw new Error(`Unknown plan: ${plan}`);
}
await account.update({ plan });
if (plan === 'start-plan-2021') {
// This should cascade to all Collectives
await account.updateHostFee(0, req.remoteUser);
}
if (plan === 'start-plan-2021' || plan === 'grow-plan-2021') {
// This should cascade to all Collectives
await account.updatePlatformFee(0, req.remoteUser);
// Make sure budget is activated
await account.activateBudget();
}
await cache.del(`plan_${account.id}`);
return account;
},
},
editAccount: {
type: new GraphQLNonNull(Host),
description: 'Edit key properties of an account.',
args: {
account: {
type: new GraphQLNonNull(AccountUpdateInput),
description: 'Account to edit.',
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser) {
throw new Unauthorized();
}
const id = idDecode(args.account.id, 'account');
const account = await req.loaders.Collective.byId.load(id);
if (!account) {
throw new NotFound('Account Not Found');
}
if (!req.remoteUser.isAdminOfCollective(account) && !req.remoteUser.isRoot()) {
throw new Forbidden();
}
for (const key of Object.keys(args.account)) {
switch (key) {
case 'currency':
await account.setCurrency(args.account[key]);
}
}
return account;
},
},
clearCacheForAccount: {
type: new GraphQLNonNull(Account),
description: '[Root only] Clears the cache for a given account',
args: {
account: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account to clear the cache for',
},
type: {
type: new GraphQLNonNull(new GraphQLList(AccountCacheType)),
description: 'Types of cache to clear',
defaultValue: ['CLOUDFLARE', 'GRAPHQL_QUERIES', 'CONTRIBUTORS'],
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser?.isRoot()) {
throw new Forbidden('Only root users can perform this action');
}
const account = await fetchAccountWithReference(args.account, { throwIfMissing: true });
if (args.type.includes('CLOUDFLARE')) {
purgeCacheForPage(`/${account.slug}`);
}
if (args.type.includes('GRAPHQL_QUERIES')) {
purgeGQLCacheForCollective(account.slug);
}
if (args.type.includes('CONTRIBUTORS')) {
await invalidateContributorsCache(account.id);
}
return account;
},
},
mergeAccounts: {
type: new GraphQLNonNull(MergeAccountsResponse),
description: '[Root only] Merge two accounts, returns the result account',
args: {
fromAccount: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account to merge from',
},
toAccount: {
type: new GraphQLNonNull(AccountReferenceInput),
description: 'Account to merge to',
},
dryRun: {
type: new GraphQLNonNull(GraphQLBoolean),
description: 'If true, the result will be simulated and summarized in the response message',
defaultValue: true,
},
},
async resolve(_: void, args, req: express.Request): Promise<Record<string, unknown>> {
if (!req.remoteUser?.isRoot()) {
throw new Forbidden('Only root users can perform this action');
}
const fromAccount = await fetchAccountWithReference(args.fromAccount, { throwIfMissing: true });
const toAccount = await fetchAccountWithReference(args.toAccount, { throwIfMissing: true });
if (args.dryRun) {
const message = await simulateMergeAccounts(fromAccount, toAccount);
return { account: toAccount, message };
} else {
const warnings = await mergeAccounts(fromAccount, toAccount, req.remoteUser.id);
await Promise.all([purgeAllCachesForAccount(fromAccount), purgeAllCachesForAccount(toAccount)]).catch(() => {
// Ignore errors
});
const message = warnings.join('\n');
return { account: await toAccount.reload(), message: message || null };
}
},
},
};
export default accountMutations;
|
// Implementing a validation strategy using the Strategy pattern
import java.util.Arrays;
import java.util.List;

public class StrategyPattern {
public static void main(String[] args) {
String stringToValidate = "SSSSSGGFGFGG";
// old school validators
List<ValidatorStrategy> oldSchoolValidators = Arrays.asList(new isNotEmptyCheck(), new isUpperCase(), new containsSpecialChar());
System.out.println("\nexecuting validators using OLD java way");
for (ValidatorStrategy v : oldSchoolValidators) {
v.validate(stringToValidate);
}
// java 8 validators
Validator v1_IsNotEmptyCheck = new Validator((String s) -> !s.isEmpty());
Validator v2_IsUpperCase = new Validator((String s) -> s.toUpperCase().equals(s));
Validator v3_ContainsSpecialChar = new Validator((String s) -> {
List<Character> specialCharList = Arrays.asList('%', '#');
char[] charArray = s.toCharArray();
for (char chr : charArray) {
if (specialCharList.contains(chr)) {
return true;
}
}
return false;
});
System.out.println("\nexecuting validators using java 8 way");
for (Validator java8Validator : Arrays.asList(v1_IsNotEmptyCheck, v2_IsUpperCase, v3_ContainsSpecialChar)) {
java8Validator.validate(stringToValidate);
}
}
}
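The ValidatorStrategy interface and the Validator wrapper are not shown in the snippet; a minimal reconstruction consistent with how both are used above:

// A functional interface, so it can be implemented both by the old-school
// classes and by the Java 8 lambdas above.
@FunctionalInterface
interface ValidatorStrategy {
    boolean validate(String s);
}

// Thin wrapper delegating to whichever strategy it was constructed with.
class Validator {
    private final ValidatorStrategy strategy;

    Validator(ValidatorStrategy strategy) {
        this.strategy = strategy;
    }

    boolean validate(String s) {
        return strategy.validate(s);
    }
}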
A Novel Carriers Selection Scheme for OFDM with Index Modulation
In this paper, we investigate subcarrier-combination selection for the OFDM-IM system, fully exploiting the transfer-rate and diversity-gain characteristics of OFDM-IM on the basis of the lexicographic ordering principle. For the two resulting optimization problems we give corresponding algorithms, and finally we verify the low complexity and high efficiency of both algorithms.
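The "lexicographic ordering principle" is commonly realized in OFDM-IM via the combinatorial number system: the integer carried by the index bits is mapped one-to-one to a k-subset of the n subcarriers. The following sketch of that mapping is our illustration, not the paper's algorithm; C(n, k) denotes the binomial coefficient.

// Binomial coefficient C(n, k), computed exactly in integer arithmetic.
static long binomial(int n, int k) {
    if (k < 0 || k > n) return 0;
    long r = 1;
    for (int i = 1; i <= k; i++) {
        r = r * (n - k + i) / i; // exact: every prefix product is an integer
    }
    return r;
}

// Maps index in [0, C(n, k)) to the k active subcarrier positions, enumerating
// the combinations in lexicographic order.
static int[] indexToActiveSubcarriers(long index, int n, int k) {
    int[] active = new int[k];
    int c = n;
    for (int i = k; i >= 1; i--) {
        // Find the largest c with C(c, i) <= index.
        c--;
        while (binomial(c, i) > index) {
            c--;
        }
        index -= binomial(c, i);
        active[k - i] = c;
    }
    return active;
}

// For n = 4, k = 2, indices 0..5 yield {1,0}, {2,0}, {2,1}, {3,0}, {3,1}, {3,2}.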
/**
* The type When.
*
* @param <I> the type parameter
* @param <O> the type parameter
*/
public static class When<I, O> implements SerializableFunction<I, O> {
private final SerializablePredicate<? super I> predicate;
private final SerializableFunction<? super I, ? extends O> trueAction;
private SerializableFunction<? super I, ? extends O> falseAction = i -> null;
private When(SerializablePredicate<? super I> predicate, SerializableFunction<? super I, ? extends O> trueAction) {
this.predicate = predicate;
this.trueAction = trueAction;
}
@Override
public O apply(I i) {
return predicate.test(i) ? trueAction.apply(i) : falseAction.apply(i);
}
/**
* Otherwise function.
*
* @param falseAction the false action
* @return the function
*/
public SerializableFunction<I, O> otherwise(SerializableFunction<? super I, ? extends O> falseAction) {
this.falseAction = notNull(falseAction);
return this;
}
/**
* Otherwise function.
*
* @param falseValue the false value
* @return the function
*/
public SerializableFunction<I, O> otherwise(O falseValue) {
this.falseAction = i -> falseValue;
return this;
}
}
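A hypothetical usage example, assuming the enclosing utility exposes a static when(predicate, trueAction) factory that calls the private constructor shown above:

SerializableFunction<Integer, String> describe =
    when((Integer i) -> i >= 0, i -> "non-negative").otherwise("negative");
describe.apply(5);  // "non-negative"
describe.apply(-3); // "negative"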
Library DVD borrowing has fallen sharply during the past year, and library users are rapidly migrating toward streaming services for both music and movies, according to the July 2012 edition of LJ‘s Patron Profiles, which examines trends in Media Consumption and Library Use.
DVDs are the top format for films loaned by libraries, and 27 percent of respondents said that libraries remain their primary source for movies—down from 36 percent in the first Patron Profiles survey, conducted less than a year ago. “A strong indicator of the changing media landscape is the rise of streaming and disc-by-mail services—both currently dominated by Netflix,” the report states.
In the first Patron Profiles survey, only three percent of respondents described streaming services such as Netflix, Hulu, Apple, and Amazon as their primary source for movies. That figure rose to 17 percent in this most recent survey, while the popularity of delivery-by-mail services fell from 27 percent to just over 13 percent.
Netflix’s September 2011 decision to split its streaming and DVD delivery services into separate subscription models may have been one factor driving this shift. The public’s rapid embrace of tablet computers and other mobile devices capable of viewing streamed content is another, “making it a medium that libraries would do well to explore despite challenges,” the report reads.
Maintaining a DVD collection during the transition to other media formats will pose a separate challenge to libraries. Their decline in popularity may be hastened by bad experiences with damaged or unreadable discs. Nearly 45 percent of patrons said that they sometimes have had trouble playing library DVDs.
“Notwithstanding the long-term fate of DVDs, they are still very popular and libraries will need to manage the physical discs for some time to come,” the report notes.
Similarly, Internet radio or streaming services such as Pandora and Spotify ranked second only to traditional radio as patrons’ primary source of music. About 32 percent of patrons described it as their primary source of music in this most recent survey, up from 20 percent a year ago.
“Although the physical CD is in decline, it is likely to continue its relatively small but positive role in the library experience. In the meantime, libraries should view the rise of streaming audio and legal downloading as an opportunity,” the report notes. Like DVDs, many CD borrowers had trouble with unreadable discs. More than a third of users reported problems with playback.
The popularity of music downloads continued to grow as well, with more than 30 percent of respondents describing downloads from services such as Apple iTunes as their primary source of music. And over 60 percent of respondents expressed interest in their library offering music downloads.
This latest Patron Profiles report contains additional information about format preferences, including audiobooks and digital games, and also examines the most popular movie and music genres, information about how often patrons tend to place holds on different formats, and their habits when content is not immediately available. Separately, the report includes a spotlight on library media consumption by the 61 to 80 age group, and delves into the habits of “Power Media Patrons,” closely examining the tendencies of patrons who demonstrate above-average use of library media services. Powered by Real Time Reporting from Bowker PubTrack Consumer, the survey included responses from 2020 individuals, all U.S. residents aged 18 and over.
import { ActionOrchestrator } from './action-orchestrator';
import { EntityRequestAction } from '../../../../../store/src/types/request.types';
import { PaginatedAction } from '../../../../../store/src/types/pagination.types';
const BASE_ACTIONS = [
'get',
'delete',
'update',
'create',
'getMultiple'
];
const fakeActions = [
'myMadeUpAction1',
'myMadeUpAction2'
];
function assertActions(actionOrchestrator: ActionOrchestrator<any>, actionsNotToHave: string[], actionsToHave?: string[]) {
actionsNotToHave.forEach(action => expect(actionOrchestrator.hasActionBuilder(action)).toBe(false));
if (actionsToHave) {
actionsToHave.forEach(action => expect(actionOrchestrator.hasActionBuilder(action)).toBe(true));
}
}
export function getBaseActionKeys() {
return [...BASE_ACTIONS];
}
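// Asserts that the orchestrator exposes exactly the action builders listed in
// `expectToHave`: every base action not listed, plus the made-up actions, must
// report hasActionBuilder(...) === false.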
export function hasActions<T extends ActionOrchestrator<any>>(actionOrchestrator: T, expectToHave?: string[]) {
const baseActions = getBaseActionKeys();
const baseActionsToNotHave = expectToHave ? getBaseActionKeys().reduce((actions, action) => {
if (!expectToHave.find((expectAction) => expectAction === action)) {
actions.push(action);
}
return actions;
}, [] as string[]) : baseActions;
assertActions(actionOrchestrator, [
...baseActionsToNotHave,
...fakeActions
], expectToHave);
}
export function getRequestAction() {
return {} as EntityRequestAction;
}
export function getPaginationAction() {
return {} as PaginatedAction;
}
import { ChangeDetectorRef, Component, OnInit } from "@angular/core";
import {
ComponentRegistryService,
EmbeddedContentComponent,
EmbeddedContentConfigurationComponent,
EmbeddedContentMode,
IDashboard,
IWidget,
IWidgets,
PizzagnaLayer,
WidgetTypesService
} from "@nova-ui/dashboards";
import { GridsterConfig, GridsterItem } from "angular-gridster2";
@Component({
selector: "embedded-content-widget-example",
templateUrl: "./embedded-content-widget-example.component.html",
styleUrls: ["./embedded-content-widget-example.component.less"],
})
export class EmbeddedContentWidgetExampleComponent implements OnInit {
// This variable will hold all the data needed to define the layout and behavior of the widgets.
// Pass this to the dashboard component's dashboard input in the template.
public dashboard: IDashboard | undefined;
// Angular gridster requires a configuration object even if it's empty.
// Pass this to the dashboard component's gridsterConfig input in the template.
public gridsterConfig: GridsterConfig = {};
// Boolean passed as an input to the dashboard. When true, widgets can be moved, resized, removed, or edited
public editMode: boolean = false;
constructor(
// WidgetTypesService provides the widget's necessary structure information
private widgetTypesService: WidgetTypesService,
private componentRegistry: ComponentRegistryService,
private changeDetectorRef: ChangeDetectorRef
) { }
public ngOnInit(): void {
const widgetTemplate = this.widgetTypesService.getWidgetType("embedded-content");
this.prepareNovaDashboards();
this.initializeDashboard();
}
/** Used for restoring widgets state */
public reInitializeDashboard() {
// destroys the components and their providers so the dashboard can re init data
this.dashboard = undefined;
this.changeDetectorRef.detectChanges();
this.initializeDashboard();
}
public initializeDashboard(): void {
// We're using a static configuration object for this example, but this is where
// the widget's configuration could potentially be populated from a database
const embeddedContentWidget = widgetConfig;
const widgets: IWidgets = {
// Complete the widget with information coming from its type definition
[embeddedContentWidget.id]: this.widgetTypesService.mergeWithWidgetType(embeddedContentWidget),
};
// Setting the widget dimensions and position (this is for gridster)
const positions: Record<string, GridsterItem> = {
[embeddedContentWidget.id]: {
cols: 10,
rows: 10,
y: 0,
x: 0,
},
};
// Finally, assigning the variables we created above to the dashboard
this.dashboard = { positions, widgets };
}
private prepareNovaDashboards() {
this.componentRegistry.registerByLateLoadKey(EmbeddedContentComponent);
this.componentRegistry.registerByLateLoadKey(EmbeddedContentConfigurationComponent);
}
}
const widgetConfig: IWidget = {
id: "embeddedContentWidgetId",
type: "embedded-content",
pizzagna: {
[PizzagnaLayer.Configuration]: {
"header": {
"properties": {
"title": "Embedded Content Widget",
"subtitle": "",
},
},
"mainContent": {
"properties": {
sanitized: true,
mode: EmbeddedContentMode.URL,
customEmbeddedContent: "https://www.ventusky.com/",
},
},
},
},
};
package apple.avfoundation;
import apple.NSObject;
import apple.avfoundation.protocol.AVContentKeyRecipient;
import apple.avfoundation.protocol.AVContentKeySessionDelegate;
import apple.foundation.NSArray;
import apple.foundation.NSData;
import apple.foundation.NSDictionary;
import apple.foundation.NSError;
import apple.foundation.NSMethodSignature;
import apple.foundation.NSSet;
import apple.foundation.NSURL;
import org.moe.natj.c.ann.FunctionPtr;
import org.moe.natj.general.NatJ;
import org.moe.natj.general.Pointer;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Mapped;
import org.moe.natj.general.ann.MappedReturn;
import org.moe.natj.general.ann.NInt;
import org.moe.natj.general.ann.NUInt;
import org.moe.natj.general.ann.Owned;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.general.ptr.VoidPtr;
import org.moe.natj.objc.Class;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.SEL;
import org.moe.natj.objc.ann.ObjCBlock;
import org.moe.natj.objc.ann.ObjCClassBinding;
import org.moe.natj.objc.ann.Selector;
import org.moe.natj.objc.map.ObjCObjectMapper;
@Generated
@Library("AVFoundation")
@Runtime(ObjCRuntime.class)
@ObjCClassBinding
public class AVContentKeySession extends NSObject {
static {
NatJ.register();
}
@Generated
protected AVContentKeySession(Pointer peer) {
super(peer);
}
@Generated
@Selector("accessInstanceVariablesDirectly")
public static native boolean accessInstanceVariablesDirectly();
/**
* addContentKeyRecipient:
*
* Informs the receiver that the specified recipient will be used for the session.
*
* It is an error to add recipient to sessions that have received an expire message. It is also an error to add recipients after they have already begun to process media data (e.g. after an AVURLAsset has loaded the values of any of its keys). Such errors will result in NSInternalInconsistencyExceptions. Sending this message to an AVContentKeySession is atomic.
*/
@Generated
@Selector("addContentKeyRecipient:")
public native void addContentKeyRecipient(@Mapped(ObjCObjectMapper.class) AVContentKeyRecipient recipient);
@Generated
@Owned
@Selector("alloc")
public static native AVContentKeySession alloc();
@Generated
@Selector("allocWithZone:")
@MappedReturn(ObjCObjectMapper.class)
public static native Object allocWithZone(VoidPtr zone);
@Generated
@Selector("automaticallyNotifiesObserversForKey:")
public static native boolean automaticallyNotifiesObserversForKey(String key);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:")
public static native void cancelPreviousPerformRequestsWithTarget(@Mapped(ObjCObjectMapper.class) Object aTarget);
@Generated
@Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
@Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
@Mapped(ObjCObjectMapper.class) Object anArgument);
@Generated
@Selector("classFallbacksForKeyedArchiver")
public static native NSArray<String> classFallbacksForKeyedArchiver();
@Generated
@Selector("classForKeyedUnarchiver")
public static native Class classForKeyedUnarchiver();
/**
* [@property] contentKeyRecipients
*
* The array of recipients of content keys currently associated with the AVContentKeySession.
*/
@Generated
@Selector("contentKeyRecipients")
public native NSArray<?> contentKeyRecipients();
/**
* contentKeySessionWithKeySystem:
*
* Creates a new instance of AVContentKeySession to manage a collection of media content keys.
*
* This method returns an AVContentKeySession instance that is capable of managing collection of media content keys corresponding to the input keySystem. An NSInvalidArgumentException will be raised if the value of keySystem is unsupported.
*
* @param keySystem
* A valid key system for retrieving keys.
* @return A new AVContentKeySession.
*/
@Generated
@Selector("contentKeySessionWithKeySystem:")
public static native AVContentKeySession contentKeySessionWithKeySystem(String keySystem);
/**
* contentKeySessionWithKeySystem:storageDirectoryAtURL:
*
* Creates a new instance of AVContentKeySession to manage a collection of media content keys.
*
* This method returns an AVContentKeySession instance that is capable of managing collection of media content keys corresponding to the input keySystem. An NSInvalidArgumentException will be raised if the value of keySystem is unsupported.
*
* @param keySystem
* A valid key system for retrieving keys.
* @param storageURL
* URL to a writable directory that the session will use to facilitate expired session reports after abnormal session termination.
* @return A new AVContentKeySession.
*/
@Generated
@Selector("contentKeySessionWithKeySystem:storageDirectoryAtURL:")
public static native AVContentKeySession contentKeySessionWithKeySystemStorageDirectoryAtURL(String keySystem,
NSURL storageURL);
/**
* [@property] contentProtectionSessionIdentifier
*
* An opaque identifier for the current content protection session.
*
* May be nil. Will call the delegate's contentKeySessionContentProtectionSessionIdentifierDidChange: when the identifier changes. The protection session ID is a unique string identifier generated by the AVContentKeySession that can be used by the application to identify content key session objects.
*/
@Generated
@Selector("contentProtectionSessionIdentifier")
public native NSData contentProtectionSessionIdentifier();
@Generated
@Selector("debugDescription")
public static native String debugDescription_static();
/**
* [@property] delegate
*
* The receiver's delegate.
*
* The value of this property is an object conforming to the AVContentKeySessionDelegate protocol. The delegate is set using the setDelegate:queue: method.
*/
@Generated
@Selector("delegate")
@MappedReturn(ObjCObjectMapper.class)
public native AVContentKeySessionDelegate delegate();
/**
* [@property] delegateQueue
*
* The dispatch queue on which all delegate methods will be invoked whenever processes requiring content keys are executed asynchronously.
*
* The value of this property is a dispatch_queue_t. The queue is set using the setDelegate:queue: method.
*/
@Generated
@Selector("delegateQueue")
public native NSObject delegateQueue();
@Generated
@Selector("description")
public static native String description_static();
/**
* expire
*
* Tells the receiver to treat the session as having been intentionally and normally expired.
*
* When an instance of AVContentKeySession receives an expire message, all of its associated objects conforming to the AVContentKeyRecipient protocol will become inoperable. Send this message only after you have finished operating on the media data.
*/
@Generated
@Selector("expire")
public native void expire();
@Generated
@Selector("hash")
@NUInt
public static native long hash_static();
@Generated
@Selector("init")
public native AVContentKeySession init();
@Generated
@Selector("instanceMethodForSelector:")
@FunctionPtr(name = "call_instanceMethodForSelector_ret")
public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);
@Generated
@Selector("instanceMethodSignatureForSelector:")
public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);
@Generated
@Selector("instancesRespondToSelector:")
public static native boolean instancesRespondToSelector(SEL aSelector);
@Generated
@Selector("isSubclassOfClass:")
public static native boolean isSubclassOfClass(Class aClass);
@Generated
@Selector("keyPathsForValuesAffectingValueForKey:")
public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);
/**
* [@property] keySystem
*
* The key system used for retrieving keys
*/
@Generated
@Selector("keySystem")
public native String keySystem();
/**
* makeSecureTokenForExpirationDateOfPersistableContentKey:completionHandler:
*
* Creates a secure server playback context (SPC) that the client could send to the key server to obtain an expiration date for the provided persistable content key data.
*
* @param persistableContentKeyData
* Persistable content key data that was previously created using -[AVContentKeyRequest persistableContentKeyFromKeyVendorResponse:options:error:] or obtained via AVContentKeySessionDelegate callback -contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:.
* @param handler
* Once the secure token is ready, this block will be called with the token or an error describing the failure.
*/
@Generated
@Selector("makeSecureTokenForExpirationDateOfPersistableContentKey:completionHandler:")
public native void makeSecureTokenForExpirationDateOfPersistableContentKeyCompletionHandler(
NSData persistableContentKeyData,
@ObjCBlock(name = "call_makeSecureTokenForExpirationDateOfPersistableContentKeyCompletionHandler") Block_makeSecureTokenForExpirationDateOfPersistableContentKeyCompletionHandler handler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_makeSecureTokenForExpirationDateOfPersistableContentKeyCompletionHandler {
@Generated
void call_makeSecureTokenForExpirationDateOfPersistableContentKeyCompletionHandler(NSData secureTokenData,
NSError error);
}
@Generated
@Owned
@Selector("new")
@MappedReturn(ObjCObjectMapper.class)
public static native Object new_objc();
/**
* pendingExpiredSessionReportsWithAppIdentifier:storageDirectoryAtURL:
*
* Provides "expired session reports" for prior AVContentKeySessions created with the specified app identifier that have expired either normally or abnormally.
*
* Note that no reports for sessions still in progress will be included.
*
* @param appIdentifier
* An opaque identifier for the application. The contents of this identifier depend on the particular protocol in use by the entity that controls the use of the media data.
* @param storageURL
* URL to a directory previously used with one or more instances of AVContentKeySession for the storage of expired session reports.
* @return An NSArray containing instances of NSData, each containing a pending expired session report as a property-list serialization of an NSDictionary object. The contents of expired session reports depend on the particular protocol in use by the entity that controls the use of the media data.
*/
@Generated
@Selector("pendingExpiredSessionReportsWithAppIdentifier:storageDirectoryAtURL:")
public static native NSArray<? extends NSData> pendingExpiredSessionReportsWithAppIdentifierStorageDirectoryAtURL(
NSData appIdentifier, NSURL storageURL);
/**
* processContentKeyRequestWithIdentifier:initializationData:options:
*
* Informs the receiver that it should attempt to instantiate a content decryption key using the specified initialization data.
*
* May be used to generate an AVContentKeyRequest from request initialization data already in hand, without awaiting such data during the processing of media data of an associated recipient.
*
* @param identifier
* Container- and protocol-specific identifier to be used to obtain a key response. Either identifier or initializationData must be non-nil. Both can be non-nil, if the content protection protocol requires both.
* @param initializationData
* Container- and protocol-specific data to be used to obtain a key response. Either identifier or initializationData must be non-nil. Both can be non-nil, if the content protection protocol requires both.
* @param options
* Additional information necessary to obtain the key, or nil if none. See AVContentKeyRequest*Key below.
*/
@Generated
@Selector("processContentKeyRequestWithIdentifier:initializationData:options:")
public native void processContentKeyRequestWithIdentifierInitializationDataOptions(
@Mapped(ObjCObjectMapper.class) Object identifier, NSData initializationData,
NSDictionary<String, ?> options);
/**
* removeContentKeyRecipient:
*
* Informs the receiver that the specified recipient will no longer be used.
*
* After the specified recipient is removed from the receiver it will become inoperable. Remove the recipient only after you have finished operating on the media data associated with it. Sending this message to an AVContentKeySession is atomic.
*/
@Generated
@Selector("removeContentKeyRecipient:")
public native void removeContentKeyRecipient(@Mapped(ObjCObjectMapper.class) AVContentKeyRecipient recipient);
/**
* removePendingExpiredSessionReports:withAppIdentifier:storageDirectoryAtURL:
*
* Removes expired session reports for prior AVContentKeySessions from storage. Once they have been removed, they will no longer be available via subsequent invocations of +pendingExpiredSessionReportsWithAppIdentifier:.
*
* This method is most suitable for use only after the specified expired session reports have been sent to the entity that controls the use of the media data and the entity has acknowledged their receipt.
*
* @param expiredSessionReports
* An array of expired session reports to be discarded.
* @param appIdentifier
* An opaque identifier for the application. The contents of this identifier depend on the particular protocol in use by the entity that controls the use of the media data.
* @param storageURL
* URL to a writable folder.
*/
@Generated
@Selector("removePendingExpiredSessionReports:withAppIdentifier:storageDirectoryAtURL:")
public static native void removePendingExpiredSessionReportsWithAppIdentifierStorageDirectoryAtURL(
NSArray<? extends NSData> expiredSessionReports, NSData appIdentifier, NSURL storageURL);
/**
* renewExpiringResponseDataForContentKeyRequest:
*
* Informs the receiver that the already provided response data for an earlier AVContentKeyRequest will imminently expire.
*
* In response the receiver will invoke your delegate with a new content key request entreating it to renew the expiring response data, via -contentKeySession:didProvideRenewingContentKeyRequest:.
*/
@Generated
@Selector("renewExpiringResponseDataForContentKeyRequest:")
public native void renewExpiringResponseDataForContentKeyRequest(AVContentKeyRequest contentKeyRequest);
@Generated
@Selector("resolveClassMethod:")
public static native boolean resolveClassMethod(SEL sel);
@Generated
@Selector("resolveInstanceMethod:")
public static native boolean resolveInstanceMethod(SEL sel);
/**
* setDelegate:queue:
*
* Sets the receiver's delegate. A delegate is required to handle content key initialization.
*
* @param delegate
* An object conforming to the AVContentKeySessionDelegate protocol.
* @param delegateQueue
* A dispatch queue on which delegate methods will be invoked whenever processes requiring content keys are executed asynchronously. Passing a value of nil for the delegateQueue parameter along with a non-nil value for the delegate parameter will result in an invalid argument exception.
*/
@Generated
@Selector("setDelegate:queue:")
public native void setDelegateQueue(@Mapped(ObjCObjectMapper.class) AVContentKeySessionDelegate delegate,
NSObject delegateQueue);
@Generated
@Selector("setVersion:")
public static native void setVersion_static(@NInt long aVersion);
/**
* [@property] storageURL
*
* The storage URL provided when the AVContentKeySession was created. May be nil.
*
* URL to a writable directory; may be nil. The session will use this to facilitate expired session reports after abnormal session termination.
*/
@Generated
@Selector("storageURL")
public native NSURL storageURL();
@Generated
@Selector("superclass")
public static native Class superclass_static();
@Generated
@Selector("version")
@NInt
public static native long version_static();
/**
* invalidateAllPersistableContentKeysForApp:options:completionHandler:
*
 * Invalidates all persistable content keys associated with the application and creates a secure server playback context (SPC) that the client could send to the key server to verify the outcome of the invalidation request.
*
* Once invalidated, persistable content keys cannot be used to answer key requests during later playback sessions.
*
* @param appIdentifier
* An opaque identifier for the application. The contents of this identifier depend on the particular protocol in use by the entity that controls the use of the media data.
* @param options
* Additional information necessary to generate the server playback context, or nil if none. See AVContentKeySessionServerPlaybackContextOption for supported options.
* @param handler
* Once the server playback context is ready, this block will be called with the data or an error describing the failure.
*/
@Generated
@Selector("invalidateAllPersistableContentKeysForApp:options:completionHandler:")
public native void invalidateAllPersistableContentKeysForAppOptionsCompletionHandler(NSData appIdentifier,
NSDictionary<String, ?> options,
@ObjCBlock(name = "call_invalidateAllPersistableContentKeysForAppOptionsCompletionHandler") Block_invalidateAllPersistableContentKeysForAppOptionsCompletionHandler handler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_invalidateAllPersistableContentKeysForAppOptionsCompletionHandler {
@Generated
void call_invalidateAllPersistableContentKeysForAppOptionsCompletionHandler(NSData secureTokenData,
NSError error);
}
/**
* invalidatePersistableContentKey:options:completionHandler:
*
 * Invalidates the persistable content key and creates a secure server playback context (SPC) that the client could send to the key server to verify the outcome of the invalidation request.
*
* Once invalidated, a persistable content key cannot be used to answer key requests during later playback sessions.
*
* @param persistableContentKeyData
* Persistable content key data that was previously created using -[AVContentKeyRequest persistableContentKeyFromKeyVendorResponse:options:error:] or obtained via AVContentKeySessionDelegate callback -contentKeySession:didUpdatePersistableContentKey:forContentKeyIdentifier:.
* @param options
* Additional information necessary to generate the server playback context, or nil if none. See AVContentKeySessionServerPlaybackContextOption for supported options.
* @param handler
* Once the server playback context is ready, this block will be called with the data or an error describing the failure.
*/
@Generated
@Selector("invalidatePersistableContentKey:options:completionHandler:")
public native void invalidatePersistableContentKeyOptionsCompletionHandler(NSData persistableContentKeyData,
NSDictionary<String, ?> options,
@ObjCBlock(name = "call_invalidatePersistableContentKeyOptionsCompletionHandler") Block_invalidatePersistableContentKeyOptionsCompletionHandler handler);
@Runtime(ObjCRuntime.class)
@Generated
public interface Block_invalidatePersistableContentKeyOptionsCompletionHandler {
@Generated
void call_invalidatePersistableContentKeyOptionsCompletionHandler(NSData secureTokenData, NSError error);
}
} |
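// --- Illustrative usage sketch (not part of the generated binding above) ---
// A minimal, hypothetical call into the invalidation API, assuming the class
// above is the Moe/NatJ binding for AVContentKeySession and that `session`
// and `keyData` were obtained elsewhere in the app.
class ContentKeyInvalidationExample {
    static void invalidate(AVContentKeySession session, NSData keyData) {
        session.invalidatePersistableContentKeyOptionsCompletionHandler(keyData, null,
                (secureTokenData, error) -> {
                    if (error == null) {
                        // Success: send secureTokenData (the SPC) to the key
                        // server so it can verify the invalidation outcome.
                    } else {
                        System.err.println("invalidation failed: " + error);
                    }
                });
    }
}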
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright 2007-2011 SSAC(Systems of Social Accounting Consortium)
* <author> <NAME> (PieCake,Inc.)
* <author> <NAME> (TOKYO INSTITUTE OF TECHNOLOGY)
* <author> <NAME> (Statistics Bureau)
* <author> <NAME> (Tokyo University of Technology)
* <author> <NAME> (HOSEI UNIVERSITY)
* <author> <NAME> (TOKYO INSTITUTE OF TECHNOLOGY)
*/
/*
* @(#)BigDecimalRangeImpl.java 1.70 2011/06/29
 * - created by Y.Ishizuka (PieCake,Inc.)
*/
package ssac.aadl.runtime.util.range.internal;
import java.math.BigDecimal;
import java.util.Iterator;
import java.util.NoSuchElementException;
import ssac.aadl.runtime.util.range.EmptyRangeIterator;
/**
 * A numeric range object representable with <tt>BigDecimal</tt>.
 * The range holds a start value, an end value and a step value, and
 * represents a range that includes its end value.
 * The range is invalid when the step is incremental but (start > end),
 * or decremental but (start < end).
*
* @version 1.70 2011/06/29
*
* @author <NAME> (PieCake,Inc.)
* @author <NAME> (TOKYO INSTITUTE OF TECHNOLOGY)
* @author <NAME> (Statistics Bureau)
* @author <NAME> (Tokyo University of Technology)
* @author <NAME> (HOSEI UNIVERSITY)
* @author <NAME> (TOKYO INSTITUTE OF TECHNOLOGY)
*
* @since 1.70
*/
public class BigDecimalRangeImpl implements NumberRangeImpl
{
//------------------------------------------------------------
// Constants
//------------------------------------------------------------
//------------------------------------------------------------
// Fields
//------------------------------------------------------------
/** Range start **/
protected BigDecimal _from;
/** Range end **/
protected BigDecimal _to;
/** Step value **/
protected BigDecimal _step;
/** Signum of the step value **/
protected int _stepSignum;
//------------------------------------------------------------
// Constructions
//------------------------------------------------------------
public BigDecimalRangeImpl(BigDecimal from, BigDecimal to, BigDecimal step) {
int stepSignum = step.signum();
this._from = from;
this._to = to;
if (stepSignum > 0) {
// incremental
if (from.compareTo(to) > 0) {
// empty : from > to
step = BigDecimal.ZERO;
}
}
else if (stepSignum < 0) {
// decremental
if (from.compareTo(to) < 0) {
// empty : from < to
step = BigDecimal.ZERO;
}
}
else {
// empty
step = BigDecimal.ZERO;
}
this._step = step;
this._stepSignum = step.signum();
}
//------------------------------------------------------------
// Public interfaces
//------------------------------------------------------------
/**
 * Returns the class of the numeric objects this range can represent.
 * <p>If this method returns <code>Short.class</code>, the values returned by
 * {@link #getShortFromValue()}, {@link #getShortToValue()}, {@link #getShortStepValue()} and {@link #getShortRangeIterator()}
 * are guaranteed to fit in a <code>short</code>.
 * <p>If this method returns <code>Integer.class</code>, the values returned by
 * {@link #getIntegerFromValue()}, {@link #getIntegerToValue()}, {@link #getIntegerStepValue()} and {@link #getIntegerRangeIterator()}
 * are guaranteed to fit in an <code>int</code>.
 * <p>If this method returns <code>Long.class</code>, the values returned by
 * {@link #getLongFromValue()}, {@link #getLongToValue()}, {@link #getLongStepValue()} and {@link #getLongRangeIterator()}
 * are guaranteed to fit in a <code>long</code>.
 * @return the class of the numeric objects this range can represent
 */
public Class<?> getValueClass() {
Class<?> retClazz;
try {
long lf = _from.longValueExact();
long lt = _to.longValueExact();
long ls = _step.longValueExact();
if (((short)lf == lf) && ((short)lt == lt) && ((short)ls == ls)) {
retClazz = Short.class;
}
else if (((int)lf == lf) && ((int)lt == lt) && ((int)ls == ls)) {
retClazz = Integer.class;
}
else {
retClazz = Long.class;
}
}
catch (ArithmeticException ex) {
retClazz = BigDecimal.class;
}
return retClazz;
}
/**
 * Returns <tt>true</tt> if the defined numeric range is invalid.
 * @return <tt>true</tt> if the range is invalid, <tt>false</tt> otherwise
 */
public boolean isEmpty() {
return (_stepSignum == 0);
}
/**
 * Returns whether the range is incremental or decremental.
 * @return <tt>true</tt> for an incremental range, <tt>false</tt> for a decremental one
 */
public boolean isIncremental() {
return (_stepSignum >= 0);
}
/**
 * Returns the signum of the step value.
 * A return value of 0 indicates that this range is invalid.
 * @return -1 if the step value is negative, 0 if it is zero, 1 if it is positive
 */
public int stepSignum() {
return _stepSignum;
}
/**
 * Checks whether the given value lies within this range, i.e. between
 * {@link #getDecimalFromValue()} and {@link #getDecimalToValue()}.
 * @param value the value to test
 * @return <tt>true</tt> if the value is within the range, <tt>false</tt> otherwise
 */
public boolean isIncludeValue(BigDecimal value) {
if (value == null || isEmpty()) {
return false;
}
if (_stepSignum > 0) {
// incremental
if (value.compareTo(_from) >= 0 && value.compareTo(_to) <= 0) {
return true;
} else {
return false;
}
} else {
// decremental
if (value.compareTo(_to) >= 0 && value.compareTo(_from) <= 0) {
return true;
} else {
return false;
}
}
}
/**
 * Checks whether the given value is an element of this range, i.e. equal to
 * one of the values generated from the range start and the step value.
 * @param value the value to test
 * @return <tt>true</tt> if the value equals an element of the range, <tt>false</tt> otherwise
 */
public boolean containsValue(BigDecimal value) {
if (!isIncludeValue(value)) {
return false;
}
// check whether the value is an element of the range
if (BigDecimal.ZERO.compareTo(value.subtract(_from).remainder(_step)) == 0) {
// (value - _from) % _step == 0
return true;
} else {
// (value - _from) % _step != 0
return false;
}
}
/**
 * Returns the start value of this range.
 * This method converts the start value to <tt>short</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>short</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#shortValue()},
 * so for values that do not fit in a <tt>short</tt> it may differ from the value returned by
 * {@link #getDecimalFromValue()}.
 * @return the range start value, converted to <tt>short</tt>
 */
public short getShortFromValue() {
return _from.shortValue();
}
/**
 * Returns the start value of this range.
 * This method converts the start value to <tt>int</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>int</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#intValue()},
 * so for values that do not fit in an <tt>int</tt> it may differ from the value returned by
 * {@link #getDecimalFromValue()}.
 * @return the range start value, converted to <tt>int</tt>
 */
public int getIntegerFromValue() {
return _from.intValue();
}
/**
 * Returns the start value of this range.
 * This method converts the start value to <tt>long</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>long</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#longValue()},
 * so for values that do not fit in a <tt>long</tt> it may differ from the value returned by
 * {@link #getDecimalFromValue()}.
 * @return the range start value, converted to <tt>long</tt>
 */
public long getLongFromValue() {
return _from.longValue();
}
/**
 * Returns the start value of this range.
 * @return the range start value
 */
public BigDecimal getDecimalFromValue() {
return _from;
}
/**
 * Returns the end value of this range.
 * This method converts the end value to <tt>short</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>short</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#shortValue()},
 * so for values that do not fit in a <tt>short</tt> it may differ from the value returned by
 * {@link #getDecimalToValue()}.
 * @return the range end value, converted to <tt>short</tt>
 */
public short getShortToValue() {
return _to.shortValue();
}
/**
 * Returns the end value of this range.
 * This method converts the end value to <tt>int</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>int</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#intValue()},
 * so for values that do not fit in an <tt>int</tt> it may differ from the value returned by
 * {@link #getDecimalToValue()}.
 * @return the range end value, converted to <tt>int</tt>
 */
public int getIntegerToValue() {
return _to.intValue();
}
/**
 * Returns the end value of this range.
 * This method converts the end value to <tt>long</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>long</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#longValue()},
 * so for values that do not fit in a <tt>long</tt> it may differ from the value returned by
 * {@link #getDecimalToValue()}.
 * @return the range end value, converted to <tt>long</tt>
 */
public long getLongToValue() {
return _to.longValue();
}
/**
 * Returns the end value of this range.
 * @return the range end value
 */
public BigDecimal getDecimalToValue() {
return _to;
}
/**
 * Returns the step value of this range.
 * This method converts the step value to <tt>short</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>short</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#shortValue()},
 * so for values that do not fit in a <tt>short</tt> it may differ from the value returned by
 * {@link #getDecimalStepValue()}.
 * @return the step value, converted to <tt>short</tt>
 */
public short getShortStepValue() {
return _step.shortValue();
}
/**
 * Returns the step value of this range.
 * This method converts the step value to <tt>int</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>int</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#intValue()},
 * so for values that do not fit in an <tt>int</tt> it may differ from the value returned by
 * {@link #getDecimalStepValue()}.
 * @return the step value, converted to <tt>int</tt>
 */
public int getIntegerStepValue() {
return _step.intValue();
}
/**
 * Returns the step value of this range.
 * This method converts the step value to <tt>long</tt>, in the same way as the
 * "narrowing primitive conversion" to <tt>long</tt> defined in the Java Language Specification.
 * The result is essentially the value returned by {@link java.math.BigDecimal#longValue()},
 * so for values that do not fit in a <tt>long</tt> it may differ from the value returned by
 * {@link #getDecimalStepValue()}.
 * @return the step value, converted to <tt>long</tt>
 */
public long getLongStepValue() {
return _step.longValue();
}
/**
 * Returns the step value of this range.
 * @return the step value
 */
public BigDecimal getDecimalStepValue() {
return _step;
}
/**
 * Returns an iterator over the elements of this range, from the start to the end.
 * This method converts the values returned by the iterator to <tt>short</tt>, in the same
 * way as the "narrowing primitive conversion" to <tt>short</tt> defined in the Java Language Specification.
 * The results are essentially the values returned by {@link java.math.BigDecimal#shortValue()},
 * so for values that do not fit in a <tt>short</tt> they may differ from the values of the
 * iterator returned by {@link #getDecimalRangeIterator()}.
 * @return an iterator over the range elements, with values converted to <tt>short</tt>
 */
public Iterator<Short> getShortRangeIterator() {
if (_stepSignum > 0) {
// incremental
return new IncrementalRangeIteratorImpl<Short>() {
public Short next() {
return getNextValue().shortValue();
}
};
}
else if (_stepSignum < 0) {
// decremental
return new DecrementalRangeIteratorImpl<Short>() {
public Short next() {
return getNextValue().shortValue();
}
};
}
else {
// empty
return new EmptyRangeIterator<Short>();
}
}
/**
 * Returns an iterator over the elements of this range, from the start to the end.
 * This method converts the values returned by the iterator to <tt>int</tt>, in the same
 * way as the "narrowing primitive conversion" to <tt>int</tt> defined in the Java Language Specification.
 * The results are essentially the values returned by {@link java.math.BigDecimal#intValue()},
 * so for values that do not fit in an <tt>int</tt> they may differ from the values of the
 * iterator returned by {@link #getDecimalRangeIterator()}.
 * @return an iterator over the range elements, with values converted to <tt>int</tt>
 */
public Iterator<Integer> getIntegerRangeIterator() {
if (_stepSignum > 0) {
// incremental
return new IncrementalRangeIteratorImpl<Integer>() {
public Integer next() {
return getNextValue().intValue();
}
};
}
else if (_stepSignum < 0) {
// decremental
return new DecrementalRangeIteratorImpl<Integer>() {
public Integer next() {
return getNextValue().intValue();
}
};
}
else {
// empty
return new EmptyRangeIterator<Integer>();
}
}
/**
 * Returns an iterator over the elements of this range, from the start to the end.
 * This method converts the values returned by the iterator to <tt>long</tt>, in the same
 * way as the "narrowing primitive conversion" to <tt>long</tt> defined in the Java Language Specification.
 * The results are essentially the values returned by {@link java.math.BigDecimal#longValue()},
 * so for values that do not fit in a <tt>long</tt> they may differ from the values of the
 * iterator returned by {@link #getDecimalRangeIterator()}.
 * @return an iterator over the range elements, with values converted to <tt>long</tt>
 */
public Iterator<Long> getLongRangeIterator() {
if (_stepSignum > 0) {
// incremental
return new IncrementalRangeIteratorImpl<Long>() {
public Long next() {
return getNextValue().longValue();
}
};
}
else if (_stepSignum < 0) {
// decremental
return new DecrementalRangeIteratorImpl<Long>() {
public Long next() {
return getNextValue().longValue();
}
};
}
else {
// empty
return new EmptyRangeIterator<Long>();
}
}
/**
 * Returns an iterator over the elements of this range, from the start to the end.
 * @return an iterator over the range elements
 */
public Iterator<BigDecimal> getDecimalRangeIterator() {
if (_stepSignum > 0) {
// incremental
return new IncrementalRangeIteratorImpl<BigDecimal>() {
public BigDecimal next() {
return getNextValue();
}
};
}
else if (_stepSignum < 0) {
// decremental
return new DecrementalRangeIteratorImpl<BigDecimal>() {
public BigDecimal next() {
return getNextValue();
}
};
}
else {
// empty
return new EmptyRangeIterator<BigDecimal>();
}
}
//------------------------------------------------------------
// Internal methods
//------------------------------------------------------------
//------------------------------------------------------------
// Inner classes
//------------------------------------------------------------
protected abstract class RangeIteratorImpl<E> implements Iterator<E> {
protected BigDecimal _curValue = _from;
protected boolean _valid = true;
public boolean hasNext() {
return _valid;
}
public void remove() {
throw new UnsupportedOperationException();
}
abstract protected BigDecimal getNextValue();
}
protected abstract class IncrementalRangeIteratorImpl<E> extends RangeIteratorImpl<E> {
protected BigDecimal getNextValue() {
if (_valid) {
BigDecimal retValue = _curValue;
_curValue = _curValue.add(_step);
if (_curValue.compareTo(_to) > 0) {
_valid = false;
}
return retValue;
} else {
throw new NoSuchElementException();
}
}
}
protected abstract class DecrementalRangeIteratorImpl<E> extends RangeIteratorImpl<E> {
protected BigDecimal getNextValue() {
if (_valid) {
BigDecimal retValue = _curValue;
_curValue = _curValue.add(_step);
if (_curValue.compareTo(_to) < 0) {
_valid = false;
}
return retValue;
} else {
throw new NoSuchElementException();
}
}
}
}
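// --- Illustrative usage sketch (not part of the original source) ---
// Iterates the inclusive range [1, 2] with step 0.5 and prints 1, 1.5, 2.0.
// Relies on the BigDecimal/Iterator imports at the top of this file.
class BigDecimalRangeDemo {
	public static void main(String[] args) {
		BigDecimalRangeImpl range = new BigDecimalRangeImpl(
				new BigDecimal("1"), new BigDecimal("2"), new BigDecimal("0.5"));
		Iterator<BigDecimal> it = range.getDecimalRangeIterator();
		while (it.hasNext()) {
			System.out.println(it.next());
		}
	}
}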
|
def merge(cls, *args):
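    """Merge several ThreeInputsRunner instances into one.

    All runners must share the same input/output names and kwargs. Their three
    input sequences are unioned, and every (I0, I1, I2) combination is located
    in one of the runners' result indexes and copied into the merged results.
    """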
for i, arg in enumerate(args):
assert arg.__class__.__name__ == 'ThreeInputsRunner', \
" <ThreeInputsRunner> : {}th arg is not ThreeInputsRunner.".format(i)
_input_names_ = args[0]._input_names_
_output_names_ = args[0]._output_names_
___kwargs___ = args[0].___kwargs___
for arg in args:
assert arg._input_names_ == _input_names_, " <ThreeInputsRunner> "
assert arg._output_names_ == _output_names_, " <ThreeInputsRunner> "
assert arg.___kwargs___ == ___kwargs___, " <ThreeInputsRunner> "
TIRinstance = ThreeInputsRunner()
TIRinstance._input_names_ = _input_names_
TIRinstance._output_names_ = _output_names_
TIRinstance.___kwargs___ = ___kwargs___
I0seq, I1seq, I2seq = set(), set(), set()
for i, tir in enumerate(args):
I0seq.update(set(tir.I0seq))
I1seq.update(set(tir.I1seq))
I2seq.update(set(tir.I2seq))
TIRinstance._I0seq_ = list(I0seq)
TIRinstance._I1seq_ = list(I1seq)
TIRinstance._I2seq_ = list(I2seq)
_inputs_dict_ = {}
for i, tiri in enumerate(args):
for inputs in tiri._inputs_index_dict_:
_inputs_dict_[str(i) + '-' + str(inputs)] = tiri._inputs_index_dict_[inputs]
result_keys = args[0].results.keys()
TIRinstance._results_ = {}
for key in result_keys:
TIRinstance._results_[key] = []
I, J, K = len(I0seq), len(I1seq), len(I2seq)
for k in range(K):
for j in range(J):
for i in range(I):
_inputs_ = (TIRinstance._I0seq_[i], TIRinstance._I1seq_[j], TIRinstance._I2seq_[k])
for thekey, value in _inputs_dict_.items():
if _inputs_ == value:
break
assert _inputs_dict_[thekey] == _inputs_, \
" <ThreeInputsRunner> : no data found for inputs: {}.".format(_inputs_)
ith_arg, jth_result = thekey.split('-')
ith_arg = int(ith_arg)
jth_result = int(jth_result)
for key in result_keys:
TIRinstance._results_[key].append(args[ith_arg].results[key][jth_result])
return TIRinstance |
/*[ Class ---------------------------------------------------------------------
Name:
ivWndArray - Utility class to manage a list of windows.
Description:
Utility class for managing a list of windows. This class is used by ivtWin32Wnd
to map Windows window messages back to a class that has created the window.
Base Classes:
Include:
#include <IVT/Kernel/SceneGraph/ivtWin32Wnd.hpp>
Requirement Key:
Hazard Key:
None
API:
no
Derivable:
yes
-----------------------------------------------------------------------------*/
class ivWndArray
{
public:
ivWndArray();
virtual ~ivWndArray();
virtual bool add(HWND hWnd, void *ptr);
virtual bool remove(HWND hWnd);
virtual void *getPtr(HWND hWnd) const;
virtual HWND getDefaultWnd() const;
virtual bool isEmpty() const;
protected:
int getSize() const;
struct ArrayType
{
HWND hWnd;
void *ptr;
} mWindows[16];
}; |
def TDataXtd_Constraint_GetID(*args):
return _TDataXtd.TDataXtd_Constraint_GetID(*args) |
import pytest
from tartiflette import Resolver, create_engine
@pytest.mark.asyncio
async def test_us_currency_ok():
@Resolver("Query.usCurrency", schema_name="test_us_currency_ok")
async def us_currency_resolver(*_args, **_kwargs):
return 10050
sdl = """
type Query {
usCurrency: USCurrency
}
"""
engine = await create_engine(
sdl=sdl,
modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
schema_name="test_us_currency_ok",
)
assert await engine.execute("query usCurrencyOk { usCurrency }") == {
"data": {"usCurrency": "$100.50"}
}
@pytest.mark.asyncio
async def test_us_currency_nok():
@Resolver("Query.usCurrency", schema_name="test_us_currency_nok")
async def us_currency_resolver(*_args, **_kwargs):
return "nok"
sdl = """
type Query {
usCurrency: USCurrency
}
"""
engine = await create_engine(
sdl=sdl,
modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
schema_name="test_us_currency_nok",
)
result = await engine.execute("query usCurrencyNok { usCurrency }")
assert result["data"]["usCurrency"] is None
assert len(result["errors"]) == 1
assert (
result["errors"][0]["message"]
== "USCurrency cannot represent value: < nok >"
)
@pytest.mark.asyncio
async def test_us_currency_mutation_ok():
@Resolver(
"Mutation.usCurrency", schema_name="test_us_currency_mutation_ok"
)
async def us_currency_resolver(*_args, **_kwargs):
return True
sdl = """
type Query {
usCurrency: USCurrency
}
type Mutation {
usCurrency(input: USCurrency): Boolean
}
"""
engine = await create_engine(
sdl=sdl,
modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
schema_name="test_us_currency_mutation_ok",
)
assert await engine.execute(
'mutation usCurrency { usCurrency(input:"75017") }'
) == {"data": {"usCurrency": True}}
@pytest.mark.asyncio
async def test_us_currency_mutation_nok():
@Resolver(
"Mutation.usCurrency", schema_name="test_us_currency_mutation_nok"
)
async def us_currency_resolver(*_args, **_kwargs):
return True
sdl = """
type Query {
usCurrency: USCurrency
}
type Mutation {
usCurrency(input: USCurrency): Boolean
}
"""
engine = await create_engine(
sdl=sdl,
modules=[{"name": "tartiflette_plugin_scalars", "config": {}}],
schema_name="test_us_currency_mutation_nok",
)
result = await engine.execute(
'mutation usCurrency { usCurrency(input:"nok") }'
)
assert result["data"] is None
assert len(result["errors"]) == 1
assert (
result["errors"][0]["message"]
== "Value nok is not of correct type USCurrency"
)
|
From Al-Itihaad to Al-Shabaab: how the Ethiopian intervention and the ‘War on Terror’ exacerbated the conflict in Somalia
Abstract External intervention has frustrated, and continues to frustrate, peace and stability in the Horn of Africa and Somalia, adding various adverse layers to an already complicated and complex conflict. The level of forceful military engagement intended for regional domination has profoundly and negatively affected peacebuilding and statebuilding efforts in Somalia. This article examines how earlier Ethiopian policies towards Somalia have reshaped the (post-)Cold War politics of the Horn. In doing so, it traces the roots of the Ethiopian intervention in Somalia vis-à-vis new non-state armed groups to chart the changing political dynamics of the conflict in Somalia. Using a historical approach, the article argues that Ethiopia's agenda is central to understanding why the 'War on Terror' has strengthened and subsequently midwifed armed militant movements (e.g. new insurgency groups) in Somalia, from Al-Itihaad to today's Al-Shabaab. In focusing upon various regional actors and groups, the article shifts the emphasis from internal systems to external power structures, considering the wider historical and political factors in the region that must be closely examined if the regional and local conflicts are to be deeply understood. While it is a context-specific study, the article aims to contribute fresh perspectives and insights to ongoing discussions on the consequences of the Ethiopian intervention in Somalia.
/**
 * Clears the "memory" of what has been inserted during the payment in progress
 */
private void viderPaiementEnCours()
{
for (Piece piece : Piece.values())
{
StockPiecePaiement.put(piece, 0);
}
for (BilletMonnaie billet : BilletMonnaie.values())
{
StockBilletPaiement.put(billet, 0);
}
} |
#include <stdio.h>
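/*
 * Reads an operation count n and an initial balance s, then n operations of
 * the form "+ d" (deposit) or "- d" (withdrawal). A withdrawal larger than
 * the current balance is rejected and counted. Prints the final balance and
 * the number of rejected withdrawals.
 */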
int main()
{
int i,t,n,d;
char ch;
long long s;
scanf("%d %lld",&n,&s);
t = 0;
for(i = 1;i <= n;i++){
scanf(" %c",&ch);
scanf("%d",&d);
if(ch == '+'){
s = s + d;
}
else if(ch == '-' && d > s){
t++;
}
else if(ch == '-' && d <= s){
s = s - d;
}
}
printf("%lld %d\n",s,t);
return 0;
}
|
##############################################################################
# Copyright by The HDF Group. #
# All rights reserved. #
# #
# This file is part of HSDS (HDF5 Scalable Data Service), Libraries and #
# Utilities. The full HSDS copyright notice, including #
# terms governing use, modification, and redistribution, is contained in #
# the file COPYING, which can be found at the root of the source code #
# distribution tree. If you do not have access to this file, you may #
# request a copy from <EMAIL>. #
##############################################################################
#
# service node of hsds cluster
#
import os.path as op
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPNotFound, HTTPInternalServerError
from util.idUtil import getDataNodeUrl, getCollectionForId, isSchema2Id, getS3Key
from util.storUtil import getStorJSONObj
from util.authUtil import aclCheck
from util.httpUtil import http_get
from util.domainUtil import getBucketForDomain
import hsds_logger as log
async def getDomainJson(app, domain, reload=False):
""" Return domain JSON from cache or fetch from DN if not found
Note: only call from sn!
"""
# TBD - default reload to True because some h5pyd tests fail due to
# cached values being picked up (test case deletes/re-creates domain)
# It would be desirable to use default of False to avoid extra
# round-trips to DN node
log.info(f"getDomainJson({domain}, reload={reload})")
if app["node_type"] != "sn":
log.error("wrong node_type")
raise HTTPInternalServerError()
domain_cache = app["domain_cache"]
if domain in domain_cache:
if reload:
del domain_cache[domain]
else:
log.debug("returning domain_cache value")
return domain_cache[domain]
req = getDataNodeUrl(app, domain)
req += "/domains"
params = { "domain": domain }
log.debug(f"sending dn req: {req} params: {params}")
domain_json = await http_get(app, req, params=params)
if 'owner' not in domain_json:
log.warn("No owner key found in domain")
raise HTTPInternalServerError()
if 'acls' not in domain_json:
log.warn("No acls key found in domain")
raise HTTPInternalServerError()
domain_cache[domain] = domain_json # add to cache
return domain_json
async def validateAction(app, domain, obj_id, username, action):
""" check that the given object belongs in the domain and that the
    requested action (create, read, update, delete, readACL, updateACL)
is permitted for the requesting user.
"""
meta_cache = app['meta_cache']
log.info(f"validateAction(domain={domain}, obj_id={obj_id}, username={username}, action={action})")
# get domain JSON
domain_json = await getDomainJson(app, domain)
if "root" not in domain_json:
msg = f"Expected root key for domain: {domain}"
log.warn(msg)
raise HTTPBadRequest(reason=msg)
obj_json = None
if obj_id in meta_cache:
obj_json = meta_cache[obj_id]
else:
# fetch from DN
collection = getCollectionForId(obj_id)
req = getDataNodeUrl(app, obj_id)
req += '/' + collection + '/' + obj_id
bucket = getBucketForDomain(domain)
params = {}
if bucket:
params["bucket"] = bucket
obj_json = await http_get(app, req, params=params)
meta_cache[obj_id] = obj_json
log.debug("obj_json[root]: {} domain_json[root]: {}".format(obj_json["root"], domain_json["root"]))
if obj_json["root"] != domain_json["root"]:
log.info("unexpected root, reloading domain")
domain_json = await getDomainJson(app, domain, reload=True)
if "root" not in domain_json or obj_json["root"] != domain_json["root"]:
msg = "Object id is not a member of the given domain"
log.warn(msg)
raise HTTPBadRequest(reason=msg)
if action not in ("create", "read", "update", "delete", "readACL", "updateACL"):
log.error(f"unexpected action: {action}")
raise HTTPInternalServerError()
reload = False
try:
aclCheck(domain_json, action, username) # throws exception if not allowed
except HTTPForbidden:
log.info(f"got HttpProcessing error on validate action for domain: {domain}, reloading...")
# just in case the ACL was recently updated, refetch the domain
reload = True
if reload:
domain_json = await getDomainJson(app, domain, reload=True)
aclCheck(domain_json, action, username)
async def getObjectJson(app, obj_id, bucket=None, refresh=False, include_links=False, include_attrs=False):
""" Return top-level json (i.e. excluding attributes or links by default) for a given obj_id.
    If refresh is False, any data present in the meta_cache will be returned. If not,
    the DN will be queried, and any resultant data added to the meta_cache.
Note: meta_cache values may be stale, but use of immutable data (e.g. type of a dataset)
is always valid
"""
meta_cache = app['meta_cache']
obj_json = None
if include_links or include_attrs:
# links and attributes are subject to change, so always refresh
refresh = True
log.info(f"getObjectJson {obj_id}")
if obj_id in meta_cache and not refresh:
log.debug(f"found {obj_id} in meta_cache")
obj_json = meta_cache[obj_id]
else:
req = getDataNodeUrl(app, obj_id)
collection = getCollectionForId(obj_id)
params = {}
if include_links:
params["include_links"] = 1
if include_attrs:
params["include_attrs"] = 1
if bucket:
params["bucket"] = bucket
req += '/' + collection + '/' + obj_id
obj_json = await http_get(app, req, params=params) # throws 404 if doesn't exist
meta_cache[obj_id] = obj_json
if obj_json is None:
msg = f"Object: {obj_id} not found"
log.warn(msg)
raise HTTPNotFound()
return obj_json
async def getObjectIdByPath(app, obj_id, h5path, bucket=None, refresh=False):
""" Find the object at the provided h5path location.
If not found raise 404 error.
"""
log.info(f"getObjectIdByPath obj_id: {obj_id} h5path: {h5path} refresh: {refresh}")
if h5path.startswith("./"):
h5path = h5path[2:] # treat as relative path
links = h5path.split('/')
for link in links:
if not link:
continue # skip empty link
log.debug(f"getObjectIdByPath for objid: {obj_id} got link: {link}")
if getCollectionForId(obj_id) != "groups":
# not a group, so won't have links
msg = f"h5path: {h5path} not found"
log.warn(msg)
raise HTTPNotFound()
req = getDataNodeUrl(app, obj_id)
req += "/groups/" + obj_id + "/links/" + link
log.debug("get LINK: " + req)
params = {}
if bucket:
params["bucket"] = bucket
link_json = await http_get(app, req, params=params)
log.debug("got link_json: " + str(link_json))
if link_json["class"] != 'H5L_TYPE_HARD':
# don't follow soft/external links
msg = f"h5path: {h5path} not found"
log.warn(msg)
            raise HTTPNotFound()
obj_id = link_json["id"]
    # if we get here, we've traversed the entire path and found the object
return obj_id
async def getPathForObjectId(app, parent_id, idpath_map, tgt_id=None, bucket=None):
""" Search the object starting with the given parent_id.
    idpath_map should be a dict with at minimum the key: parent_id: <parent_path>.
    If tgt_id is not None, returns the first path that matches tgt_id, or None if not found.
    If tgt_id is None, returns the idpath_map.
"""
if not parent_id:
log.error("No parent_id passed to getPathForObjectId")
raise HTTPInternalServerError()
if parent_id not in idpath_map:
msg = f"Obj {parent_id} expected to be found in idpath_map"
log.error(msg)
raise HTTPInternalServerError()
parent_path = idpath_map[parent_id]
if parent_id == tgt_id:
return parent_path
req = getDataNodeUrl(app, parent_id)
req += "/groups/" + parent_id + "/links"
params = {}
if bucket:
params["bucket"] = bucket
log.debug("getPathForObjectId LINKS: " + req)
links_json = await http_get(app, req, params=params)
log.debug(f"getPathForObjectId got links json from dn for parent_id: {parent_id}")
links = links_json["links"]
h5path = None
for link in links:
if link["class"] != "H5L_TYPE_HARD":
continue # ignore everything except hard links
link_id = link["id"]
if link_id in idpath_map:
continue # this node has already been visited
title = link["title"]
if tgt_id is not None and link_id == tgt_id:
# found it!
h5path = op.join(parent_path, title)
break
idpath_map[link_id] = op.join(parent_path, title)
if getCollectionForId(link_id) != "groups":
continue
h5path = await getPathForObjectId(app, link_id, idpath_map, tgt_id=tgt_id, bucket=bucket) # recursive call
if tgt_id is not None and h5path:
break
return h5path
async def getRootInfo(app, root_id, bucket=None):
""" Get extra information the root collection. """
# Gather additional info on the domain
log.debug(f"getRootInfo {root_id}")
if not isSchema2Id(root_id):
log.info(f"no dataset details not available for schema v1 id: {root_id} returning null results")
return None
s3_key = getS3Key(root_id)
parts = s3_key.split('/')
# dset_key is in the format db/<root>/d/<dset>/.dataset.json
# get the key for the root info object as: db/<root>/.info.json
if len(parts) != 3:
log.error(f"Unexpected s3key format: {s3_key}")
return None
info_key = f"db/{parts[1]}/.info.json"
try:
info_json = await getStorJSONObj(app, info_key, bucket=bucket)
except HTTPNotFound:
log.warn(f"info.json not found for key: {info_key}")
return None
return info_json
|
#!/usr/bin/env python
# Project Euler 86: count cuboids (largest side m) whose shortest surface path
# between opposite corners is an integer; find the smallest m for which the
# count first exceeds one million.
def issquare(n):
    return (int(round(n**.5)))**2 == n

total = m = 0
while total < 10**6:
    m += 1
    for ipj in range(2*m + 1):  # ipj = i + j, the two smaller sides combined
        if not issquare(ipj**2 + m**2):
            continue
        if ipj > m + 1:
            total += (2*m + 2 - ipj) // 2
        else:
            total += ipj // 2
print(m)
|
use crate::prelude::*;
use num::Float;
use polars_arrow::kernels::float::*;
use polars_arrow::kernels::set::set_at_nulls;
impl<T> ChunkedArray<T>
where
T: PolarsFloatType,
T::Native: Float,
{
pub fn is_nan(&self) -> BooleanChunked {
self.apply_kernel_cast(is_nan::<T::Native>)
}
pub fn is_not_nan(&self) -> BooleanChunked {
self.apply_kernel_cast(is_not_nan::<T::Native>)
}
pub fn is_finite(&self) -> BooleanChunked {
self.apply_kernel_cast(is_finite)
}
pub fn is_infinite(&self) -> BooleanChunked {
self.apply_kernel_cast(is_infinite)
}
    /// Convert missing values to `NaN` values.
    #[must_use]
pub fn none_to_nan(&self) -> Self {
let chunks = self
.downcast_iter()
.map(|arr| Arc::new(set_at_nulls(arr, T::Native::nan())) as ArrayRef)
.collect();
ChunkedArray::from_chunks(self.name(), chunks)
}
}
|
import { IValidationErrorText } from "..";
export declare class StringTools {
static specialCharReplaceSpecific(originalErrorText: IValidationErrorText, character: string, replaceBy: string): IValidationErrorText;
static specialCharReplace(originalErrorText: IValidationErrorText, replaceBy: string): IValidationErrorText;
static returnAllCharacterIndexes(text: string): number[];
static returnAllCharacterIndexesSince(text: string, since?: number): number[];
static returnMatchingIndexes(text: string, regEx: RegExp): number[];
static returnNotMatchingIndexes(text: string, regEx: RegExp, exceptions?: string[]): number[];
static returnSubstringMatchingIndexes(text: string, substringToFind: string): number[];
static returnSpacesLocations(text: string): number[];
static returnDoubleSpacesLocations(text: string): number[];
static returnWordsFirstLetterLocations(text: string): number[];
static returnWords(text: string): string[];
static removeCharacters(text: string, charactersIndexes: number[]): string;
static replaceAt(text: string, index: number, replacement: string): string;
static removeAt(text: string, index: number): string;
static removeCharactersSince(text: string, characterIndex: number): string;
static insert(source: string, textToInsert: string, positionToInsert: number): string;
static containsAt(text: string, substring: string, location: number): boolean;
}
|
// Get the URL of the member selector
bool SiGetMemberSelectorUrl(char *url, UINT url_size)
{
BUF *b;
bool ret = false;
if (url == NULL)
{
return false;
}
b = ReadDump(MEMBER_SELECTOR_TXT_FILENAME);
if (b == NULL)
{
return false;
}
while (true)
{
char *line = CfgReadNextLine(b);
if (line == NULL)
{
break;
}
Trim(line);
if (IsEmptyStr(line) == false && ret == false)
{
StrCpy(url, url_size, line);
ret = true;
}
Free(line);
}
FreeBuf(b);
return ret;
} |
// Test: GeometricArrayGenerator, test the data send is in valid format
func TestGeometricArrayGenerator(t *testing.T) {
tests := []struct {
name string
dt string
want bool
}{
{"validate_build_path_array", "path[]", true},
{"validate_build_polygon_array", "polygon[]", true},
{"validate_build_line_array", "line[]", true},
{"validate_build_lseg_array", "lseg[]", true},
{"validate_build_box_array", "box", true},
{"validate_build_circle_array", "circle[]", true},
{"validate_build_point_array", "point[]", true},
}
for _, tt := range tests {
re := strings.Replace(reExpOtherGeometricData, "*", "", -1)
format := regexp.MustCompile(re)
t.Run(tt.name, func(t *testing.T) {
if strings.HasPrefix(tt.dt, "box") {
if got := GeometricArrayGenerator(5, tt.dt); format.MatchString(got) != tt.want {
t.Errorf("TestGeometricArrayGenerator = %v, want %v", got, tt.want)
}
} else {
if got := GeometricArrayGenerator(5, tt.dt); isItValidArray(got) != tt.want {
t.Errorf("TestGeometricArrayGenerator = %v, want %v", got, tt.want)
}
}
})
}
} |
package android.support.v4.widget;
import android.annotation.TargetApi;
import android.widget.TextView;
@TargetApi(16)
/* renamed from: android.support.v4.widget.z */
class C0599z {
static int m2546a(TextView textView) {
return textView.getMaxLines();
}
}
|
import sys
import types
from restrain_jit.abs_compiler import instrnames as InstrNames
from bytecode import Bytecode, Instr
from importlib import _bootstrap
from importlib._bootstrap import ModuleSpec
from importlib.abc import Loader
from contextlib import contextmanager
from importlib._bootstrap_external import PathFinder, FileLoader, ExtensionFileLoader
class RePyLoader(Loader):
def __init__(self, loader: FileLoader):
self.loader = loader
def create_module(self, spec: ModuleSpec):
# mod = RestrainModule(spec.name)
mod = types.ModuleType(spec.name)
_bootstrap._init_module_attrs(spec, mod)
return mod
def exec_module(self, module):
code = self.loader.get_code(module.__name__)
if code is None:
raise ImportError('cannot load module {!r} when get_code() '
'returns None'.format(module.__name__))
        # maps each global symbol to the jit functions that reference it
        __glob_refs__ = module.__glob_refs__ = {}
bc = Bytecode.from_code(code)
def update_generations(name):
functions = __glob_refs__.get(name, None)
if functions is None:
return
for fn in functions:
fn.__update_global_ref__(name)
module.__dict__['__update_generations__'] = update_generations
def update_bc():
for each in bc:
yield each
if isinstance(
each,
Instr) and each.name == InstrNames.STORE_NAME:
yield Instr(
InstrNames.LOAD_NAME,
'__update_generations__',
lineno=each.lineno)
yield Instr(
InstrNames.LOAD_CONST,
each.arg,
lineno=each.lineno)
yield Instr(
InstrNames.CALL_FUNCTION, 1, lineno=each.lineno)
yield Instr(InstrNames.POP_TOP, lineno=each.lineno)
lst = list(update_bc())
bc.clear()
bc.extend(lst)
code = bc.to_code()
exec(code, module.__dict__)
class RePyFinder(PathFinder):
@classmethod
def find_spec(cls, fullname, path=None, target=None):
spec: ModuleSpec = PathFinder.find_spec(fullname, path, target)
if spec and spec.loader and isinstance(
spec.loader, FileLoader) and not isinstance(
spec.loader, ExtensionFileLoader):
spec.loader = RePyLoader(spec.loader)
return spec
def unregister():
sys.meta_path.remove(RePyFinder)
def register():
sys.meta_path.insert(0, RePyFinder)
@contextmanager
def with_registered():
try:
register()
yield
finally:
unregister()
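# Illustrative usage (not part of the original source): install the finder only
# for the duration of an import so other imports keep the default machinery.
#
#     with with_registered():
#         import some_module  # loaded (and instrumented) through RePyLoader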
|
Bret Peter Tarrant McKenzie, ONZM (born 29 June 1976) is a New Zealand comedian, actor, musician and producer. He is one half of the musical comedy duo Flight of the Conchords along with Jemaine Clement. The duo's comedy and music became the basis of a BBC radio series and then an oft-lauded American television series, which aired for two seasons on HBO. McKenzie served as music supervisor for two Muppet films, The Muppets (2011) and Muppets Most Wanted (2014), the former of which won him an Academy Award for Best Original Song for "Man or Muppet". His latest work has seen him write the lyrics for the 2016 Sainsbury's Christmas advert, featuring James Corden on vocals.[1]
As an actor, he portrayed Lindir in Peter Jackson's The Lord of the Rings and The Hobbit film trilogies: in the first he was an unnamed extra whom fans dubbed Figwit, a character who gained attention thanks to the trilogy's fan community; in The Hobbit he is credited as Lindir, a minor character who originally appears in the book The Fellowship of the Ring.
Background
McKenzie was born in Wellington, New Zealand. He is a former member of The Black Seeds. He released an album called Prototype as Video Kid[2] and is a member of the Wellington International Ukulele Orchestra.[3] McKenzie attended Clifton Terrace Model School ("model" refers to a standard school for training teachers as opposed to modelling), Wellington College and then Victoria University of Wellington where he met Jemaine Clement who was also studying film and theatre. Together, they were members of So You're a Man and they later formed Flight of the Conchords.
As Flight of the Conchords they have toured internationally and released four CDs: Folk the World Tour in 2002, The Distant Future (which won the Grammy Award for Best Comedy Album) in 2007, the Grammy-nominated Flight of the Conchords in 2008, and I Told You I Was Freaky in 2009. The Conchords produced a six-part improvisational comedy radio program for the BBC and have appeared on Late Night with Conan O'Brien, the Late Show with David Letterman and The Late Late Show with Craig Ferguson. After a successful appearance in 2005 on HBO's One Night Stand, the Conchords were offered their own 12-part HBO series, Flight of the Conchords. Its first season ran from June to September 2007, and its second season premiered on HBO on 18 January 2009.
McKenzie has appeared in the first and third films of Peter Jackson's The Lord of the Rings trilogy. His silent role in the first film as Figwit achieved some minor internet fame, which led to Jackson giving him a line in the third film. In April 2011, McKenzie was cast as the elf Lindir for The Hobbit. His father, Peter McKenzie, played Elendil in The Lord of the Rings.
Along with Clement, McKenzie was featured as one of 2008's "100 Sexiest People" in a special edition of the Australian magazine Who.
McKenzie and fellow Conchord Clement guest starred as a pair of camp counselors in "Elementary School Musical", the season premiere of the 22nd season of The Simpsons, which aired on 26 September 2010.[4]
During the summer of 2010, McKenzie flew to Los Angeles to serve as the music supervisor for The Muppets.[5] He went on to write four of the five original songs from the film's soundtrack including "Man or Muppet" and "Life's a Happy Song" both of which were nominated for Broadcast Film Critics Association Awards and Satellite Awards for Best Original Song.[6]
At the 84th Academy Awards in 2012 his song, "Man or Muppet", won the Academy Award for Best Original Song.
McKenzie, together with Australian comedian Hamish Blake, starred in a New Zealand feature film, Two Little Boys, completed in late 2011 and released in New Zealand in March 2012.[7]
McKenzie wrote the original songs for the 2014 movie Muppets Most Wanted.
Personal life
He is married to New Zealand publicist Hannah Clarke[8][9] and currently maintains residences in Los Angeles, New York City, and Wellington. They have two children; daughter Vita (born 2009), and son Leo (born 2011).[10]
Selected filmography
/// Returns the global variables used by the given expression or its
/// subexpressions.
pub fn global_vars(&self) -> BTreeSet<Symbol> {
match *self {
Expr::AExpr(ref e) => e.global_vars(),
Expr::CExpr(ref e) => e.global_vars(),
Expr::Let(ref a, ref b) => a.global_vars().into_iter().chain(b.global_vars()).collect(),
Expr::Seq(ref a, ref b) => a.global_vars().into_iter().chain(b.global_vars()).collect(),
}
} |
The next-generation 2018 Porsche 911 test mule has been spotted for the very first time during public road trials in Germany. The prototype features the same body panels as the existing 911, with noticeably wider rear wheel arches and a protruding fuel cap.
Porsche recently unveiled the face-lifted version of the 991 and is planning to unveil the all-new 911 by the end of 2018. This prototype carries cues from the 2017 911 and will ride on a new modular sports car platform developed by the company. The new platform allows greater flexibility in the positioning of the engine, the suspension setup and other features necessary to adopt hybrid technology for the next-generation 911. The same platform may be used in next-generation versions of the Boxster and Cayman.
At the moment there are no firm details available about the hybrid powertrain, but we expect it may feature the same twin-turbocharged 3.0-liter six-cylinder engine currently powering the Carrera and Carrera S variants. This flat-six twin-turbo develops 370 ponies along with 331 foot-pounds of torque in the 2017 911 Carrera 4 lineup and, with a modified compressor fitted, punches out 420 hp and 368 foot-pounds of torque in Carrera 4S models.
Stay tuned to Indyacars for further updates. Check the gallery for more detailed images.
/**
* Deserializes the version and datum from a stream.
*
* <p>This method deserializes data serialized via
* {@link #writeVersionAndSerialize(SimpleVersionedSerializer, Object, DataOutputView)}.
*
* <p>The first four bytes will be interpreted as the version. The next four bytes will be
* interpreted as the length of the datum bytes, then length-many bytes will be read.
* Finally, the datum is deserialized via the {@link SimpleVersionedSerializer#deserialize(int, byte[])}
* method.
*
 * @param serializer The serializer to deserialize the datum with.
* @param in The stream to deserialize from.
*/
public static <T> T readVersionAndDeSerialize(SimpleVersionedSerializer<T> serializer, DataInputView in) throws IOException {
checkNotNull(serializer, "serializer");
checkNotNull(in, "in");
final int version = in.readInt();
final int length = in.readInt();
final byte[] data = new byte[length];
in.readFully(data);
return serializer.deserialize(version, data);
} |
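	/**
	 * Illustrative sketch (not part of the original source): the write side
	 * implied by the javadoc above, i.e. version, then length, then the
	 * serialized bytes. Hypothetical; the real writeVersionAndSerialize may
	 * differ in details.
	 */
	public static <T> void writeVersionAndSerializeSketch(
			SimpleVersionedSerializer<T> serializer, T datum, DataOutputView out) throws IOException {
		final byte[] data = serializer.serialize(datum);
		out.writeInt(serializer.getVersion());
		out.writeInt(data.length);
		out.write(data);
	}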
package com.app.ecommerce.v1.props;
import android.content.Context;
import android.content.SharedPreferences;
import android.preference.PreferenceManager;
public class Preferences {
private static final String PREFERENCIA_CLAVE_API = "preferencia.claveApi";
private static SharedPreferences getDefaultSharedPreferences(Context contexto) {
return PreferenceManager.getDefaultSharedPreferences(contexto);
}
public static void guardarClaveApi(Context contexto, String claveApi) {
SharedPreferences sp = getDefaultSharedPreferences(contexto);
sp.edit().putString(PREFERENCIA_CLAVE_API, claveApi).apply();
}
public static String obtenerClaveApi(Context contexto) {
return getDefaultSharedPreferences(contexto).getString(PREFERENCIA_CLAVE_API, null);
}
}
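// Illustrative call sites (not part of the original source), e.g. from an
// Activity where `this` is a Context (the key value below is hypothetical):
//     Preferences.guardarClaveApi(this, "my-api-key");
//     String claveApi = Preferences.obtenerClaveApi(this);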
|
Invited Review: Development of a one-dimensional computational fluid dynamics modeling approach to predict cycle-to-cycle variability in spark-ignition engines based on physical understanding acquired from large-eddy simulation
In order to satisfy emission standards and CO2 targets, spark-ignition engines are designed to operate with high dilution rates, compression ratios and boost levels, thus increasing the propensity for unstable combustion. Therefore it is important to address cycle-to-cycle variability (CCV) in complete engine simulators in order to support the design of viable architectures and control strategies. This work concerns the development, validation and application to a multi-cylinder spark-ignition engine of a physics-based one-dimensional combustion model able to render CCV. Its basis relies on the analysis of Large-Eddy Simulation (LES) of flow in a single-cylinder engine used to extract information relating physics to cyclic fluctuations. A one-dimensional CCV model is derived, accounting for variability related to in-cylinder aerodynamics, turbulence and mixture composition. A detailed spark-ignition model is developed, and the resulting model captures the strongly non-linear interactions between flow and combustion, starting from spark ignition and covering laminar/turbulent transition and wrinkling of the flame surface. A first validation is presented against dedicated experimental data from a single-cylinder engine. Detailed comparisons between measurements and predictions are reported on a set of parametric variations around a reference point to assess the physical bases of the model. The resulting model is applied to the simulation of the operating map of a multi-cylinder turbocharged engine. It is found able to reproduce CCV without the need to perform specific LES of that engine, highlighting a certain level of generality of the developed model. |
import React from 'react'
import SectionIndexLayout from '../components/shared/section-index-layout'
import styled from '@emotion/styled'
import { graphql, Link } from 'gatsby'
import { mq } from '../components/shared/global-styles'
import SEO from '../components/shared/seo'
export default function CaseStudiesIndex(data: any) {
const studies = data.data.allMdx.edges.map((edge: any) => edge.node)
return (
<>
<SEO
title="Case Studies — <NAME>"
description="Overview and retrospective of independent client work at <EMAIL>."
/>
<SectionIndexLayout title="Case Studies">
<CaseStudies>
{studies.map((study: any, index: number) => (
<CaseStudy key={study.id} className="text">
<Link to={study.fields.slug}>
<Title>{study.frontmatter.title}</Title>
<Subtitle>{study.frontmatter.subtitle}</Subtitle>
<Breadcrumbs>
{study.frontmatter.breadcrumbs.map((bc: string) => (
<li key={bc}>{bc}</li>
))}
</Breadcrumbs>
<Number>
{index + 1 < 10 ? `0${index + 1}` : `${index + 1}`}.
</Number>
</Link>
</CaseStudy>
))}
</CaseStudies>
</SectionIndexLayout>
</>
)
}
export const query = graphql`
{
allMdx(
filter: { frontmatter: { type: { regex: "/case-study/" } } }
sort: { fields: [frontmatter___date], order: DESC }
) {
edges {
node {
id
excerpt(pruneLength: 280)
frontmatter {
title
subtitle
breadcrumbs
type
}
fields {
slug
}
}
}
}
}
`
const Number = styled.p`
margin: 0;
grid-column: 1;
grid-row: 2;
font-family: var(--ss-font2);
font-weight: var(--bold);
font-size: var(--font-x-small);
color: var(--color-dark-1);
`
const Title = styled.h2`
margin: 0;
grid-column: 2;
grid-row: 2;
font-size: var(--font-x-large);
font-weight: var(--medium);
color: var(--color-black);
`
const Subtitle = styled.p`
margin: 0;
grid-row: 3;
grid-column: 2;
font-size: var(--font-medium);
color: var(--color-dark-1);
`
const Breadcrumbs = styled.ul`
margin: 0;
padding: 0;
list-style: none;
grid-column: 2;
grid-row: 1;
display: flex;
flex-direction: row;
font-size: var(--font-x-small);
font-weight: var(--medium);
color: var(--color-dark-1);
text-transform: uppercase;
> li {
font-family: var(--ss-font2);
color: var(--color-dark-1);
}
> li:not(:last-of-type) {
padding-right: var(--base-gap);
}
`
const CaseStudy = styled.li`
> a {
display: grid;
grid-template-columns: var(--font-large) 1fr;
align-items: center;
grid-gap: var(--base-gap);
text-decoration: none;
}
${mq[2]} {
> a {
grid-template-columns: 1fr;
}
${Number} {
grid-row: 1;
}
${Breadcrumbs} {
grid-row: 2;
}
${Title} {
grid-row: 3;
}
    ${Subtitle} {
grid-row: 4;
}
${Number}, ${Title}, ${Subtitle}, ${Breadcrumbs} {
grid-column: unset;
}
}
`
export const CaseStudies = styled.ul`
margin: 0 0 0 calc((var(--font-large) * -1) - var(--base-gap));
padding: 0;
list-style: none;
max-width: 850px;
${CaseStudy}:not(:first-of-type) {
margin-top: var(--base-padding);
}
${mq[2]} {
margin-left: 0;
${CaseStudy}:not(:first-of-type) {
margin-top: 80px;
}
}
`
|
# Greedy: each of the n weapons deals `a` damage per swing (reusable) or `b`
# damage when thrown (once). Throw only weapons whose throw damage exceeds the
# best swing damage amax, then finish with swings of amax.
import math

n, h = map(int, input().split())
c = []
amax = 0
for i in range(n):
    a, b = map(int, input().split())
    if a > amax:
        amax = a
    c.append(b)
c.sort(reverse=True)
rt = [num for num in c if num > amax]
if h <= sum(rt):
    # The strongest throws alone are enough.
    sum1 = 0
    i = 0
    while sum1 < h:
        sum1 += rt[i]
        i += 1
    print(i)
else:
    # Use every worthwhile throw, then swing amax for the remainder.
    nt = math.ceil((h - sum(rt)) / amax)
    print(len(rt) + nt)
|
// generateparenthesesgo
// description: Generate Parentheses
// details:
// Given n pairs of parentheses, write a function to generate all combinations of well-formed parentheses.
// author(s) [red_byte](https://github.com/i-redbyte)
// see generateparentheses_test.go
package generateparentheses
import "strings"
func GenerateParenthesis(n int) []string {
result := make([]string, 0)
maxLen := 2 * n
var recursiveComputation func(s []string, left int, right int)
recursiveComputation = func(s []string, left int, right int) {
if len(s) == maxLen {
result = append(result, strings.Join(s, ""))
return
}
if left < n {
s = append(s, "(")
recursiveComputation(s, left+1, right)
s = s[:len(s)-1]
}
if right < left {
s = append(s, ")")
recursiveComputation(s, left, right+1)
			s = s[:len(s)-1] // backtrack; effectively a no-op here since the closure returns immediately after
}
}
recursiveComputation(make([]string, 0), 0, 0)
return result
}
|
/**
* Copyright 2020-2021 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_FRONTEND_PARALLEL_ALLREDUCE_FUSION_ALLREDUCE_GRAPH_H_
#define MINDSPORE_CCSRC_FRONTEND_PARALLEL_ALLREDUCE_FUSION_ALLREDUCE_GRAPH_H_
#include <memory>
#include <set>
#include <utility>
#include <vector>
#include "utils/hash_map.h"
#include "utils/hash_set.h"
#include "ir/anf.h"
#include "frontend/parallel/allreduce_fusion/allreduce_node.h"
#include "frontend/parallel/status.h"
namespace mindspore {
namespace parallel {
class AllreduceGraph {
public:
AllreduceGraph()
: head_cnode_(nullptr),
arnode_set_(),
arnode_vec_(),
cnode_set_(),
para_cnode_map_(),
para_cnodeset_map_(),
cnode_paraset_map_(),
cnode_arnode_map_(),
max_(0) {}
virtual ~AllreduceGraph() = default;
Status AddNode(const CNodePtr &node, const AnfNodePtr ¶);
Status AddEdge(const CNodePtr &from, const CNodePtr &to, double dist);
bool NodeInGraph(const CNodePtr &node) const;
std::vector<AnfNodePtr> GetParaByCost(double from, double to);
// Find the first several AllreduceNode whose depend_feat_size is less than to, the sum of whose parameter size is
// over para_size.
// Return the parameter AnfNodePtr vector corresponding to these AllreduceNodes and the smallest depend_feat_size.
// If the sum of left AllreduceNode's parameter size is less than para_size, the returned depend_feat_size must be 0.
std::pair<std::vector<AnfNodePtr>, double> GetParaByParaSize(double to, double para_size);
// If one parameter is used by multiple AllreduceNode, parameter belong to the last node for backward computation
// is saved by the corresponding AllreduceNode, parameters belong to other AllreduceNode are removed.
// Called during precise optimization, not implemented temporarily.
void SortArnode();
Status RemoveExtraParas();
void PrintCNodeSet() const;
void PrintAllredueGraphInfo() const;
void PrintArnodeVec() const;
void PrintArnodeSet() const;
const mindspore::HashSet<CNodePtr> &cnode_set() const { return cnode_set_; }
CNodePtr head_cnode() const { return head_cnode_; }
Status set_head_cnode(const CNodePtr &node);
double max() const { return max_; }
private:
CNodePtr head_cnode_;
std::set<AllreduceNodePtr> arnode_set_;
std::vector<AllreduceNode> arnode_vec_;
mindspore::HashSet<CNodePtr> cnode_set_;
// If One ParameterPtr is used by multiple CNode, the last node for backward computation is saved.
mindspore::HashMap<AnfNodePtr, std::vector<CNodePtr>> para_cnode_map_;
// One ParameterPtr may be used by multiple CNode
mindspore::HashMap<AnfNodePtr, mindspore::HashSet<CNodePtr>> para_cnodeset_map_;
// Multiple Parameter may be inputs to the same CNode
mindspore::HashMap<CNodePtr, mindspore::HashSet<AnfNodePtr>> cnode_paraset_map_;
mindspore::HashMap<CNodePtr, AllreduceNodePtr> cnode_arnode_map_;
double max_;
};
} // namespace parallel
} // namespace mindspore
#endif // MINDSPORE_CCSRC_FRONTEND_PARALLEL_ALLREDUCE_FUSION_ALLREDUCE_GRAPH_H_
|
// GetDataVolume fetches the DataVolume with the given name from the namespace derived
// from the passed kubeconfig, and returns nil without an error if the object is not found.
func (d *defaultDataVolumeManager) GetDataVolume(ctx context.Context, kubeconfig []byte, name string) (*cdicorev1alpha1.DataVolume, error) {
c, namespace, err := d.client.GetClient(kubeconfig)
if err != nil {
return nil, errors.Wrap(err, "could not create kubevirt client")
}
dataVolume := &cdicorev1alpha1.DataVolume{}
if err := c.Get(ctx, client.ObjectKey{Name: name, Namespace: namespace}, dataVolume); err != nil {
if kerrors.IsNotFound(err) {
return nil, nil
}
return nil, errors.Wrapf(err, "could not get DataVolume: %s", name)
}
return dataVolume, nil
} |
// file_op_func.cpp
#include <stdlib.h>
#include <iostream>
#include <stdio.h>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
#include <algorithm>
#include <string.h>
// the headers below are Linux-specific
#include <unistd.h>
#include <dirent.h>
#include <sys/stat.h>
// #include "ms_info_head.h"
using namespace std;
// Recursively collect into file_names the paths (relative to the starting
// directory) of all files under dir whose names contain file_type.
// ancestor_dir carries the path prefix accumulated across recursive calls.
// NOTE: the fixed 120-byte buffers below can overflow on deeply nested paths.
void printdir(char * dir, vector<string> &file_names, char * ancestor_dir, char * file_type)
{
    // If dir itself names a matching file, record it directly and stop.
    if (strstr(dir, file_type) != NULL) {
        file_names.push_back(string(dir));
        return;
    }
    DIR *Dp;
    struct dirent *enty;
    struct stat statbuf;
    if (NULL == (Dp = opendir(dir)))
    {
        fprintf(stderr, "can not open dir:%s\n", dir);
        return;
    }
    if (chdir(dir) != 0)
    {
        fprintf(stderr, "can not enter dir:%s\n", dir);
        closedir(Dp);
        return;
    }
    char dir_tmp[120];
    char names_tmp[120];
    while (NULL != (enty = readdir(Dp)))
    {
        lstat(enty->d_name, &statbuf);
        if (S_ISDIR(statbuf.st_mode))
        {
            // Skip the current and parent directory entries.
            if (0 == strcmp(".", enty->d_name) || 0 == strcmp("..", enty->d_name))
            {
                continue;
            }
            // Extend the accumulated prefix with this directory and recurse.
            strcpy(names_tmp, ancestor_dir);
            strcat(names_tmp, dir);
            strcat(names_tmp, "/");
            printdir(enty->d_name, file_names, names_tmp, file_type);
        }
        else
        {
            // Keep only files whose names contain file_type (e.g. ".mgf").
            if (strstr(enty->d_name, file_type) != NULL)
            {
                strcpy(dir_tmp, ancestor_dir);
                strcat(dir_tmp, dir);
                strcat(dir_tmp, "/");
                strcat(dir_tmp, enty->d_name);
                file_names.push_back(string(dir_tmp));
            }
        }
    }
    if (chdir("..") != 0)
    {
        fprintf(stderr, "can not leave dir:%s\n", dir);
    }
    closedir(Dp);
}
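// --- Hypothetical usage sketch (not part of the original file) ---
// Collect every ".mgf" file under ./data, with paths relative to the start directory:
//
//   vector<string> files;
//   char start[] = "data";
//   char prefix[] = "";          // no accumulated prefix yet
//   char type[] = ".mgf";
//   printdir(start, files, prefix, type);
//   for (const string &f : files) cout << f << endl;
//
// Note that printdir() calls chdir(), so the process working directory moves
// during the walk; run it from the directory that contains `start`.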
def weighted_choice(self, board: Union[chess.Board, int], *, exclude_moves: Container[chess.Move] = [], random: Optional[random.Random] = None) -> Entry:
    """Pick one book entry for *board* at random, weighted by entry weight."""
    total_weights = sum(entry.weight for entry in self.find_all(board, exclude_moves=exclude_moves))
    if not total_weights:
        raise IndexError()
    choice = _randint(random, 0, total_weights - 1)
    # Walk the entries again, accumulating weights until the random point is passed.
    current_sum = 0
    for entry in self.find_all(board, exclude_moves=exclude_moves):
        current_sum += entry.weight
        if current_sum > choice:
            return entry
    assert False
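# A minimal usage sketch (assumes the python-chess polyglot API; the book path is a placeholder):
#
#   import chess
#   import chess.polyglot
#
#   board = chess.Board()
#   with chess.polyglot.open_reader("books/example.bin") as reader:
#       try:
#           entry = reader.weighted_choice(board)
#           print(entry.move, entry.weight)
#       except IndexError:
#           print("position not in book")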
// app/src/main/java/com/nextek/nchcontrol/Bluetooth_Service.java (repo: langstonhowley/NCH-Wifi-Controller-Android)
/*******************************************************************************
* Copyright 2019 Nextek Power Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.nextek.nchcontrol;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.app.Service;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothManager;
import android.bluetooth.BluetoothSocket;
import android.content.Intent;
import android.os.Binder;
import android.os.Build;
import android.os.IBinder;
import android.util.Log;
import androidx.annotation.RequiresApi;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.UUID;
public class Bluetooth_Service extends Service {
private final IBinder mBinder = new LocalBinder();
private GlobalState gs;
private BluetoothAdapter ba;
private BluetoothManager bm;
private BluetoothSocket bs;
ConnectThread ct;
MessageThread mt;
private final String TAG = "Bluetooth Service";
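    // Standard Serial Port Profile (SPP) UUID; assumed here to match the NCH's RFCOMM service.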
private final String mUUID = "00001101-0000-1000-8000-00805f9b34fb";
private boolean connected, failed = false;
private String nch_return;
class LocalBinder extends Binder {
Bluetooth_Service getService() {
return Bluetooth_Service.this;
}
}
public IBinder onBind(Intent intent) {
//Log.e(TAG, "onBind: Returning mBinder");
gs = (GlobalState) getApplicationContext();
return mBinder;
}
@Override
public void onCreate() {
super.onCreate();
if (bm == null && ba == null) {
bm = (BluetoothManager) getSystemService(BLUETOOTH_SERVICE);
if (bm != null) {
ba = bm.getAdapter();
} else {
Log.e(TAG, "onCreate: Bluetooth Manager is null", null);
}
}
}
@Override
public void onDestroy() {
super.onDestroy();
disconnect();
}
private class ConnectThread extends Thread {
ConnectThread() {
try {
bs = gs.getDevice().createRfcommSocketToServiceRecord(UUID.fromString(mUUID));
if (bs == null) {
Log.e(TAG, "ConnectThread: BLUETOOTH SOCKET IS NULL", null);
}
} catch (Exception e) {
Log.e(TAG, "ConnectThread: creation of socket failed: ", e);
}
}
public void run() {
try {
if (bs == null) {
Log.e(TAG, "run: BLUETOOTH SOCKET IS NULL", null);
}
                // Direct call; invoking connect() via reflection is unnecessary.
                bs.connect();
connected = true;
Log.e(TAG, "run: Connected to device", null);
mt = new MessageThread();
mt.start();
} catch (Exception e) {
Log.e(TAG, "run: Exception occurred: ", e);
sendBroadcast(new Intent("com.example.nchcontrol.DISCONNECTED"));
failed = true;
try {
bs.close();
} catch (Exception e1) {
Log.e(TAG, "run: Failed to close socket: ", e1);
}
}
}
}
public class MessageThread extends Thread {
private InputStream is;
private OutputStream os;
MessageThread() {
try {
is = bs.getInputStream();
os = bs.getOutputStream();
} catch (Exception e) {
Log.e(TAG, "MessageThread: Creation of one of the streams failed: ", e);
}
}
public void run() {
sendBroadcast(new Intent("com.example.nchcontrol.CONNECTED"));
int bytes;
while (true) {
try {
byte[] buffer = new byte[256];
bytes = is.read(buffer);
String s = new String(buffer, 0, bytes);
if (!s.equals("CONNECTED")) {
nch_return = s;
}
Log.e(TAG, "run() returned: " + nch_return, null);
} catch (Exception e) {
Log.e(TAG, "MessageThread run: Failed to read: ", e);
break;
}
}
}
void write(byte[] bytes) {
try {
os.write(bytes);
os.flush();
} catch (Exception e) {
Log.e(TAG, "MessageThread write: Failed to write: ", e);
}
}
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
public void search() {
Log.e(TAG, "search: Starting search", null);
ba.startDiscovery();
}
public void stopSearch() {
Log.e(TAG, "stopSearch: Stopping Search", null);
ba.cancelDiscovery();
}
@SuppressLint("StaticFieldLeak")
@TargetApi(Build.VERSION_CODES.KITKAT)
public void pair() {
final boolean[] paired = {false};
try {
for (int i = 0; i < ba.getBondedDevices().size(); i++) {
if (gs.getDevice().equals(ba.getBondedDevices().toArray()[i])) {
paired[0] = true;
Log.e(TAG, "connect: Device already paired", null);
connect();
}
}
            if (!paired[0]) {
                // createBond() is public from API 19 (KITKAT), so reflection is unnecessary here.
                // Note: bonding is asynchronous, so the state check below may still read BONDING.
                gs.getDevice().createBond();
                if (gs.getDevice().getBondState() == BluetoothDevice.BOND_BONDED) {
Log.e(TAG, "pair: Device paired.", null);
failed = false;
} else {
Log.e(TAG, "pair: Device failed to pair", null);
}
}
} catch (Exception e) {
Log.e(TAG, "pair: Exception triggered: ", e);
}
}
public void connect() {
sendBroadcast(new Intent("com.example.nchcontrol.CONNECTING"));
ct = new ConnectThread();
ct.start();
}
public void disconnect() {
try {
if (bs != null) {
bs.close();
}
} catch (IOException e) {
e.printStackTrace();
}
failed = false;
sendBroadcast(new Intent("com.example.nchcontrol.DISCONNECTED"));
}
public void sendMessage(String message) {
Log.e(TAG, "sendMessage: SENDING " + message + " TO THE NCH.", null);
mt.write(message.getBytes());
}
public String getNch_return() {
if (nch_return == null || nch_return.equals("null")) {
return null;
}
Log.e(TAG, "getNch_return: SENDING VALUE TO MAIN: " + nch_return, null);
return nch_return;
}
public void clearNch_return() {
Log.e(TAG, "clearNch_return: CLEARED", null);
nch_return = null;
}
public boolean getFailed() {
return failed;
}
public void resetFailed() {
failed = false;
}
public BluetoothAdapter getBa() {
return ba;
}
}
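// --- Hypothetical usage sketch (not part of the original file) ---
// An Activity would typically bind to this service and drive a connection like so;
// the ServiceConnection plumbing and GlobalState setup are assumed to exist elsewhere:
//
//   Bluetooth_Service svc = ((Bluetooth_Service.LocalBinder) binder).getService();
//   svc.pair();                      // pairs (if needed) and then connects
//   svc.sendMessage("STATUS");       // "STATUS" is a placeholder command
//   String reply = svc.getNch_return();
//   svc.clearNch_return();
//   svc.disconnect();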
// src/entities/ErrorEntity.ts
export type Errors =
| "auth/invalid-email"
| "auth/user-not-found"
| "auth/invalid-password"
| "auth/user-exist"
| "auth/password-validation-failed"
| "auth/wrong-password"
| "auth/weak-password"
| "auth/email-already-in-use"
| "auth/auth-domain-config-required"
| "auth/operation-not-supported-in-this-environment"
| "auth/unauthorized-domain"
export class ErrorEntity {
constructor(private error: Errors) {}
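  // Maps Firebase Auth-style error codes to French user-facing messages.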
getMessage() {
if (this.error === "auth/invalid-password")
return "Le mot de passe ne correspond pas"
if (this.error === "auth/wrong-password")
return "Le mot de passe ne correspond pas"
if (this.error === "auth/user-not-found") return "L'email ne correspond pas"
if (this.error === "auth/password-validation-failed")
return "Les mots de passe ne sont pas identiques"
if (this.error === "auth/weak-password")
return "Le mot de passe est trop faible"
return "Une erreur est survenue"
}
}
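// A minimal usage sketch (assumed caller; the shape of the caught error depends on the auth library):
//
//   try {
//     await signIn(email, password)
//   } catch (e) {
//     const message = new ErrorEntity(e.code as Errors).getMessage()
//     showToast(message) // showToast is a placeholder UI helper
//   }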
def main(args):
log_level = logging.DEBUG if args.verbose else logging.WARNING
logging.basicConfig(level=log_level, filename=args.logfile)
word_counts = Counter()
logging.info('Processing files...')
for file_name in args.infiles:
try:
process_file(file_name, word_counts)
except FileNotFoundError:
logging.warning(f'{file_name} not processed: File does not exist')
except PermissionError:
logging.warning(f'{file_name} not processed: No permission to read file')
except Exception as error:
logging.warning(f'{file_name} not processed: {error}')
    utilities.collection_to_csv(word_counts, num=args.num)
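# A minimal argument-parsing sketch (assumed entry point; flag names inferred from main() above):
#
#   if __name__ == '__main__':
#       parser = argparse.ArgumentParser(description='Count words across input files.')
#       parser.add_argument('infiles', nargs='*', help='files to process')
#       parser.add_argument('-n', '--num', type=int, default=None, help='limit output rows')
#       parser.add_argument('-v', '--verbose', action='store_true', help='enable debug logging')
#       parser.add_argument('-l', '--logfile', default=None, help='log file path')
#       main(parser.parse_args())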