When I wrote that location would be this year’s Twitter at SXSW, I also meant that Twitter’s geolocation would be this year’s Twitter at SXSW. Today, the service turned on geolocation on its website for the first time. While Twitter’s geolocation feature has been live through its API since last November, there was no sign of integration into the main twitter.com site until now.

As you can see in the screenshot above, tweets tagged with location show a placemarker right next to the source of the tweet. When you hover over it, it turns blue, and clicking on it brings up a small Google map showing the location the tweet was sent from. You can see these maps as overlays both on individual tweet pages and on tweets in your main stream. In some cases, depending on how Twitter’s geolocation API is being used, place names even appear to be passed through to Twitter. For example, here’s a tweet sent from Foursquare that also says where the tweet is being sent from.

The timing of this move by Twitter is significant. Earlier today, the New York Times reported that Facebook would unveil its answer to location next month at its f8 conference. Twitter’s first-ever Chirp conference takes place just one week before f8. Google, meanwhile, is in the game with Latitude and to some extent Buzz (but could have been in it a lot more). And of course, every app and their mother appears to be launching with some sort of location functionality at SXSW in Austin, Texas, which begins on Friday. Many of those apps use Twitter’s geolocation API to pass the data back to Twitter, so it makes sense that this would be a good time to turn the functionality on for the website.

Update: It looks like Twitter has just turned off the location functionality after having it on for a bit. Look for it to come back shortly — certainly some time before SXSW. [thanks Chad]
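For developers, geotagging a tweet at the time meant passing latitude and longitude along with the status update. Here is a minimal sketch in Python; the endpoint path, parameter names, and credentials are stated from memory as an illustration of how such a call looked, not taken from this article.

import requests
from requests_oauthlib import OAuth1

# Hypothetical credentials; real values come from a registered Twitter app.
auth = OAuth1("CONSUMER_KEY", "CONSUMER_SECRET", "ACCESS_TOKEN", "ACCESS_SECRET")

# statuses/update accepted lat/long so clients could geotag a tweet;
# display_coordinates controlled whether the exact point was shown.
resp = requests.post(
    "https://api.twitter.com/1.1/statuses/update.json",
    data={
        "status": "Heading to SXSW",
        "lat": "30.2672",
        "long": "-97.7431",
        "display_coordinates": "true",
    },
    auth=auth,
)
print(resp.status_code)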
def refresh(self, index):
    path = self._make_path([index, '_refresh'])
    self.send_request('POST', path)
    return True
# inb-co/joob: tests/backend_tests.py
import logging
from unittest import mock
from unittest.case import TestCase

from easy_job.result_backends.log import LogResultBackend

__author__ = 'Apollo'


class ResultBackendLogTestCase(TestCase):

    @mock.patch("logging.getLogger")
    def test_normal_logging_function(self, logger):
        assert isinstance(logger, mock.MagicMock)
        # Arrange
        logger_instance = logger.return_value = mock.MagicMock()
        # Act
        log_backend = LogResultBackend(logger="foo")
        log_backend.store(
            "sample task",
            "sample result"
        )
        # Assert
        _, args, kwargs = logger.mock_calls[0]  # logging.getLogger call
        self.assertEqual(args[0], "foo")
        _, args, kwargs = logger_instance.mock_calls[0]  # logger.log call
        self.assertEqual(kwargs, {
            'level': logging.DEBUG,
            'msg': 'sample task -> sample result'
        })

    def test_normal_logging_function_using_logger_instance(self):
        # Arrange
        logger_instance = mock.MagicMock()
        # Act
        log_backend = LogResultBackend(logger_instance=logger_instance)
        log_backend.store(
            "sample task",
            "sample result"
        )
        # Assert
        _, args, kwargs = logger_instance.mock_calls[0]  # logger.log call
        self.assertEqual(kwargs, {
            'level': logging.DEBUG,
            'msg': 'sample task -> sample result'
        })
def convert_game(self, file_name, bd_size):
    with open(file_name, 'r') as file_object:
        state_action_iterator = sgf_iter_states(file_object.read(), include_end=False)

        for (state, move, player, result) in state_action_iterator:
            if result is None:
                raise NoResultError()
            if state.size != bd_size:
                raise SizeMismatchError()
            nn_input = self.feature_processor.state_to_tensor(state)
            if move == go.PASS_MOVE:
                move = (19, 0)
            yield (nn_input, move, result)
def calc_even_odd(movie_path, even_path, odd_path, recursive=True, fbinning=utils.fourier_binning):
    even_files = []
    odd_files = []
    for (dirpath, dirnames, filenames) in os.walk(even_path):
        for filename in filenames:
            if filename.endswith(utils.SUPPORTED_FILES):
                even_files.append(os.path.join(dirpath, filename))
    for (dirpath, dirnames, filenames) in os.walk(odd_path):
        for filename in filenames:
            if filename.endswith(utils.SUPPORTED_FILES):
                odd_files.append(os.path.join(dirpath, filename))

    try:
        os.makedirs(even_path)
    except FileExistsError:
        pass
    try:
        os.makedirs(odd_path)
    except FileExistsError:
        pass

    movies_to_split = []
    filenames_even = list(map(os.path.basename, even_files))
    filenames_odd = list(map(os.path.basename, odd_files))

    if movie_path:
        for (dirpath, dirnames, filenames) in os.walk(movie_path):
            for filename in filenames:
                if filename.endswith(utils.SUPPORTED_FILES):
                    if filename not in filenames_even and filename not in filenames_odd:
                        path = os.path.join(dirpath, filename)
                        movies_to_split.append((path, filename))
            if recursive == False:
                break

    for tuble_index, movie_tuble in enumerate(movies_to_split):
        path, filename = movie_tuble
        print("Create even/odd average for:", path,
              "( Progress: ", int(100 * tuble_index / len(movies_to_split)), "% )")
        even, odd = utils.create_image_pair(path, fbinning)
        out_even_path = os.path.join(even_path, filename)
        out_odd_path = os.path.join(odd_path, filename)
        if path.endswith(("mrcs", "mrc")):
            with mrcfile.new(out_even_path, overwrite=True) as mrc:
                mrc.set_data(even)
            with mrcfile.new(out_odd_path, overwrite=True) as mrc:
                mrc.set_data(odd)
        elif path.endswith((".tif", ".tiff")):
            tifffile.imwrite(out_even_path, even)
            tifffile.imwrite(out_odd_path, odd)
        even_files.append(out_even_path)
        odd_files.append(out_odd_path)
        filenames_even.append(filename)
        filenames_odd.append(filename)

    print("Reset progress bar: ( Progress: -1 % )")
    return even_files, odd_files
/**
 * We don't want filterNonZeroIds to iterate through the array if there are no zeros in it.
 */
@Test
public void filterNonZeroIds_returns_theSameInstanceOfArray_whenThereAre_noZeros_inPassedArray() {
    StaggeredAnimationGroup spiedGroup = prepareSpiedGroup();
    int[] testIds = new int[]{1, 2, 3, 4};

    int[] filtered = spiedGroup.filterNonZeroIds(testIds);

    // Compare the result against the input array rather than against itself,
    // so the "same instance" contract in the test name is actually checked.
    assertThat(filtered).isSameAs(testIds);
}
On Monday, Minnesota state Rep. Mary Franson (R) told the local Fox affiliate in Alexandria that she had refused to meet with a local high school group, the Alexandria Area High School (AAHS) Democrats, because she doesn't "meet with partisan organizations," adding, "this has absolutely nothing to do with me not wanting to meet with Democrats — it has everything to do with the fact that we all have to be careful in today's world." Franson elaborated on a private Facebook post, ThinkProgress reports, suggesting the high schoolers might accuse her of sexual misconduct. "A man's life was destroyed in AL," Franson wrote, pointing to failed Alabama Senate candidate Roy Moore. "40 years ago he met with minors alone and they recently accused him of horrendous actions. In the world of we must believe every sexual harassment claim, I would think my approach is beyond reasonable. All it takes is one perceived action and my life is destroyed. The life of my family is destroyed. That is a risk I will not take." AAHS Democrats founder Jack Ballou, 17, found her reply perplexing when ThinkProgress read it to him (Franson has apparently blocked him on Facebook). "If any of what she said was true, she could have just told us initially that she doesn't meet with minors," he said. "I'm also so confused how she started defending Roy Moore ... last year I met with Mary through the student page program, one on one at her office. She had no issue then." Ballou's group said it had requested to meet with its local representative to discuss issues of concern to high schoolers, like climate change and college affordability. He told Fox 9 that regarding Franson's refusal to meet, "I think it's really a microcosm of what's happening at our national stage — people just aren't talking to each other." Peter Weber
from src.server.instance import server
from controllers.books import *

server.run()
// DeriveKey derives a new secret key and chain code from an existing secret key and chain code
func (sk *SecretKey) DeriveKey(t *merlin.Transcript, cc [ChainCodeLength]byte) (*ExtendedKey, error) {
    pub, err := sk.Public()
    if err != nil {
        return nil, err
    }

    sc, dcc := pub.DeriveScalarAndChaincode(t, cc)

    nonce := [32]byte{}
    _, err = rand.Read(nonce[:])
    if err != nil {
        return nil, err
    }

    dsk, err := ScalarFromBytes(sk.key)
    if err != nil {
        return nil, err
    }
    dsk.Add(dsk, sc)

    dskBytes := [32]byte{}
    copy(dskBytes[:], dsk.Encode([]byte{}))

    skNew := &SecretKey{
        key:   dskBytes,
        nonce: nonce,
    }

    return &ExtendedKey{
        key:       skNew,
        chaincode: dcc,
    }, nil
}
// pkg/digit_video_recorder/driver/digit_video_recorder.go
package digit_video_recorder_driver

import (
    "io"
    "os"
    "sync"
    "time"

    "github.com/spf13/viper"
)

type Record struct {
    Id      string    `yaml:"id"`
    StartAt time.Time `yaml:"start_at"`
    EndAt   time.Time `yaml:"end_at"`
    Path    string    `yaml:"path"`
}

func (r *Record) Reader() (io.Reader, error) {
    return os.Open(r.Path)
}

func (r *Record) Data() map[string]interface{} {
    return map[string]interface{}{
        "id":       r.Id,
        "start_at": r.StartAt.Unix(),
        "end_at":   r.EndAt.Unix(),
        "path":     r.Path,
    }
}

type DigitVideoRecorderDriverOption struct {
    *viper.Viper
}

func (o *DigitVideoRecorderDriverOption) Sub(key string) *DigitVideoRecorderDriverOption {
    sub := o.Viper.Sub(key)
    if sub == nil {
        return nil
    }
    return &DigitVideoRecorderDriverOption{sub}
}

type DigitVideoRecorderState struct {
    state string
}

func (s *DigitVideoRecorderState) String() string {
    return s.state
}

var (
    DIGITI_VIDEO_RECORDER_STATE_ON  = &DigitVideoRecorderState{state: "on"}
    DIGITI_VIDEO_RECORDER_STATE_OFF = &DigitVideoRecorderState{state: "off"}
)

type DigitVideoRecorderDriver interface {
    Start() error
    Stop() error
    State() *DigitVideoRecorderState
    GetRecord(id string) (*Record, error)
    ListRecords(ListRecordsFitler) ([]*Record, error)
}

type DigitVideoRecorderDriverFactory func(opt *DigitVideoRecorderDriverOption, args ...interface{}) (DigitVideoRecorderDriver, error)

var digit_video_recorder_driver_factories map[string]DigitVideoRecorderDriverFactory
var digit_video_recorder_driver_factories_once sync.Once

func register_digit_video_recorder_driver_factory(name string, fty DigitVideoRecorderDriverFactory) {
    digit_video_recorder_driver_factories_once.Do(func() {
        digit_video_recorder_driver_factories = make(map[string]DigitVideoRecorderDriverFactory)
    })
    digit_video_recorder_driver_factories[name] = fty
}

func NewDigitVideoRecorderDriver(name string, opt *DigitVideoRecorderDriverOption, args ...interface{}) (DigitVideoRecorderDriver, error) {
    fty, ok := digit_video_recorder_driver_factories[name]
    if !ok {
        return nil, ErrInvalidDigitVideoRecorderDriver
    }
    return fty(opt, args...)
}
a = str(input())
s = 0
for i in range(0, len(a) + 1):
    for j in range(i, len(a) + 1):
        b = a[i:j + 1]
        c = b[::-1]
        if b != c:
            s = max(s, len(b))
if s == 0:
    print(0)
else:
    print(s)
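The brute force above is cubic in the string length. Assuming the task is the usual "longest substring that is not a palindrome" problem, a constant-size observation gives a linear-time answer: if every character is identical the answer is 0; if the whole string is not a palindrome the answer is its full length; otherwise dropping one end character cannot leave a palindrome, so the answer is the length minus one. A minimal sketch of that shortcut:

def longest_non_palindrome(a: str) -> int:
    # All characters equal: every substring is a palindrome.
    if len(set(a)) <= 1:
        return 0
    # The whole string is already non-palindromic.
    if a != a[::-1]:
        return len(a)
    # A palindrome with at least two distinct characters:
    # removing one end character always breaks the palindrome.
    return len(a) - 1

print(longest_non_palindrome(input()))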
#include <cstdio>
#include <cstring>
#include <cstdlib>

const int maxn = 5000 + 1;
const int maxm = 1000000 + 10;

int n, k;
int a[maxn];

struct Tnode {
    int x, y;
    int next;
} q[maxn * maxn / 2], tmp;

int h[maxm];
int mark[10], len;
int ma, de, tot;

bool check(int s) {
    for (int i = 0; i < len; i++)
        if (mark[i] == s) return 1;
    return 0;
}

int L;

int main() {
    scanf("%d%d", &n, &k);
    ma = 0;
    for (int i = 1; i <= n; i++) {
        scanf("%d", &a[i]);
        ma = (ma > a[i]) ? ma : a[i];
    }
    for (int i = 0; i < maxm; i++) h[i] = -1;
    L = 0;
    for (int i = 1; i < n; i++)
        for (int j = i + 1; j <= n; j++) {
            de = abs(a[i] - a[j]);
            q[L].x = i; q[L].y = j;
            q[L].next = h[de];
            h[de] = L;
            L++;
        }
    for (int res = 1; res <= ma; res++) {
        len = 0; tot = 0;
        for (int j = 0; j * res <= ma; j++) {
            if (h[j * res] == -1) continue;
            tmp = q[h[j * res]];
            while (1) {
                if (!check(tmp.x) && !check(tmp.y)) {
                    tot++;
                    mark[len++] = tmp.x;
                    if (tot > k) break;
                }
                if (tmp.next == -1) break;
                tmp = q[tmp.next];
            }
            if (tot > k) break;
        }
        if (tot <= k) { printf("%d\n", res); return 0; }
    }
    puts("-1");
    return 0;
}
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { RoleComponent } from './role/role.component';
import { SharedModule } from './../../shared/shared.module';
import { ScrollPanelModule } from 'primeng/scrollpanel';
import { UsermasterRoutingModule } from '../usermaster/usrmaster-routing.module';
import { GetuserService } from '../../services/getuser-service';
import { TableModule } from 'primeng/table';
import { DropdownModule } from 'primeng/dropdown';
import { FormsModule } from '@angular/forms';
import { CheckboxModule } from 'primeng/checkbox';
import { AutoCompleteModule, FileUploadModule } from 'primeng/primeng';
// import { ReviewndaComponent } from './reviewnda/reviewnda.component';

@NgModule({
  imports: [
    CommonModule,
    SharedModule,
    UsermasterRoutingModule,
    TableModule,
    FormsModule,
    DropdownModule,
    CheckboxModule,
    AutoCompleteModule,
    ScrollPanelModule
  ],
  declarations: [RoleComponent],
  providers: [GetuserService]
})
export class UsermasterModule { }
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT

use gdk_sys;
use glib::translate::*;
use AnchorHints;
use Gravity;
use Rectangle;

glib_wrapper! {
    #[derive(Debug, PartialOrd, Ord, Hash)]
    pub struct PopupLayout(Shared<gdk_sys::GdkPopupLayout>);

    match fn {
        ref => |ptr| gdk_sys::gdk_popup_layout_ref(ptr),
        unref => |ptr| gdk_sys::gdk_popup_layout_unref(ptr),
        get_type => || gdk_sys::gdk_popup_layout_get_type(),
    }
}

impl PopupLayout {
    pub fn new(
        anchor_rect: &Rectangle,
        rect_anchor: Gravity,
        surface_anchor: Gravity,
    ) -> PopupLayout {
        assert_initialized_main_thread!();
        unsafe {
            from_glib_full(gdk_sys::gdk_popup_layout_new(
                anchor_rect.to_glib_none().0,
                rect_anchor.to_glib(),
                surface_anchor.to_glib(),
            ))
        }
    }

    pub fn copy(&self) -> Option<PopupLayout> {
        unsafe { from_glib_full(gdk_sys::gdk_popup_layout_copy(self.to_glib_none().0)) }
    }

    fn equal(&self, other: &PopupLayout) -> bool {
        unsafe {
            from_glib(gdk_sys::gdk_popup_layout_equal(
                self.to_glib_none().0,
                other.to_glib_none().0,
            ))
        }
    }

    pub fn get_anchor_hints(&self) -> AnchorHints {
        unsafe {
            from_glib(gdk_sys::gdk_popup_layout_get_anchor_hints(
                self.to_glib_none().0,
            ))
        }
    }

    pub fn get_anchor_rect(&self) -> Option<Rectangle> {
        unsafe {
            from_glib_none(gdk_sys::gdk_popup_layout_get_anchor_rect(
                self.to_glib_none().0,
            ))
        }
    }

    pub fn get_rect_anchor(&self) -> Gravity {
        unsafe {
            from_glib(gdk_sys::gdk_popup_layout_get_rect_anchor(
                self.to_glib_none().0,
            ))
        }
    }

    pub fn get_surface_anchor(&self) -> Gravity {
        unsafe {
            from_glib(gdk_sys::gdk_popup_layout_get_surface_anchor(
                self.to_glib_none().0,
            ))
        }
    }

    pub fn set_anchor_hints(&self, anchor_hints: AnchorHints) {
        unsafe {
            gdk_sys::gdk_popup_layout_set_anchor_hints(
                self.to_glib_none().0,
                anchor_hints.to_glib(),
            );
        }
    }

    pub fn set_anchor_rect(&self, anchor_rect: &Rectangle) {
        unsafe {
            gdk_sys::gdk_popup_layout_set_anchor_rect(
                self.to_glib_none().0,
                anchor_rect.to_glib_none().0,
            );
        }
    }

    pub fn set_offset(&self, dx: i32, dy: i32) {
        unsafe {
            gdk_sys::gdk_popup_layout_set_offset(self.to_glib_none().0, dx, dy);
        }
    }

    pub fn set_rect_anchor(&self, anchor: Gravity) {
        unsafe {
            gdk_sys::gdk_popup_layout_set_rect_anchor(self.to_glib_none().0, anchor.to_glib());
        }
    }

    pub fn set_surface_anchor(&self, anchor: Gravity) {
        unsafe {
            gdk_sys::gdk_popup_layout_set_surface_anchor(self.to_glib_none().0, anchor.to_glib());
        }
    }
}

impl PartialEq for PopupLayout {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.equal(other)
    }
}

impl Eq for PopupLayout {}
package net.clarenceho.algo.minspanning.test;

import net.clarenceho.algo.minspanning.Kruskal;
import net.clarenceho.algo.minspanning.MinSpanningSolver;
import net.clarenceho.algo.minspanning.Prim;
import net.clarenceho.util.AdjMatrixGraph;
import net.clarenceho.util.Edge;
import net.clarenceho.util.GraphProblem;
import net.clarenceho.util.Node;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

import static org.junit.Assert.assertEquals;

/**
 * Testing the minimum spanning tree algorithms.
 */
public class MinSpanningTest {

    /**
     * Sample case from the book "Introduction To Algorithms", first edition, figure 24.4.
     */
    @Test
    public void testKruskalSample01() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        int size = 9;
        for (int i = 0; i < size; i++) {
            nodes.add(new Node(Character.toString((char) (97 + i))));
        }

        // prepare edges
        int adj[][] = new int[size][size];
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                adj[i][j] = AdjMatrixGraph.NO_PATH;
            }
        }
        adj[0][1] = 4; adj[0][7] = 8;
        adj[1][2] = 8; adj[1][7] = 11;
        adj[2][3] = 7; adj[2][5] = 4; adj[2][8] = 2;
        adj[3][4] = 9; adj[3][5] = 14;
        adj[4][5] = 10;
        adj[5][6] = 2;
        adj[6][7] = 1; adj[6][8] = 6;
        adj[7][8] = 7;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Kruskal(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(8, result.size());
        assertEquals(37, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void testPrimSample01() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        int size = 9;
        for (int i = 0; i < size; i++) {
            nodes.add(new Node(Character.toString((char) (97 + i))));
        }

        // prepare edges
        int adj[][] = new int[size][size];
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                adj[i][j] = AdjMatrixGraph.NO_PATH;
            }
        }
        adj[0][1] = 4; adj[0][7] = 8;
        adj[1][2] = 8; adj[1][7] = 11;
        adj[2][3] = 7; adj[2][5] = 4; adj[2][8] = 2;
        adj[3][4] = 9; adj[3][5] = 14;
        adj[4][5] = 10;
        adj[5][6] = 2;
        adj[6][7] = 1; adj[6][8] = 6;
        adj[7][8] = 7;
        AdjMatrixGraph.copyToLowerHalf(adj); // mirror it across the diagonal

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Prim(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(8, result.size());
        assertEquals(37, result.stream().mapToInt(Edge::getCost).sum());
    }

    /**
     * A simple graph in square shape.
     */
    @Test
    public void testKruskalSample02() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node("a"));
        nodes.add(new Node("b"));
        nodes.add(new Node("c"));
        nodes.add(new Node("d"));

        // prepare edges
        int adj[][] = new int[4][4];
        for (int i = 0; i < 4; i++) {
            for (int j = 0; j < 4; j++) {
                adj[i][j] = AdjMatrixGraph.NO_PATH;
            }
        }
        adj[0][1] = 5; adj[0][3] = 4;
        adj[1][2] = 3;
        adj[2][3] = 2;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Kruskal(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(3, result.size());
        assertEquals(9, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void testPrimSample02() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node("a"));
        nodes.add(new Node("b"));
        nodes.add(new Node("c"));
        nodes.add(new Node("d"));

        // prepare edges
        int adj[][] = new int[4][4];
        for (int i = 0; i < 4; i++) {
            for (int j = 0; j < 4; j++) {
                adj[i][j] = AdjMatrixGraph.NO_PATH;
            }
        }
        adj[0][1] = 5; adj[0][3] = 4;
        adj[1][2] = 3;
        adj[2][3] = 2;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Prim(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(3, result.size());
        assertEquals(9, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void testKruskalSingleNode() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node("a"));

        // prepare edges
        int adj[][] = new int[1][1];
        adj[0][0] = AdjMatrixGraph.NO_PATH;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Kruskal(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(0, result.size());
        assertEquals(0, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void testPrimSingleNode() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node("a"));

        // prepare edges
        int adj[][] = new int[1][1];
        adj[0][0] = AdjMatrixGraph.NO_PATH;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Prim(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(0, result.size());
        assertEquals(0, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void testKruskalSingleEdge() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node("a"));
        nodes.add(new Node("b"));

        // prepare edges
        int adj[][] = new int[2][2];
        for (int i = 0; i < 2; i++) {
            for (int j = 0; j < 2; j++) {
                adj[i][j] = AdjMatrixGraph.NO_PATH;
            }
        }
        adj[0][1] = 3;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Kruskal(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(1, result.size());
        assertEquals(3, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void testPrimSingleEdge() {
        // prepare nodes
        List<Node> nodes = new ArrayList<>();
        nodes.add(new Node("a"));
        nodes.add(new Node("b"));

        // prepare edges
        int adj[][] = new int[2][2];
        for (int i = 0; i < 2; i++) {
            for (int j = 0; j < 2; j++) {
                adj[i][j] = AdjMatrixGraph.NO_PATH;
            }
        }
        adj[0][1] = 3;
        AdjMatrixGraph.copyToLowerHalf(adj);

        GraphProblem problem = new MinSpanningTestCase(nodes, adj);
        MinSpanningSolver solver = new Prim(problem);
        Collection<? extends Edge> result = solver.resolve();

        assertEquals(1, result.size());
        assertEquals(3, result.stream().mapToInt(Edge::getCost).sum());
    }

    @Test
    public void compareSolvers01() {
        int count = 1000;
        int maxSize = 20;
        int minCost = AdjMatrixGraph.MIN_COST / maxSize;
        int maxCost = AdjMatrixGraph.MAX_COST / maxSize;

        for (int c = 0; c < count; c++) {
            int size = ThreadLocalRandom.current().nextInt(1, maxSize + 1);
            List<Node> nodes = new ArrayList<>();
            for (int i = 0; i < size; i++) {
                nodes.add(new Node(Character.toString((char) (97 + i))));
            }

            int adj[][] = new int[size][size];
            for (int i = 0; i < size; i++) {
                for (int j = i; j < size; j++) {
                    if (i == j) {
                        adj[i][j] = AdjMatrixGraph.NO_PATH;
                    } else {
                        adj[i][j] = ThreadLocalRandom.current().nextInt(minCost, maxCost + 1);
                    }
                }
            }
            AdjMatrixGraph.copyToLowerHalf(adj);

            GraphProblem problem = new MinSpanningTestCase(nodes, adj);
            MinSpanningSolver solver1 = new Kruskal(problem);
            Collection<? extends Edge> result1 = solver1.resolve();
            MinSpanningSolver solver2 = new Prim(problem);
            Collection<? extends Edge> result2 = solver2.resolve();

            try {
                assertEquals(result1.size(), result2.size());
                assertEquals(result1.stream().mapToInt(Edge::getCost).sum(),
                        result2.stream().mapToInt(Edge::getCost).sum());
            } catch (AssertionError e) {
                System.err.println("adj matrix:");
                for (int i = 0; i < size; i++) {
                    for (int j = 0; j < size; j++) {
                        System.err.print(adj[i][j] + " ");
                    }
                    System.err.println("");
                }
                System.err.println("Solver1 result:");
                for (Edge edge : result1) {
                    System.err.println(edge);
                }
                System.err.println("Solver2 result:");
                for (Edge edge : result2) {
                    System.err.println(edge);
                }
                throw e;
            }
        }
    }
}
def write_dtmpl_file(tmpl_file, dflt_input_file, parameter_names):
    with open(tmpl_file, "r") as fp:
        txt_base_tmpl = fp.read().split("\n")
    with open(dflt_input_file, "r") as fp:
        txt_dflt_input = fp.read().split("\n")

    for p_name in parameter_names:
        for i, line_tmpl in enumerate(txt_base_tmpl):
            if re.search(p_name, line_tmpl):
                line_input_split = txt_dflt_input[i].strip().split()
                for j, item in enumerate(line_tmpl.strip().split()):
                    if item.startswith("{" + p_name):
                        line_input_split[j] = "{" + p_name + "}"
                txt_dflt_input[i] = " ".join(line_input_split)

    dtmpl_file = os.path.splitext(os.path.basename(tmpl_file))[0] + ".dtmpl"
    with open(dtmpl_file, "w") as fp:
        fp.write("\n".join(txt_dflt_input))
    return dtmpl_file
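A short usage sketch of the function above. The file names and parameter names are hypothetical, and the function's own module is assumed to import `os` and `re` at module level.

# Hypothetical inputs: a template whose lines contain "{alpha...}"-style
# placeholders, and a default input file laid out line-for-line like the template.
dtmpl_path = write_dtmpl_file(
    tmpl_file="run.tmpl",
    dflt_input_file="run_default.in",
    parameter_names=["alpha", "beta"],
)
print(dtmpl_path)  # "run.dtmpl", written to the current working directory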
def pull_arm(self):
    return np.random.normal(loc=0, scale=1) + self.mean
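This looks like the reward method of a stochastic bandit arm: each pull returns the arm's mean plus unit Gaussian noise. A minimal self-contained sketch of the surrounding class and a simple evaluation loop; the GaussianArm name, the mean attribute, and the loop below are assumptions for illustration, not taken from the original code.

import numpy as np

class GaussianArm:
    """Arm whose reward is its mean plus unit Gaussian noise."""
    def __init__(self, mean):
        self.mean = mean

    def pull_arm(self):
        return np.random.normal(loc=0, scale=1) + self.mean

# Estimate each arm's value from a fixed number of pulls.
arms = [GaussianArm(m) for m in (0.1, 0.5, 0.9)]
estimates = [np.mean([arm.pull_arm() for _ in range(1000)]) for arm in arms]
print(int(np.argmax(estimates)))  # index of the empirically best arm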
matrix = []
target_row = target_col = None
for i in range(5):
    entry = list(map(int, input().split()))
    if 1 in entry:
        target_row = i
        target_col = entry.index(1)
        print(abs(target_row - 2) + abs(target_col - 2))
        break
Image copyright Getty Images The only Finnish word to make it into everyday English is "sauna". But what it is, and how much it means to Finns, is often misunderstood - and it's definitely not about flirtation or sex. In a dimly lit wood-panelled room, naked men sit in silence, sweating. One beats himself repeatedly with birch branches. Another stands, takes a ladle of water and carefully pours it over the heated stones of the stove in the corner. There is a hissing noise. Within seconds a wave of moist heat creeps up around your ankles and over your legs before enveloping your whole body. Your pores open up and sweat covers you from head to toe. This bathing ritual has been performed across Finland for thousands of years, ever since the first settlers dug a ditch in the ground and heated a pile of stones. Water was thrown on the hot stones to give off a vapour known as loyly. Each sauna is considered to have its own character and its own distinctive loyly. The better the loyly, the more enjoyable the sauna. Image copyright Getty Images Sauna, noun (rhymes with downer) "An invigorating bath originating in Finland in which the bather is subjected to hot steam, usually followed by a cold plunge or by being lightly beaten with birch twigs" - Collins Dictionary The OED dates its first use in English to 1881 in travel writer Paul Belloni Du Chaillu's The Land of the Midnight Sun: "One of the most characteristic institutions of the country is the Sauna." For those working in the fields in harsh conditions, the sauna provided welcome relief to wash and soothe aching muscles. These warm wooden rooms could be used at lower temperatures too, and were at the heart of the major events of a Finn's life. Women gave birth in them because the walls of traditional smoke saunas were lined with naturally bacteria-resistant soot, making them the cleanest room in the house. Saunas were also the place for purification rituals before marriage, and the bodies of the dead were washed and prepared for burial on the wooden benches. For many Finns the sauna was the holiest room in the house and the one most closely associated with their wellbeing. "Finns say the sauna is a poor man's pharmacy," says Pekka Niemi, a 54-year-old from Helsinki, who spends about three hours a day in the sauna, six days a week. "If a sick person is not cured by tar, spirits or sauna, then they will die," he adds, quoting a Finnish proverb. ("Spirits" here means strong alcohol, while tar was historically used as an antiseptic.) Image copyright Mark Bosworth Sauna helps you to calm down in a modern society where it is never quiet Jarmo Lehtola Today, Finland is a nation of 5.3 million people and 3.3 million saunas, found in homes, offices, factories, sports centres, hotels, ships and deep below the ground in mines. While Pekka Niemi's sauna habit may be exceptional, 99% of Finns take at least one a week, and much more when they visit their summer cottage in the countryside. Here the pattern of life tends to revolve around the sauna, and a nearby lake used for cooling off. Don't imagine, though, that the sauna is purely a place for fun and games. It certainly would not have been in times gone by. "Children were taught to behave in a sauna as if they were in church," says Jarmo Lehtola approvingly. He is from Saunaseura, the Finnish Sauna Society, an organisation dedicated to upholding and preserving traditional sauna culture. Founded in 1937, this private club of 4,200 members is based on an island a 15-minute bus ride from central Helsinki. 
Surrounded by a silver birch forest, it overlooks the tranquil Baltic Sea. A sign on the front door instructs you to switch off your mobile phone. Image copyright Mark Bosworth Image caption At Saunaseura's private sauna near Helsinki "Sauna is for your mind. It really helps you to calm down in a modern society where it is never quiet," says Lehtola. "You enter this meditative place. It's dark and it's usually so hot that you don't want to speak." Types of sauna in Finland Smoke sauna (savusauna), 80-160C (176-320F): Known as the original sauna and mainly found in rural areas. Smoke fills the room as wood is burned in a large stove. Once up to heat, the fire is allowed to die and smoke is vented via a hole in the ceiling. Wood-heated sauna 70-130C (158-266F): Most common type in the countryside. Stones are placed on a metal stove fired by well-dried birch wood - preferred for its good smell and long-lasting burn. Electric sauna 80-105C (176-221F): Most common type as it's the safest and easiest to heat in homes. An electric stove is activated at the press of a button. Some apartment blocks have basement saunas that can be booked for private sessions. There are some basic rules. No eating or drinking is allowed in the sauna and if you speak you should not discuss your job, title or religion. Members can choose between an electric sauna, two wood-burning saunas and three smoke saunas - all varying in heat and intensity. Most Finns consider traditional smoke saunas to be the best because of the very soft loyly they produce. They take about five hours to heat and produce soot which covers the wooden walls in a thick black layer. The benches are scrubbed clean but bathers are advised not to lean against the wall, unless they want to get a sooty back. Unlike wood-burning saunas, the smoke saunas use a stove without a chimney. The smoke clears through a small hole in the ceiling before you enter. You can still smell it - a pleasant sensation which transports your mind to the forest - but you do not see it or feel it in your eyes. No clothes or swimsuits are allowed, for the same reason that you would not wear anything in the bath or shower. Every part of the body needs to be properly cleaned. Men and women visit the sauna separately, unless they are members of the same family. Parents go with their children, and everyone is comfortable with that - at least until the children become teenagers, when they tend to use the sauna alone, or with friends. Image copyright Getty Images Image caption The word carried sexual connotations in 1970s Soho There is one widespread misconception that Lehtola is very keen to dispel. "It's nothing to do with sex in Finland," he says emphatically. Finnish sauna sayings In the sauna one must conduct himself as one would in church The sauna is a poor man's pharmacy If a sick person is not cured by tar, spirits [alcohol] or sauna, then they will die A woman looks her most beautiful after the sauna All men are created equal; but nowhere more so than in a sauna A house without a sauna is not a home A sauna without a birch whisk is like food without salt "But in places like Germany in the 1970s and 80s it was all to do with sex." Lehtola insists he has never enjoyed a sauna beyond Finland's borders, despite trying them in many countries. Truth to tell, he would not be happy with some of Helsinki's public saunas either. Two - Kotiharju and Arla - date from the 1920s and can be found in Kallio, a traditionally working class district. 
Factory workers who lived in homes without bathing facilities used to visit these saunas to relax, socialise and be scrubbed clean by washerwomen. Now the area's new inhabitants - students, artists and adventurous tourists - come to sweat and drink cold beer. At Arla, the mood contrasts sharply with Saunaseura's contemplative atmosphere. The famed Finnish reserve is nowhere to be seen. People seem to enjoy talking to complete strangers in the nude, as long as it's painfully hot. And there is alcohol too. Outside in a courtyard people wrapped in towels open bottles of beer, as steam rises from their bodies. Image copyright Getty Images Image caption Watching ice hockey from the warmth of a stadium sauna box in Helsinki There used to be more than 100 public saunas in Helsinki, with one on almost every street corner. But the number began to decline in the 1950s when people began to buy their own homes, complete with private sauna. It's very healthy to see different types of breasts and bums Minna Kurjenluoma The capital now has just four public saunas. One is the brand new Kulttuurisauna, or Culture Sauna - the first to have been built in the city for half a century. Despite the heat, which can reach up to 160C (320F), Finns insist saunas cool down tense situations. The Finnish parliament has its own sauna chamber for MPs to debate in, and all Finnish diplomatic and consular missions around the world have their own sauna. Former president and Nobel peace prize laureate Martti Ahtisaari used sauna diplomacy - diplomatic meetings in the sauna - to move forward negotiations from Tanzania to Indonesia. During the Cold War, Urho Kekkonen - who served as president for 26 years - negotiated with Soviet diplomats in the sauna at his official residence. Urho Kekkonen's sauna diplomacy Image copyright Getty Images Image caption Kekkonen, left, and Soviet leader Nikita Khrushchev before a hunting trip Finland's president from 1956 to 1982 - pictured above left with the USSR's Nikita Khrushchev - believed in the diplomatic healing powers of the sauna. When world leaders came to his official residence, Tamminiemi, he would hold negotiations in the sauna where new and more constructive ideas arose. Kekkonen believed all were equal in the sauna, and politics could not be hidden up a sleeve when no sleeves were worn. Finland trod a diplomatic tightrope during the Cold War. Its neutrality between East and West was constantly challenged by its giant neighbour, the USSR. In 1960, Khrushchev attended Kekkonen's 60th birthday. The story goes that Kekkonen kept Khrushchev in the sauna until 5am, throwing more water on the hot stones. Soon after, the Soviet government issued a communique expressing support for Finland's intention to co-operate with the West. It led to Finland joining the European Free Trade Association in 1971. Khrushchev was criticised at home. His countrymen said a communist should not have gone naked into a sauna with a capitalist and non-socialist. Tamminiemi is now a museum dedicated to Kekkonen. Visitors can even bathe in his private wooden sauna - for a mere 8,000 euros (£6,700, $10,800). All Finns have a favourite sauna. For 35-year-old Minna Kurjenluoma it is the one her grandfather built for the family in the 1940s on the shore of a lake next to a forest in north-east Finland. 
Language of the sauna

kiuas - sauna stove
kiuaskivet - stones in/on the sauna stove
loyly - steam, heat, humidity and temperature created by throwing water on hot stones
kiulu - water bucket, usually made of wood
loylykauha - ladle for throwing water on the stove
vihta/vasta - whisk made of fresh birch branches for beating the body to aid circulation
laude - elevated platform to sit on
laudeliina - special towel or disposable paper for sitting on
lakeinen - opening in the ceiling of a smoke sauna where the smoke escapes during heating
saunatonttu - sauna elf who, if you behave badly in the sauna, will become angry and burn it down

"It's very traditional and basic. There's no electricity and it's always very dim so you need to have a couple of candles," she says. "The loyly is the best because it's very soft, and moist enough because all of the wooden parts of the sauna are very old and soft." For Kurjenluoma, the sauna played a significant part in growing up. "To share a sauna with your grandmother, you've seen the body of an older female without clothes and I think that is great because you don't see that often. It's very healthy to see different types of breasts and bums that aren't shown in the magazines." After cooling off in the Baltic Sea at Saunaseura, I ask Jarmo Lehtola what life would be like without saunas. "There wouldn't be a Finland without the sauna. It's in our DNA," he says. "If somebody wants to understand what it is to be a Finn then they have to understand what a sauna is. If you do not experience sauna then you do not experience Finland." Image caption: Saunas - and the subsequent outdoor dips - are popular year round
Classical potentials for q-deformed anharmonic oscillators. Classical potentials giving the same spectrum as the q-deformed anharmonic oscillators having the symmetries U_q(2)⊃(2) and SU_q(1,1), which have been used for the description of vibrational spectra of diatomic molecules, are determined in analytic form. The potentials found here are somewhat wider than the corresponding classical anharmonic potentials obtained in the limit q→1, while their Taylor expansions are similar to the expansion of the modified Pöschl-Teller potential, which is connected to the Morse potential through a known transformation.
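For reference, the two classical potentials named in the abstract have the standard textbook forms below; the notation is generic and is not taken from the paper itself.

\[
V_{\mathrm{Morse}}(r) = D_e\left(1 - e^{-a(r - r_e)}\right)^2,
\qquad
V_{\mathrm{mPT}}(x) = -\frac{V_0}{\cosh^2(\alpha x)}
\]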
Without much fanfare, the City Council on Thursday unanimously approved a $2.5 billion budget for the upcoming fiscal year that officials called a “back to basics” spending plan focused on core municipal services. The budget, which takes effect Oct. 1, is slightly smaller than the previous year’s plan because of slightly less spending on capital projects as the 2012 bond program winds down. The council adopted the same property tax rate as last year, when it dropped slightly from the previous year. The rate has not increased in more than two decades, though property values generally have risen considerably — which drives up annual tax bills. Mayor Ivy Taylor said during the council meeting that striking a balance between needs that “sometimes feel like they’re competing” is a difficult job. “But I think we’ve struck a really great balance … focusing on those key priorities,” she said. “I’m glad we’ve maintained the increase we had in street maintenance, that we’re investing in sidewalks and also adding public safety officers, as well as several innovations to better support our employees who do the work of providing services for our citizens every day.” The approved budget includes $7.9 million for the Smart Cities initiative, which will bring a series of tech improvements to town — including free wireless internet in several parks, solar benches that allow users to charge electronic devices and connect to the internet, and community kiosks that will provide information on parks, events and other things, and smartphone apps that will help with things like filing code-violation complaints and way-finding in parks. RELATED: See the proposed budget here. The spending plan brings on 32 new police officers and $1.9 million for continued rollout of body-worn cameras. The city will spend $64 million on street repairs and $15 million on new sidewalks, as well. Councilman Joe Krier said that after he was originally appointed to the District 9 seat, he’d visited with his constituents to better understand what they wanted to see in the annual budget. “One of the things they said to me repeatedly is ‘we want to see budgets that are what we view as basic-services budgets,’ and it was clear to me that basic-services budgets are budgets in which police and fire, streets and drainage, parks and libraries are the very top priorities,” Krier said. “And that is clearly the case in this budget, as it has been for the last several years.” Several council members said they were pleased to increase the hourly wage for entry-level municipal employees from $13 to $13.75, keeping pace with Bexar County, which this week approved raising its rate to $13.75. The council also earmarked funds for council aides, who are contract workers and not city employees. They don’t receive the same benefits but will now get cellphone and car allowances, more for health care and city contributions to retirement accounts. RELATED: See the proposed budget for the employee compensation and benefits here. Councilwoman Shirley Gonzales praised the COPS/Metro Alliance for its push to increase San Antonio’s living wage. “I fought to raise the minimum wage for city employees from $13 dollars per hour to a living wage (of) $13.75,” she said in a news release. “This is a big step towards increasing the quality of life for thousands of families in District 5 and San Antonio.” Councilman Ron Nirenberg, too, applauded the increased minimum wage, among other budget priorities, such as spending on new technology. 
“Smart Cities — this is hugely important for us. The mayor has acknowledged that smart cities are just regular cities if we are doing our job right. And that’s absolutely true, so I’m glad to see this is fully funded at $7.9 million,” he said. “I think that this is the first step, though, as we continue to implement technology to make our lives and constituent lives easier.” But Nirenberg, who’s rumored to be contemplating a run against Taylor, said he harbors concerns that the council has punted on how to continue funding delegate agencies, which contract with the city to offer social services. Earlier this year, the council had discussed cutting funds for a community safety-net that included money for fighting domestic violence. “The policy recommendation currently is to shift those funds, to reduce the funds,” he said. “We were able to find carry-forward this year, but we need to address that in years forward because that need is only going to grow larger, not smaller.” [email protected] Twitter: @jbaugh
/**
 * Created by Administrator on 2018/1/19.
 */
@AllArgsConstructor
@NoArgsConstructor
@Data
@EqualsAndHashCode
@JsonIgnoreProperties(ignoreUnknown = true)
public class CallbackCheckDTO {

    private Boolean checkResult;
    private CallbackBO callbackBO;
    private PaymentDetailDO paymentDetailDO;

    public static CallbackCheckDTO getCheckFailInstant() {
        CallbackCheckDTO callbackCheckDTO = new CallbackCheckDTO();
        callbackCheckDTO.setCheckResult(false);
        return callbackCheckDTO;
    }

    public static CallbackCheckDTO getCheckSuccessInstant(CallbackBO callbackBO, PaymentDetailDO paymentDetailDO) {
        CallbackCheckDTO callbackCheckDTO = new CallbackCheckDTO();
        callbackCheckDTO.setCheckResult(true);
        callbackCheckDTO.setCallbackBO(callbackBO);
        callbackCheckDTO.setPaymentDetailDO(paymentDetailDO);
        return callbackCheckDTO;
    }
}
# -*- coding: utf-8 -*-

colorize_enabled = True

_tc = {
    'black': '\033[90m',
    'red': '\033[91m',
    'green': '\033[92m',
    'yellow': '\033[93m',
    'blue': '\033[94m',
    'purple': '\033[95m',
    'cyan': '\033[96m',
    'white': '\033[97m',
    'end': '\033[0m',
}
_tc_end = _tc['end']


def colorize(message, color):
    return ''.join((_tc[color], message, _tc_end)) if colorize_enabled else message
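A short usage sketch of the helper above, run from the same module; the messages are illustrative.

print(colorize("build succeeded", "green"))
print(colorize("3 warnings", "yellow"))

# Flipping the module-level flag turns coloring off globally,
# for example when stdout is not a terminal.
colorize_enabled = False
print(colorize("plain text", "red"))  # printed without escape codes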
CBN News staff have been praying as we've covered Hurricane Harvey. Above is a prayer effort we recorded to share with you. Below is a Facebook prayer time with CBN's Gordon Robertson on this National Day of Prayer. Watch, pray and share. We know where two or more are gathered the Lord Jesus is in our midst and there is power in His Name. President Donald Trump and First Lady Melania Trump attended church services Sunday morning at St. John's Church near the White House. This is the first time the president has visited the Episcopal church, popularly referred to as "the Church of the Presidents," since becoming commander-in-chief. Mr. Trump and the First Lady reportedly sat in the "president's pew" for the first time. Afterwards, Mrs. Trump tweeted, "Beautiful service today at St. John's. @potus and I wishing you many blessings on this National Day of Prayer." Beautiful service today at St. John's. @potus and I wishing you many blessings on this National Day of Prayer. — Melania Trump (@FLOTUS) September 3, 2017 St. John's joined other churches across the nation, like Cornerstone Church in San Antonio, in a special National Day of Prayer in the catastrophic aftermath of Hurricane Harvey. During Cornerstone's Sunday morning worship service, Pastor John Hagee dedicated a prayer for all Texans who had been "disadvantaged" or "destroyed" by the storm, reminding his megachuch congregants that they are "totally dependent" on God. "We pray for all of our friends in the city of Houston, in Rockport, [and] in every city of the south part of this state," Hagee, Cornerstone's senior pastor, said. "Some have lost every possession that they have. Some are in debt for those worthless possessions. Some have lost their health. Some have lost their lives." "I ask you to look from the balconies of heaven today and to comfort the brokenhearted," Hagee continued. "I ask you, God, to restore what Satan has taken from the righteous and give it back to them many times over. I ask for the peace of God that surpasses all understanding to flood our hearts and minds, because his peace is greater than the storm." Our President @realDonaldTrump requested a #NationalDayOfPrayer today and we dedicate this prayer to people affected by Hurricane Harvey. pic.twitter.com/rroucKebVj — JohnHagee Ministries (@HageeMinistries) September 3, 2017 Evangelist Franklin Graham wrote on his Facebook page Sunday, "The Bible encourages us, 'Let us then approach God’s throne of grace with confidence, so that we may receive mercy and find grace to help us in our time of need' (Hebrews 4:16)." "Will you pray for all those who are homeless today?" he asked. "Thousands and thousands have lost their homes and everything they own." "Especially today, let us pray for those who lost loved ones as a result of this deadly storm," Graham continued. "They have suffered unbelievable loss — may God comfort them." Dr. Robert Jeffress, the pastor of First Baptist Church in Dallas, wrote, "Please join our church in prayer for the victims of #HurricaneHarvey, our state, and our nation on this National Day of Prayer." A picture of church leadership and congregants bowing their heads in prayer accompanied the post. President Trump declared Sunday to be a "National Day of Prayer," late last week – joining with Texas Gov. Greg Abbott, who has also called on Texans to pray for recovery efforts after the historic storm. 
"We are deeply grateful for those performing acts of service, and we pray for healing and comfort for those in need," the president's proclamation reads. .@POTUS signs proclamation declaring this Sunday a National Day of Prayer. pic.twitter.com/eA0rmTm1tF — Sarah Sanders (@PressSec) September 1, 2017 "As response and recovery efforts continue, and as Americans provide much-needed relief to the people of Texas and Louisiana, we are reminded of Scripture's promise that 'God is our refuge and strength, a very present help in trouble,'" the proclamation states. "I call on all Americans and houses of worship throughout the Nation to join in one voice of prayer, as we seek to uplift one another and assist those suffering from the consequences of this terrible storm," Trump continued. Meanwhile, the governor of Texas also called on his fellow Texans and people around the country to pray for the Lone Star State on Sunday. Abbott issued a proclamation calling for an official day of prayer for Texans "of all faiths and religious traditions and backgrounds." I issued a proclamation making Sunday September 3 a Day of Prayer in Texas for the victims & responders to #Harvey. https://t.co/Mlkf8QWPgf — Greg Abbott (@GregAbbott_TX) August 31, 2017 Sunday, Gov. Abbott spoke at Hyde Park Baptist Church in Austin. He praised the many faith-based groups that are bringing relief and spiritual comfort to the flood victims. He also said this is a time not just to pray for God's help, but to give thanks for His mercies. "So, this is a time for prayer for those in need, this is a time for prayers for those who have been lost. It's also a prayer of thanksgiving to God Almighty for uniting us as a nation and for helping us respond together," Abbott said. The governor is also urging people to pray for the safety of first responders, public safety officers and military personnel involved in the aftermath of Hurricane Harvey. He's also calling for prayers for the "healing of individuals, rebuilding of communities and the restoration of the entire region struck by this disaster." Abbott applauded Texans for helping each other through the ordeal and noted they are grateful for the support and resources they've received in the days since Harvey struck. Best moment in Rockport: holding this 2 week old baby born a week before #HurricaneHarvey showing total serenity. pic.twitter.com/HmQBkD8NW8 — Greg Abbott (@GregAbbott_TX) September 1, 2017 The Church Project in the Woodlands is one of the churches making a difference in their Houston community. Pastor Jason Shepperd told CBN News that some 1,200 people have signed up to volunteer through the church. Flood victims Airrion and Sherry Fontenot are blessed by those volunteers. The couple's home was flooded with two feet of water and they said going through their damaged belongings with their three daughters was heartwrenching. "Going to the house was the hardest, because, I mean, the memories," Sherry told CBN News, "but the people that showed up, they made it better. They helped us walk through and honestly I didn't have to lift a finger in that house to do anything." Airrion said the volunteers turned things around for them. "When I got there and just saw the generosity and the grace and God's love for the people, it was amazing, it was an amazing thing."
// maxep/maxep.me
import React, { useState, useEffect } from "react";
import { Box, Image, Flex, Link, Text, Icon } from "@chakra-ui/react";
import fetchJsonp from "fetch-jsonp";
import Rating from "react-rating";
import { BsStarFill } from "react-icons/bs";

export type ApplicationProps = {
  id: number;
  title: string;
  subtitle: string;
  comment: string;
  beta: boolean;
  url?: string;
  icon?: string;
  rating?: {
    value: number;
    count: number;
  };
};

type Props = {
  application: ApplicationProps;
};

const useApplication = (props: ApplicationProps): ApplicationProps => {
  const [application, setApplication] = useState(props);

  useEffect(() => {
    fetchJsonp(`https://itunes.apple.com/lookup?id=${props.id}`)
      .then((data) => data.json())
      .then((json) => {
        if (json.resultCount !== 1) return;
        const result = json.results[0];
        setApplication((current) => ({
          ...current,
          icon: result.artworkUrl512,
          url: result.sellerUrl,
          rating: {
            value: result.averageUserRating,
            count: result.userRatingCount,
          },
        }));
      });
  }, [props.id]);

  return application;
};

const Application = (props: Props): React.ReactElement => {
  const application = useApplication(props.application);

  const rating = application.rating?.value.toLocaleString(undefined, {
    maximumFractionDigits: 1,
  });
  const ratingCount = application.rating?.count.toLocaleString(undefined, {
    notation: "compact",
  });

  return (
    <Box p={4} marginX={4} marginBottom={4} w="xs" rounded="lg" borderWidth={1}>
      <Flex mb={4} align="center" justify="center" direction="column">
        <Image w={160} alignItems="center" src={application.icon} rounded={"22.5%"} />
        <Flex marginY={2} direction="column" align="center">
          <Rating
            initialRating={application.rating?.value}
            readonly
            emptySymbol={<Icon as={BsStarFill} color="#CBD5E0" />}
            fullSymbol={<Icon as={BsStarFill} color="#FF9000" />}
            placeholderSymbol={<BsStarFill color="#CBD5E0" />}
          />
          <Text fontSize="sm" fontWeight="regular" color="gray.500">
            {application.beta ? "In Beta" : `${rating}, ${ratingCount} Ratings`}
          </Text>
        </Flex>
      </Flex>
      <Link
        fontSize="lg"
        fontWeight="medium"
        variant="app"
        href={application.url}
        isExternal
      >
        {application.title}
      </Link>
      <Text fontSize="md" fontWeight="regular" variant="description">
        {application.subtitle}
      </Text>
      <Text mt={4} fontSize="sm" fontWeight="regular" variant="description">
        {application.comment}
      </Text>
    </Box>
  );
};

export default Application;
// src/pages/bill/bill.ts
import { ConationProvider } from '../../providers/conation/conation';
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams } from 'ionic-angular';
import { AlertController } from 'ionic-angular';
import { HomePage } from '../home/home';

@IonicPage()
@Component({
  selector: 'page-bill',
  templateUrl: 'bill.html',
})
export class BillPage {
  price: any;
  days: any;
  check_in: any;
  check_out: any;
  room: any;
  booking: any;
  bill: any;
  check_in_time: any;
  occupants: any;

  constructor(public navCtrl: NavController, public navParams: NavParams,
              public futsalProv: ConationProvider, public alertCtrl: AlertController) {
    this.check_in = navParams.get("param1");
    this.check_out = navParams.get("param2");
    this.days = navParams.get("param3");
    this.price = navParams.get("param4");
    this.room = navParams.get("param5");

    if (this.days == 0) {
      this.bill = this.price;
    } else {
      this.bill = this.days * this.price * 24;
    }
    // Note: this assignment overrides the branch above, so the final bill is always price * days.
    this.bill = navParams.get("param4") * navParams.get("param3");

    this.check_in_time = navParams.get("param6");
    // Note: in the original code, occupants reads the same "param6" key as check_in_time.
    this.occupants = navParams.get("param6");
  }

  ionViewDidLoad() {
  }

  bookSuccess() {
    let alert = this.alertCtrl.create({
      title: 'Booking Success!!',
      subTitle: 'Thank you for choosing us, hope to see you here.',
      buttons: ['OK']
    });
    alert.present();
  }

  BookFailed() {
    let alert = this.alertCtrl.create({
      title: 'Warning!',
      subTitle: 'Not Available, Kindly choose another date!',
      buttons: ['OK']
    });
    alert.present();
  }

  errDays() {
    let alert = this.alertCtrl.create({
      title: 'Warning!',
      subTitle: 'You are only allowed to book at least 2 days.',
      buttons: ['OK']
    });
    alert.present();
  }

  clickSubmit() {
    if (this.days > 2) {
      return this.errDays();
    }
    this.futsalProv.userBook(this.check_in, this.check_out, this.room, this.bill,
        this.occupants, this.check_in_time).subscribe(booking => {
      console.log(this.booking = booking);
      if (this.booking.status == true) {
        this.bookSuccess();
        this.navCtrl.setRoot(HomePage);
      } else {
        this.BookFailed();
      }
    });
  }
}
// sebge2/github-oauth
import {
    NonRecurringScheduledTaskTriggerDto,
    RecurringScheduledTaskTriggerDto,
    ScheduledTaskDefinitionDto,
    ScheduledTaskTriggerDto,
} from '../../../api';
import { LocalizedString } from '@i18n-core-translation';
import { ScheduledTaskTrigger } from './scheduled-task-trigger.model';
import { NonRecurringScheduledTaskTrigger } from './non-recurring-scheduled-task-trigger.model';
import { RecurringScheduledTaskTrigger } from './recurring-scheduled-task-trigger.model';

export class ScheduledTaskDefinition {
    static fromDto(dto: ScheduledTaskDefinitionDto): ScheduledTaskDefinition {
        return new ScheduledTaskDefinition(
            dto.id,
            LocalizedString.fromDto(dto.name),
            LocalizedString.fromDto(dto.description),
            dto.enabled,
            ScheduledTaskDefinition.fromTriggerDto(dto.trigger),
            dto.lastExecutionTime,
            dto.nextExecutionTime
        );
    }

    static fromTriggerDto(dto: ScheduledTaskTriggerDto): ScheduledTaskTrigger {
        switch (dto.type) {
            case 'NON_RECURRING':
                return new NonRecurringScheduledTaskTrigger((<NonRecurringScheduledTaskTriggerDto>dto).startTime);
            case 'RECURRING':
                return new RecurringScheduledTaskTrigger((<RecurringScheduledTaskTriggerDto>dto).cronExpression);
        }
    }

    constructor(
        public id: string,
        public name: LocalizedString,
        public description: LocalizedString,
        public enabled: boolean,
        public trigger: ScheduledTaskTrigger,
        public lastExecutionTime?: Date,
        public nextExecutionTime?: Date
    ) {}
}
Share. ''The Dirty Dozen with clones.'' ''The Dirty Dozen with clones.'' At Star Wars Celebration today, Dave Filoni introduced a premiere screening for a never finished or released story arc from Star Wars: The Clone Wars – “Bad Batch.” All four episodes of the storyline were shown, though in their incomplete form – final animation was never completed, so this was all the earliest, animatic-style animation without any detailed backgrounds, moving lips and many other elements we'd have seen if it had been completed, though the vocals had all been recorded by the cast. The storyline is another big one for Dee Bradley Baker, the voice of all the clone troopers, as we meet the Bad Batch of the title, a foursome -- Hunter, Tech, Crosshair and Wrecker -- of Clones who came out somehow malformed (like the Clone “99” we met once before), but were then mutated further into a squad of super soldier types, sent on especially dangerous missions. These guys are all highly individualized (including their appearance, as they do not share the same exact face) in both their skill set and armor, with an almost G.I Joe type feel to them. There’s also a hint of a Republic Commando type crew here as well. It’s also no secret (don’t yell about spoilers – Filoni himself revealed this nearly a year ago!) that this would be the storyline to explain what happened to Echo, the Clone Trooper who Filoni explained was not dead, as previously believed. I won’t say exactly what happened to Echo or how he is found, but it’s a pretty intriguing plot point, and like much of Clone Wars, has a disturbing component to it. Exit Theatre Mode Even in the rough animation form we saw it in, “Bad Batch” was a lot of fun, with some great action beats that had the audience cheering. There are some bat-like creatures that come into play that are really well used, including in a standout action scene that begins on a small bridge. Also, Mace Windu, while not a huge character in this arc, has an incredibly entertaining speech at one point that is one of the character’s best moments. Suffice to say, the episodes were very well received and it was hard not to wish they could be completed, given how effective they were in this form. In the Q&A that followed, Filoni couldn’t firmly say whether “Bad Batch” will be put on StarWars.com eventually, as one other unfinished Clone Wars storyline was last year – but he seemed to lean towards it being highly likely it would be, saying, with a grin, “I don’t know. It would be great. I’m sure it will… possibly.” Asked why they don’t release all the unfinished episodes, he said many factors were at play, with some episodes being further along than others to the point where he felt comfortable showing them. He noted that he really didn’t want to release any episodes where the final vocals hadn’t at least been recorded, since he felt the voice cast added so much to the experience. Baker was present for the Q&A and said, “I remember how terrified I was going into this!,” when asked his reaction to the Bad Batch storyline. He said his challenge is usually to make sure the different clones sound at least somewhat distinct, but here he had to remember that while he could go into broader places with the Bad Batch’s voices, they still had to sound at least somewhere at the core as though they were clones and it was a challenge to retain that. 
Writer Brent Friedman, who wrote the Bad Batch arc, recalled, “George [Lucas] had a very specific vision of this motley crew,” and that, “As we were breaking the stories, Dave was sketching the characters. I already saw their personalities and could hear their voices.” Another Clone Wars writer, Matt Michnovetz, wrote a second arc for the Bad Batch characters that was in the works for the series when the show was cancelled, also featuring Yoda and Wookiees, Chewbacca included. He said that often while making The Clone Wars, “George would come up with these classic movie references,” and with the Bad Batch, “He came in and wanted to do a version of the Dirty Dozen with clones.” Filoni revealed the influence for most of the Bad Batch characters (with the exception of “Tech”), saying, “Hunter is Billy from Predator. Crosshair is more Clint Eastwood - Cad Bane if he's a clone.” As for the super strong Wrecker, Filoni joked he “is about as close to the Hulk as we'll get,” saying his Marvel-adoring crew “loved Wrecker, because this is them mini-Avengering.”
// Note: the original snippet contained only the documented constructor. The enclosing
// class, the 'done' flag, the 'spin' thread member and the joining destructor below are
// inferred from the constructor body so the fragment is self-contained.
#include <atomic>
#include <thread>

class Spinner
{
    std::atomic<bool> done{false};
    std::thread spin;

public:
    /** \brief Launches a thread that spins ROS. This is usually used to ensure that callbacks are processed. */
    Spinner() : spin{[this] {
        while (!done && ros::ok()) { ros::spinOnce(); ros::Duration(0.01).sleep(); }
    }} {}

    ~Spinner() { done = true; if (spin.joinable()) spin.join(); }
};
use linebuffer::{typenum, LineBuffer}; use std::convert::TryInto; use std::time::*; #[test] #[ignore] fn perf_simple() { const AMOUNT: usize = 8192; let mut buffer: LineBuffer<(), typenum::U2048> = LineBuffer::new(AMOUNT); let start = Instant::now(); let max: u32 = 1_000_000_000; for i in 0..max { buffer.insert(&i.to_ne_bytes(), ()); } let nanos = start.elapsed().as_nanos(); assert_eq!(buffer.capacity_bytes(), AMOUNT); println!("Duration: {} ns for {} entries", nanos, max); // let bytes: u128 = (max * 4) as u128; // let ms = nanos / 1_000_000; // println!("{} Byte in, {} B/ms",bytes, (bytes / ms) ); let expected: u32 = max - 1; assert_eq!( buffer.get((max - 1) as usize), Some((&(expected.to_ne_bytes()[..]), &())) ); } #[test] #[ignore] fn perf_lines() { const AMOUNT: usize = 1024; let mut buffer: LineBuffer<(), typenum::U1024> = LineBuffer::new(AMOUNT); let start = Instant::now(); let max: u32 = 1_000_000_000; let empty = [0; 0]; for _ in 0..max { buffer.insert(&empty, ()); } let nanos = start.elapsed().as_nanos(); assert_eq!(buffer.capacity_bytes(), AMOUNT); println!("Duration: {} ns for {} entries", nanos, max); } #[test] #[ignore] fn perf_from_file() { unimplemented!(); }
/** * The type Bch tx script pub key. */ @AllArgsConstructor @NoArgsConstructor @Data @EqualsAndHashCode @JsonIgnoreProperties(ignoreUnknown = true) public class BchTxScriptPubKey { private String hex; private String asm; private String[] addresses; private String type; }
/**
 * If there is an associated endpoint then check whether authentication is required
 * and, if so, trigger the authentication process.
 *
 * @param args
 * @throws ClientServicesException
 */
protected void checkAuthentication(Args args) throws ClientServicesException {
    if (endpoint != null) {
        if (endpoint.isRequiresAuthentication() && !endpoint.isAuthenticated()) {
            endpoint.authenticate(false);
        }
    }
}
Former Manhattan U.S. Attorney Preet Bharara took several shots at the administration of President Donald Trump on Thursday, calling for “facts not falsehoods” as the basis for political discourse and a more welcoming stance towards immigrants in his first public speaking event since being fired one month ago.

Bharara sprinkled the hour-long speech with humor, including a joke about the size of the crowd clearly aimed at Trump. But Bharara also made a series of thinly veiled criticisms of the new administration, referring multiple times to Trump's campaign pledge to “drain the swamp” in Washington.

“You don't drain a swamp with a slogan. You don't drain it by replacing one set of partisans with another. You don't replace muck with muck,” Bharara, 48, said at the Cooper Union in New York. “To drain a swamp you need an army corps of engineers, experts schooled in service and serious purpose. Not do-nothing, say-anything, neophyte opportunists who know a lot about how to bully and bluster but not so much about truth, justice and fairness.”

Bharara was fired by Trump on March 11 after refusing to step down. While he was among 46 U.S. attorneys told to submit their resignations, his dismissal was a surprise because Trump had asked him in November to stay in the job.

Bharara has said it was not made clear to him at first whether or not he was being dismissed. He joked about that initial confusion with a reference to Trump's TV reality show in which he regularly fired people on the spot.

“I don't really understand why this was such a big deal, especially to this White House. I had thought that was what Donald Trump was good at,” Bharara said.

As the chief federal prosecutor for the Southern District of New York, Bharara oversaw several notable corruption and white-collar criminal cases, as well as cases involving national security. He said he was equally proud of the less high-profile work his office did to combat gangs and consumer fraud.

Bharara said he expected his successor to carry on with the office's reputation for independence. He did not comment on who his possible successor might be.

As for his own future, Bharara ruled out running for public office. “I don't have any plans to enter politics, just like I have no plans to join the circus. And I mean no offense to the circus.”
<filename>src/main/java/mx/ipn/cic/biblioteca/AdminControl/services/UserServiceImpl.java package mx.ipn.cic.biblioteca.AdminControl.services; import mx.ipn.cic.biblioteca.AdminControl.model.DoctorModel; import mx.ipn.cic.biblioteca.AdminControl.model.PatientModel; import mx.ipn.cic.biblioteca.AdminControl.model.User; import mx.ipn.cic.biblioteca.AdminControl.model.Role; import mx.ipn.cic.biblioteca.AdminControl.repositories.IDoctorRepository; import mx.ipn.cic.biblioteca.AdminControl.repositories.IPatientRepository; import mx.ipn.cic.biblioteca.AdminControl.repositories.UserRepository; //import mx.ipn.cic.biblioteca.AdminControl.web.dto.UserRegistrationDto; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.authority.SimpleGrantedAuthority; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.core.userdetails.UserDetailsService; import org.springframework.security.core.userdetails.UsernameNotFoundException; import org.springframework.stereotype.Service; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; @Service public class UserServiceImpl implements UserDetailsService { @Autowired private UserRepository userRepository; @Autowired private IPatientRepository patientRepository; @Autowired private IDoctorRepository doctorRepository; //descomentar para bcrypt // @Autowired // private BCryptPasswordEncoder passwordEncoder; public User findByEmail(String email){ return userRepository.findByEmail(email); } public Long findIdByEmail(String email) { return userRepository.findIdByEmail(email); } public String getCurrentUsername() { String username; String nombre = null; Object principal = SecurityContextHolder.getContext().getAuthentication().getPrincipal(); if (principal instanceof UserDetails) { username = ((UserDetails)principal).getUsername(); System.out.println("username: "+username); nombre = username; } System.out.println("nombre: "+nombre); return nombre; } /* public User save(UserRegistrationDto registration){ //User user = new User(); DoctorModel user = new DoctorModel(); user.setFirstName(registration.getFirstName()); user.setLastNameP(registration.getLastNameP()); user.setLastNameM(registration.getLastNameM()); user.setEmail(registration.getEmail()); //descomentar para bcrypt //user.setPassword(passwordEncoder.encode(registration.getPassword())); user.setPassword(registration.getPassword()); user.setRoles(Arrays.asList(new Role("ROLE_USER"))); return userRepository.save(user); } */ @Override public UserDetails loadUserByUsername(String email) throws UsernameNotFoundException { User user = userRepository.findByEmail(email); System.out.println("USER: "+user); if (user == null){ throw new UsernameNotFoundException("Invalid username or password."); } return new org.springframework.security.core.userdetails.User( user.getEmail(), user.getPassword(), mapRolesToAuthorities(user.getRoles())); } private Collection<? 
extends GrantedAuthority> mapRolesToAuthorities(Collection<Role> roles){ return roles .stream() .map(role -> new SimpleGrantedAuthority(role.getName())) .collect(Collectors.toList()); } public String getEmailUser(){ Authentication auth = SecurityContextHolder.getContext().getAuthentication(); String name = auth.getName(); System.out.println(name); return name; } public boolean deleteDoctor(Long idToDelete){ Optional<DoctorModel> doctor = this.doctorRepository.findById(idToDelete); List<PatientModel> patients = this.patientRepository.findByIdDoctor(doctor.get()); for (PatientModel tmp : patients){ this.patientRepository.delete(tmp); } this.userRepository.deleteById(idToDelete); return true; } public boolean delete(Long idToDelete){ this.userRepository.deleteById(idToDelete); return true; } public boolean updateCity(String city, Long id){ this.userRepository.updateCity(city, id); return true; } public boolean updateDoctorWithoutPassword(Long id, String firstName, String lastNameP, String lastNameM, String email, String birthdate, String gender, String phone, String mobilePhone, String professionalLicense, String city, String hospital){ this.userRepository.updateUserWithoutPassword(id, firstName, lastNameP, lastNameM, email, birthdate, gender, phone, mobilePhone, professionalLicense, city, hospital); return true; } public boolean updateDoctor(Long id, String firstName, String lastNameP, String lastNameM, String email, String birthdate, String gender, String phone, String mobilePhone, String professionalLicense, String city, String hospital, String password){ this.userRepository.updateUser(id, firstName, lastNameP, lastNameM, email, birthdate, gender, phone, mobilePhone, professionalLicense, city, hospital, password); return true; } // public boolean updateMonitorWithoutPassword(Long id, String firstName, String lastNameP, // String lastNameM, String email, String birthdate, // String gender, String phone, String mobilePhone){ // this.userRepository.updateMonitorWithoutPassword(id, firstName, lastNameP, lastNameM, email, birthdate, // gender, phone, mobilePhone); // return true; // } // // public boolean updateMonitor(Long id, String firstName, String lastNameP, // String lastNameM, String email, String birthdate, // String gender, String phone, String mobilePhone, // String password){ // this.userRepository.updateMonitor(id, firstName, lastNameP, lastNameM, email, birthdate, // gender, phone, mobilePhone, password); // return true; // } }
/** * Unit Test Case for EndpointService. * * @author kumargautam */ @PrepareForTest(value = { DockerClientBuilder.class, ExposedPort.class, EndpointService.class }) @PowerMockIgnore({ "javax.net.ssl.*,javax.xml.*", "org.xml.sax.*", "org.apache.logging.log4j.*" }) public class EndpointServiceTest extends PowerMockTestCase { private EndpointService endpointService; @Mock private EndpointRepository repository; @Mock private CredentialService credentialService; @Mock private EndpointCertificatesService certificatesService; @Mock private EndpointClientFactory endpointClientFactory; @Mock private VCenterAdapterDetailsService vcaAdapterService; @Mock private MappingService mappingService; @Mock private SchedulerService schedulerService; @Mock private TaskService taskService; private EndpointMockData mockData = new EndpointMockData(); private EndpointSpec endpointSpec; private EndpointSpec dockerEndpointSpec; private EndpointSpec k8sEndpointSpec; private CredentialsSpecMockData credentialsSpecMockData = new CredentialsSpecMockData(); @Mock private CustomDockerClient customDockerClient; @Mock private KubernetesCommandLineClient kubernetesCommandLineClient; /** * @throws java.lang.Exception */ @BeforeMethod public void setUpBeforeClass() { MockitoAnnotations.initMocks(this); PowerMockito.mockStatic(DockerClientBuilder.class); PowerMockito.mockStatic(EndpointService.class); endpointService = new EndpointService(repository, credentialService, endpointClientFactory, certificatesService, mappingService, vcaAdapterService); this.endpointSpec = mockData.rmEndpointMockData(); this.dockerEndpointSpec = mockData.dockerEndpointMockData(); this.k8sEndpointSpec = mockData.k8sEndpointMockData(); } /** * @throws java.lang.Exception */ @AfterClass public void tearDownAfterClass() { this.endpointSpec = null; this.mockData = null; } /** * @throws java.lang.Exception */ @AfterTest public void tearDown() { validateMockitoUsage(); } /** * Test method for {@link EndpointService#getAllEndpoints()}. */ @Test public void testGetAllEndpoints() { List<EndpointSpec> endpointSpecList = new ArrayList<>(); endpointSpecList.add(endpointSpec); endpointSpecList.add(mockData.k8sEndpointMockData()); when(repository.findAll()).thenReturn(endpointSpecList); List<EndpointSpec> actualResult = endpointService.getAllEndpoints(); Assert.assertEquals(actualResult.size(), 2); verify(repository, times(1)).findAll(); } /** * Test method for {@link EndpointService#getEndpointByName(java.lang.String)}. * * @throws MangleException */ @Test public void testGetEndpointByName() throws MangleException { Optional<EndpointSpec> optional = Optional.of(endpointSpec); when(repository.findByName(anyString())).thenReturn(optional); EndpointSpec actualResult = endpointService.getEndpointByName(endpointSpec.getName()); verify(repository, times(1)).findByName(anyString()); Assert.assertEquals(actualResult, endpointSpec); } /** * Test method for {@link EndpointService#getEndpointByName(java.lang.String)}. * * @throws MangleException */ @Test public void testGetEndpointByNameFailure() { Optional<EndpointSpec> optional = Optional.empty(); when(repository.findByName(anyString())).thenReturn(optional); boolean actualResult = false; try { endpointService.getEndpointByName(endpointSpec.getName()); } catch (Exception e) { actualResult = true; } verify(repository, times(1)).findByName(anyString()); Assert.assertTrue(actualResult); } /** * Test method for {@link EndpointService#getEndpointByName(java.lang.String)}. 
* * @throws MangleException */ @Test public void testGetEndpointByNameFailure1() { Optional<EndpointSpec> optional = Optional.empty(); when(repository.findByName(anyString())).thenReturn(optional); boolean actualResult = false; try { endpointService.getEndpointByName(null); } catch (Exception e) { actualResult = true; } verify(repository, times(0)).findByName(anyString()); Assert.assertTrue(actualResult); } /** * Test method for * {@link EndpointService#getAllEndpointByType(com.vmware.mangle.model.enums.EndpointType)}. * * @throws MangleException */ @Test public void testGetAllEndpointByType() throws MangleException { List<EndpointSpec> endpointSpecList = new ArrayList<>(); endpointSpecList.add(endpointSpec); endpointSpecList.add(endpointSpec); when(repository.findByEndPointType(any(EndpointType.class))).thenReturn(endpointSpecList); List<EndpointSpec> actualResult = endpointService.getAllEndpointByType(endpointSpec.getEndPointType()); Assert.assertEquals(actualResult.size(), 2); verify(repository, times(1)).findByEndPointType(any(EndpointType.class)); } /** * Test method for * {@link EndpointService#getAllEndpointByType(com.vmware.mangle.model.enums.EndpointType)}. * * @throws MangleException */ @Test public void testGetAllEndpointByTypeFailureWithNull() { List<EndpointSpec> endpointSpecList = new ArrayList<>(); when(repository.findByEndPointType(any(EndpointType.class))).thenReturn(endpointSpecList); boolean actualResult = false; try { endpointService.getAllEndpointByType(null); } catch (Exception e) { actualResult = true; } Assert.assertTrue(actualResult); verify(repository, times(0)).findByEndPointType(any(EndpointType.class)); } /** * Test method for {@link EndpointService#addOrUpdateEndpoint(EndpointSpec)} * * @throws MangleException */ @Test public void testAddOrUpdateEndpoint() throws MangleException { when(repository.save(any(EndpointSpec.class))).thenReturn(endpointSpec); Optional<EndpointSpec> optional = Optional.of(endpointSpec); when(repository.findByName(anyString())).thenReturn(optional); EndpointSpec actualResult = endpointService.addOrUpdateEndpoint(endpointSpec); verify(repository, times(1)).save(any(EndpointSpec.class)); verify(repository, times(1)).findByName(anyString()); Assert.assertEquals(actualResult, endpointSpec); } /** * Test method for {@link EndpointService#addOrUpdateEndpoint(EndpointSpec)} * */ @Test public void testAddOrUpdateEndpointWithSameNameAndDifferentType() { when(repository.save(any(EndpointSpec.class))).thenReturn(endpointSpec); EndpointSpec dockerSpec = mockData.dockerEndpointMockData(); dockerSpec.setName(endpointSpec.getName()); Optional<EndpointSpec> optional = Optional.of(dockerSpec); when(repository.findByName(anyString())).thenReturn(optional); try { endpointService.addOrUpdateEndpoint(endpointSpec); } catch (MangleException ex) { Assert.assertEquals(ex.getErrorCode(), ErrorCode.DUPLICATE_RECORD_FOR_ENDPOINT); verify(repository, times(1)).findByName(anyString()); } } /** * Test method for {@link EndpointService#addOrUpdateEndpoint(EndpointSpec)} * * @throws MangleException */ @Test public void testAddOrUpdateEndpointWithNull() { when(repository.save(any(EndpointSpec.class))).thenReturn(endpointSpec); boolean actualResult = false; try { endpointService.addOrUpdateEndpoint(null); } catch (Exception e) { actualResult = true; } verify(repository, times(0)).save(any(EndpointSpec.class)); Assert.assertTrue(actualResult); } /** * Test method for {@link EndpointService#getEndpointBasedOnPage(int, int)}. 
*/ @SuppressWarnings("unchecked") @Test public void testGetEndpointBasedOnPage() { Slice<EndpointSpec> page = Mockito.mock(Slice.class); when(page.getSize()).thenReturn(4); when(repository.findAll(any(Pageable.class))).thenReturn(page); when(repository.count()).thenReturn(10L); Slice<EndpointSpec> actualResult = endpointService.getEndpointBasedOnPage(1, 4); Assert.assertEquals(endpointService.getTotalPages(page), 3); verify(repository, times(1)).findAll(any(Pageable.class)); Assert.assertEquals(actualResult.getSize(), 4); verify(page, times(3)).getSize(); } /** * Test method for {@link EndpointService#getEndpointBasedOnPage(int, int)}. */ @SuppressWarnings("unchecked") @Test(description = "Test to get the data from 2 page with size 4") public void testGetEndpointBasedOnPageCase1() { Slice<EndpointSpec> slice = Mockito.mock(Slice.class); when(slice.getSize()).thenReturn(4); when(repository.findAll(any(Pageable.class))).thenReturn(slice); CassandraPageRequest pageable = Mockito.mock(CassandraPageRequest.class); when(slice.getPageable()).thenReturn(pageable); when(pageable.getPagingState()).thenReturn(null); Slice<EndpointSpec> actualResult = endpointService.getEndpointBasedOnPage(2, 4); verify(repository, times(1)).findAll(any(Pageable.class)); Assert.assertEquals(actualResult.getSize(), 4); verify(slice, times(1)).getSize(); } /** * Test method for {@link EndpointService#getEndpointBasedOnPage(int, int)}. */ @SuppressWarnings("unchecked") @Test(description = "Test to get the data from 3 page with size 4") public void testGetEndpointBasedOnPageCase2() { Slice<EndpointSpec> slice = Mockito.mock(Slice.class); when(slice.getSize()).thenReturn(4); when(repository.findAll(any(Pageable.class))).thenReturn(slice); CassandraPageRequest pageable = Mockito.mock(CassandraPageRequest.class); when(slice.getPageable()).thenReturn(pageable); when(pageable.getPageNumber()).thenReturn(3); when(pageable.getPageSize()).thenReturn(4); PagingState pagingState = Mockito.mock(PagingState.class); when(pageable.getPagingState()).thenReturn(pagingState); Slice<EndpointSpec> actualResult = endpointService.getEndpointBasedOnPage(3, 4); verify(repository, times(3)).findAll(any(Pageable.class)); Assert.assertEquals(actualResult.getSize(), 4); verify(slice, times(1)).getSize(); } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * */ @Test public void testTestEndpointConnectionWithNullCredentialsName() { EndpointSpec endpointSpecV11 = mockData.rmEndpointMockData(); endpointSpecV11.setCredentialsName(null); try { endpointService.testEndpointConnection(endpointSpecV11); } catch (MangleException e) { Assert.assertEquals(e.getErrorCode(), ErrorCode.FIELD_VALUE_EMPTY); } } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * * @throws MangleException * */ @Test public void testTestEndpointConnectionForDocker() throws MangleException { EndpointSpec endpointSpecV11 = mockData.dockerEndpointMockData(); EndpointClient client = Mockito.mock(EndpointClient.class); when(client.testConnection()).thenReturn(true); when(endpointClientFactory.getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class))) .thenReturn(client); assertTrue(endpointService.testEndpointConnection(endpointSpecV11)); verify(endpointClientFactory, times(1)).getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class)); } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * * @throws MangleException * */ @Test public void 
testTestEndpointConnectionForRedis() throws MangleException { EndpointSpec endpointSpecV11 = mockData.getRedisProxyEndpointMockData(); EndpointClient client = Mockito.mock(EndpointClient.class); when(client.testConnection()).thenReturn(true); when(endpointClientFactory.getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class))) .thenReturn(client); assertTrue(endpointService.testEndpointConnection(endpointSpecV11)); verify(endpointClientFactory, times(1)).getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class)); } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * * @throws MangleException * */ @Test public void testTestEndpointConnectionForRemoteMachine() throws MangleException { when(credentialService.getCredentialByName(anyString())) .thenReturn(credentialsSpecMockData.getRMCredentialsData()); EndpointClient client = Mockito.mock(EndpointClient.class); when(client.testConnection()).thenReturn(true); when(endpointClientFactory.getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class))) .thenReturn(client); assertTrue(endpointService.testEndpointConnection(endpointSpec)); verify(endpointClientFactory, times(1)).getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class)); verify(credentialService, times(1)).getCredentialByName(anyString()); } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * */ @Test public void testTestEndpointConnectionWithNullConnectionProperties() { EndpointSpec endpointSpecV11 = mockData.rmEndpointMockData(); endpointSpecV11.setRemoteMachineConnectionProperties(null); try { endpointService.testEndpointConnection(endpointSpecV11); } catch (MangleException e) { Assert.assertEquals(e.getErrorCode(), ErrorCode.PROVIDE_CONNECTION_PROPERTIES_FOR_ENDPOINT); } } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * * @throws MangleException * */ @Test public void testTestEndpointConnectionForVcenter() throws MangleException { when(credentialService.getCredentialByName(anyString())) .thenReturn(credentialsSpecMockData.getVCenterCredentialsData()); EndpointClient client = Mockito.mock(EndpointClient.class); when(client.testConnection()).thenReturn(true); when(endpointClientFactory.getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class))) .thenReturn(client); assertTrue(endpointService.testEndpointConnection(mockData.getVCenterEndpointSpecMock())); verify(endpointClientFactory, times(1)).getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class)); verify(credentialService, times(1)).getCredentialByName(anyString()); } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * * @throws MangleException * */ @Test public void testTestEndpointConnectionForDatabase() throws MangleException { when(credentialService.getCredentialByName(anyString())) .thenReturn(credentialsSpecMockData.getDatabaseCredentials()); EndpointSpec endpointSpecV11 = mockData.getDatabaseEndpointSpec(); EndpointClient client = Mockito.mock(EndpointClient.class); when(client.testConnection()).thenReturn(true); when(endpointClientFactory.getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class))) .thenReturn(client); assertTrue(endpointService.testEndpointConnection(endpointSpecV11)); verify(endpointClientFactory, times(1)).getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class)); verify(credentialService, times(1)).getCredentialByName(anyString()); } /** * Test method for {@link 
EndpointService#preProcessVCenterEndpointSpec(vCenterEndpointSpec)} * * @throws MangleException * */ @Test public void testpreProcessVCenterEndpointSpec_Throws_Exception() throws MangleException { when(vcaAdapterService.getVCAdapterDetailsByName("VCAdapterName")).thenReturn(null); try { endpointService.preProcessVCenterEndpointSpec(mockData.getVCenterEndpointSpecMock()); } catch (MangleException e) { verify(vcaAdapterService, times(1)).getVCAdapterDetailsByName(anyString()); assertThrows(MangleException.class, () -> endpointService.preProcessVCenterEndpointSpec(mockData.getVCenterEndpointSpecMock())); } } /** * Test method for {@link EndpointService#testEndpointConnection(EndpointSpec)} * * @throws MangleException * */ @Test public void testTestEndpointConnectionForK8S() throws MangleException { when(credentialService.getCredentialByName(anyString())) .thenReturn(credentialsSpecMockData.getk8SCredentialsData()); EndpointClient client = Mockito.mock(EndpointClient.class); when(client.testConnection()).thenReturn(true); when(endpointClientFactory.getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class))) .thenReturn(client); assertTrue(endpointService.testEndpointConnection(mockData.k8sEndpointMockData())); verify(endpointClientFactory, times(1)).getEndPointClient(any(CredentialsSpec.class), any(EndpointSpec.class)); verify(credentialService, times(1)).getCredentialByName(anyString()); } /** * Test method for {@link EndpointService#getAllContainersByEndpointName(java.lang.String)}. * * Description: Positive test case which will take the DockerEndpointName and gives the list of * containers present in Docker Host. * */ @Test public void testgetAllContainersByEndpointName() throws Exception { Optional<EndpointSpec> optional = Optional.of(dockerEndpointSpec); List<String> allContainers = new ArrayList<>(); allContainers.add("mangle"); when(repository.findByName(anyString())).thenReturn(optional); PowerMockito.whenNew(CustomDockerClient.class).withAnyArguments().thenReturn(customDockerClient); when(customDockerClient.getAllContainerNames()).thenReturn(allContainers); List<String> actualResult = endpointService.getAllContainersByEndpointName(dockerEndpointSpec.getName()); verify(repository, times(1)).findByName(anyString()); Assert.assertEquals(actualResult.size(), 1); Assert.assertEquals(actualResult, allContainers); Assert.assertEquals(actualResult.get(0), "mangle"); } /** * Test method for {@link EndpointService#getAllContainersByEndpointName(java.lang.String)}. * * Description: Test case to validate if the EndpointSpec returned is not the DockerEndpoint. * */ @Test public void testgetAllContainersByEndpointName_InvalidDockerEP() throws Exception { Optional<EndpointSpec> optional = Optional.of(endpointSpec); when(repository.findByName(anyString())).thenReturn(optional); PowerMockito.whenNew(CustomDockerClient.class).withAnyArguments().thenReturn(customDockerClient); try { endpointService.getAllContainersByEndpointName(dockerEndpointSpec.getName()); } catch (MangleException exception) { verify(repository, times(1)).findByName(anyString()); Assert.assertEquals("Invalid DockerEndpoint", exception.getMessage()); } } /** * Test method for {@link EndpointService#getAllContainersByEndpointName(java.lang.String)}. * * Description: Test case to validate the result when the EndpointName is Empty/Null. 
* */ @Test public void testgetAllContainersByEndpointName_EPNameEmpty() throws Exception { Optional<EndpointSpec> optional = Optional.of(endpointSpec); when(repository.findByName(anyString())).thenReturn(optional); PowerMockito.whenNew(CustomDockerClient.class).withAnyArguments().thenReturn(customDockerClient); List<String> actualResult = endpointService.getAllContainersByEndpointName(""); verify(repository, times(0)).findByName(anyString()); Assert.assertEquals(actualResult, Collections.emptyList()); } /** * Test method for {@link EndpointService#getAllContainersByEndpointName(java.lang.String)}. * * Description: Test case to validate the result when the EndpointSpec returned from Repository * is Null/Empty. * */ @Test public void testgetAllContainersByEndpointName_NoRecordFound() throws Exception { Optional<EndpointSpec> optional = Optional.empty(); when(repository.findByName(anyString())).thenReturn(optional); PowerMockito.whenNew(CustomDockerClient.class).withAnyArguments().thenReturn(customDockerClient); try { endpointService.getAllContainersByEndpointName(dockerEndpointSpec.getName()); } catch (MangleException exception) { verify(repository, times(1)).findByName(anyString()); Assert.assertEquals("Found No Search Results", exception.getMessage()); } } /** * Test method for {@link EndpointService#enableEndpoints(List, Map, boolean)} * * Description: Test case to validate enable and disable endpoint for fault injection when only * list of names passed as filters * */ @Test public void testEnableEndpointsWithNames() { List<EndpointSpec> endpointSpecList = new ArrayList<>(); endpointSpecList.add(endpointSpec); endpointSpecList.add(mockData.k8sEndpointMockData()); List<String> endpointNames = new ArrayList<>(); endpointNames.add(endpointSpecList.get(0).getName()); endpointNames.add(endpointSpecList.get(1).getName()); when(repository.findAll()).thenReturn(endpointSpecList); when(repository.findByNames(any())).thenReturn(endpointSpecList); when(repository.saveAll(any())).thenReturn(endpointSpecList); try { List<String> updatedEndpoints = endpointService.enableEndpoints(endpointNames, null, false); Assert.assertEquals(updatedEndpoints, endpointNames, "test enableEndpoint method with only names as filter is failed"); verify(repository, times(0)).findAll(); verify(repository, times(1)).findByNames(any()); } catch (MangleException exception) { Assert.fail("test enableEndpoint method is failed with exception" + exception.getMessage()); } } /** * Test method for {@link EndpointService#enableEndpoints(List, Map, boolean)} * * Description: Test case to validate enable and disable endpoint for fault injection when only * tags passed as filter * */ @Test public void testEnableEndpointsWithTags() { Map<String, String> tags = new HashMap<>(); tags.put("ip", endpointSpec.getRemoteMachineConnectionProperties().getHost()); endpointSpec.setTags(tags); List<EndpointSpec> endpointSpecList = new ArrayList<>(); endpointSpecList.add(endpointSpec); List<String> endpointNames = new ArrayList<>(); endpointNames.add(endpointSpecList.get(0).getName()); when(repository.findAll()).thenReturn(endpointSpecList); when(repository.findByNames(any())).thenReturn(endpointSpecList); when(repository.saveAll(any())).thenReturn(endpointSpecList); try { List<String> updatedEndpoints = endpointService.enableEndpoints(null, tags, false); Assert.assertEquals(updatedEndpoints, endpointNames, "test enableEndpoint method with only tags as filter is failed"); verify(repository, times(1)).findAll(); verify(repository, 
times(0)).findByNames(any()); } catch (MangleException exception) { Assert.fail("test enableEndpoint method is failed with exception" + exception.getMessage()); } } /** * Test method for {@link EndpointService#enableEndpoints(List, Map, boolean)} * * Description: Test case to validate enable and disable endpoint for fault injection when both * names and tags sent as filters * */ @Test public void testEnableEndpointsWithBothTagsAndNames() { List<EndpointSpec> endpointSpecList = new ArrayList<>(); endpointSpecList.add(endpointSpec); List<String> endpointNames = new ArrayList<>(); endpointNames.add(endpointSpecList.get(0).getName()); when(repository.findAll()).thenReturn(endpointSpecList); when(repository.findByNames(any())).thenReturn(endpointSpecList); when(repository.saveAll(any())).thenReturn(endpointSpecList); try { List<String> updatedEndpoints = endpointService.enableEndpoints(endpointNames, endpointSpecList.get(0).getTags(), false); Assert.assertEquals(updatedEndpoints, endpointNames, "test enableEndpoint method with both names and tags as filters is failed"); verify(repository, times(0)).findAll(); verify(repository, times(1)).findByNames(any()); } catch (MangleException exception) { Assert.fail("test enableEndpoint method is failed with exception" + exception.getMessage()); } } /** * Test method for {@link EndpointService#enableEndpoints(List, Map, boolean)} * * Description: Test case to validate enable and disable endpoint for fault injection when both * names and tags not sent as filters * */ @Test public void testEnableEndpointsWithoutTagsAndNames() { List<EndpointSpec> endpointSpecList = new ArrayList<>(); endpointSpecList.add(endpointSpec); List<String> endpointNames = new ArrayList<>(); endpointNames.add(endpointSpecList.get(0).getName()); when(repository.findAll()).thenReturn(endpointSpecList); when(repository.findByNames(any())).thenReturn(endpointSpecList); when(repository.saveAll(any())).thenReturn(endpointSpecList); try { List<String> updatedEndpoints = endpointService.enableEndpoints(null, null, false); Assert.assertEquals(updatedEndpoints, endpointNames, "test enableEndpoint method without names and tags as filters is failed"); verify(repository, times(1)).findAll(); verify(repository, times(0)).findByNames(any()); } catch (MangleException exception) { Assert.fail("test enableEndpoint method is failed with exception" + exception.getMessage()); } try { when(repository.findAll()).thenReturn(new ArrayList<>()); endpointService.enableEndpoints(null, null, false); Assert.fail("test enableEndpoint failed to throw exception whe no endpoints found"); } catch (MangleException exception) { Assert.assertEquals(exception.getErrorCode(), ErrorCode.NO_ENDPOINTS_FOUND, "Failed to throw expection eexception wtih error code:" + ErrorCode.NO_ENDPOINTS_FOUND); } } /** * Test method for {@link EndpointService#preProcessDatabaseEndpointSpec(EndpointSpec)}. * * @throws MangleException */ @Test public void testPreProcessDatabaseEndpointSpec() throws MangleException { EndpointSpec endpointSpec = mockData.getDatabaseEndpointSpec(); when(repository.findByName(anyString())).thenReturn(Optional.of(endpointSpec)); assertEquals(endpointService.preProcessDatabaseEndpointSpec(endpointSpec), endpointSpec); verify(repository, times(1)).findByName(anyString()); } /** * Test method for * {@link EndpointService#getAllResourcesByEndpointName(java.lang.String,com.vmware.mangle.services.enums.K8SResource)}. 
* * Description: Positive test case which will take the K8sEndpointName and resource type and * gives the list of resource present in K8s Cluster. * */ @Test public void testGetAllResourcesByEndpointName() throws MangleException { Optional<EndpointSpec> optional = Optional.of(k8sEndpointSpec); K8SResource resourceType = K8SResource.DEPLOYMENT; CredentialsSpec credentialsSpec = credentialsSpecMockData.getk8SCredentialsData(); List<String> allResources = new ArrayList<>(); allResources.add("mangle"); when(repository.findByName(anyString())).thenReturn(optional); when(endpointClientFactory.getEndPointClient(any(), any())).thenReturn(kubernetesCommandLineClient); when(kubernetesCommandLineClient.getResourcesByType(any())).thenReturn(allResources); when(credentialService.getCredentialByName(anyString())).thenReturn(credentialsSpec); List<String> actualResult = endpointService.getAllResourcesByEndpointName(k8sEndpointSpec.getName(), resourceType); verify(repository, times(1)).findByName(anyString()); Assert.assertEquals(actualResult.size(), 1, "Test failed because the actual number of resources differs from the expected number of resource"); Assert.assertEquals(actualResult, allResources, "Test failed because the elements in the actual Collection differ from the elements in the expected Collection"); Assert.assertEquals(actualResult.get(0), "mangle", "Test failed because the first element in the actual list differs from expected result 'mangle'"); } /** * Test method for * {@link EndpointService#getAllResourcesByEndpointName(java.lang.String,com.vmware.mangle.services.enums.K8SResource)}. * * Description: Test case to validate if the EndpointSpec returned is not the K8sEndpoint. * */ @Test public void testGetAllResourcesByEndpointName_InvalidK8sEP() throws MangleException { Optional<EndpointSpec> optional = Optional.of(endpointSpec); K8SResource resourceType = K8SResource.DEPLOYMENT; CredentialsSpec credentialsSpec = credentialsSpecMockData.getk8SCredentialsData(); when(repository.findByName(anyString())).thenReturn(optional); when(endpointClientFactory.getEndPointClient(any(), any())).thenReturn(kubernetesCommandLineClient); when(credentialService.getCredentialByName(anyString())).thenReturn(credentialsSpec); when(repository.findByName(anyString())).thenReturn(optional); try { endpointService.getAllResourcesByEndpointName(k8sEndpointSpec.getName(), resourceType); } catch (MangleException exception) { verify(repository, times(1)).findByName(anyString()); Assert.assertEquals("Invalid K8sEndpoint", exception.getMessage(), "Test failed because getAllResourcesByEndpointName succeeded with invalid K8s endpoint"); } } /** * Test method for * {@link EndpointService#getAllResourcesByEndpointName(java.lang.String,com.vmware.mangle.services.enums.K8SResource)}. * * Description: Test case to validate the result when the EndpointName is Empty/Null. 
* */ @Test public void testGetAllResourcesByEndpointName_EPNameEmpty() throws MangleException { Optional<EndpointSpec> optional = Optional.of(endpointSpec); K8SResource resourceType = K8SResource.DEPLOYMENT; when(repository.findByName(anyString())).thenReturn(optional); List<String> actualResult = endpointService.getAllResourcesByEndpointName("", resourceType); verify(repository, times(0)).findByName(anyString()); Assert.assertEquals(actualResult, Collections.emptyList(), "Test failed because the actual result is not an empty list"); } /** * Test method for * {@link EndpointService#getAllResourcesByEndpointName(java.lang.String,com.vmware.mangle.services.enums.K8SResource)}. * * Description: Test case to validate the result when the EndpointSpec returned from Repository * is Null/Empty. * */ @Test public void testGetAllResourcesByEndpointName_NoRecordFound() throws MangleException { Optional<EndpointSpec> optional = Optional.empty(); K8SResource resourceType = K8SResource.DEPLOYMENT; when(repository.findByName(anyString())).thenReturn(optional); try { endpointService.getAllResourcesByEndpointName(k8sEndpointSpec.getName(), resourceType); } catch (MangleException exception) { verify(repository, times(1)).findByName(anyString()); Assert.assertEquals("Found No Search Results", exception.getMessage(), "Test failed because getAllResourcesByEndpointName succeeded when no record is found"); } } }
def configure(config_filepath=None):
    config = load_configuration(config_filepath)
    write_config = False
    if config["julia_exec"] is None:
        config["julia_exec"] = get_julia_exec()
        write_config = True
    else:
        print(f"configured julia executable: {config['julia_exec']}")
        while True:
            to_replace = input("Replace this executable with another? (y/n) ")
            # guard against empty input and treat any answer starting with 'y' (y, Y, yes) as yes
            if to_replace and to_replace.lower()[0] in {"y", "n"}:
                to_replace = to_replace.lower()[0] == "y"
                break
            print("Invalid entry detected.")
        if to_replace:
            config["julia_exec"] = get_julia_exec()
            write_config = True
    if config["genx_path"] is None:
        config["genx_path"] = get_genx_path()
        write_config = True
    else:
        print(f"configured GenX repo path: {config['genx_path']}")
        while True:
            to_replace = input("Replace this path with another? (y/n) ")
            # same case-insensitive yes/no handling as above
            if to_replace and to_replace.lower()[0] in {"y", "n"}:
                to_replace = to_replace.lower()[0] == "y"
                break
            print("Invalid entry detected.")
        if to_replace:
            config["genx_path"] = get_genx_path()
            write_config = True
    api = LibJulia.load(julia=config["julia_exec"])
    api.init_julia([f"--project={config['genx_path']}"])
    julia.install(julia=config["julia_exec"])
    from julia import Pkg
    Pkg.activate(config["genx_path"])
    Pkg.instantiate()
    if write_config:
        write_configuration(config)
    print("configuration successful!")
Dec. 1, 2016, 1:33 AM GMT / Updated Dec. 1, 2016, 5:33 AM GMT By Maggie Fox

More than 2,300 American scientists, including 22 Nobel Prize winners, issued an open letter to President-elect Donald Trump and Congress Wednesday, urging them to respect science. They are especially worried about government interference in their work and about policies that could reject scientific fact — notably climate science.

Photo: The Orbital ATK Antares rocket, with the Cygnus spacecraft onboard, stands on launch Pad-0A during sunrise at NASA's Wallops Flight Facility. (Bill Ingalls/NASA via Getty Images)

"From disease outbreaks to climate change to national security to technology innovation, people benefit when our nation's policies are informed by science unfettered by inappropriate political or corporate influence," the researchers, who come from dozens of fields in all 50 states, wrote.

"Congress and the Trump administration should ensure our nation's bedrock public health and environmental laws — such as the Clean Air Act and the Endangered Species Act — retain a strong scientific foundation, and that agencies are able to freely collect and draw upon scientific data to effectively carry out statutory responsibilities established by these laws. They should also safeguard the independence of those outside the government who provide scientific advice."

They've got plenty to worry about. Trump has raised questions about the legitimacy of climate change, made questionable comments about disproven links between vaccines and autism, said fracking causes "zero" risks, and has advocated for keeping everyone from Ebola-affected countries out of the U.S.

"Global warming is based on faulty science and manipulated data," Trump tweeted Nov. 2. Many Republicans in Congress also deny that climate change is real or caused by people. He's suggested dismantling the Environmental Protection Agency and promised to cancel the Paris climate agreement.

Senior Trump campaign adviser Bob Walker told the Guardian newspaper he thought the space agency NASA, which leads climate research that has shown melting ice caps and changes in atmospheric gases, should stop "politically correct environmental monitoring" and focus on space exploration instead.

"Respect for science in policymaking should be a prerequisite for any cabinet position," said physicist Lewis Branscomb of the University of California, San Diego.

Congress has blocked federal health agencies from researching, or even paying for research, on gun violence since the 1990s. Members of Congress have sponsored hearings aimed at raising doubts about the safety of vaccines and passed legislation to protect the sale of unproven nutritional supplements.

Advocacy groups such as Public Citizen worry that Trump would pressure the Food and Drug Administration to approve drugs and devices without adequate safety reviews. The president-elect has pledged to "reform the Food and Drug Administration, to put greater focus on the need of patients for new and innovative medical products."
"Experts at federal agencies prevent the spread of diseases, ensure the safety of our food and water, protect consumers from harmful medical devices, and so much more. The new administration must ensure that federal agencies can continue to use science to serve the public interest," said Nobel Laureate Carol Greider, director of molecular biology and genetics at Johns Hopkins University, who signed the letters. Other signatories include Dr. Georges Benjamin of the American Public Health Association, cosmologist Lawrence Krauss of Arizona State University and physicist Neal Lane of Rice University, a former science adviser to President Bill Clinton. The letter sponsored by the Union of Concerned Scientists, isn’t the first from the group. It also sponsored a letter to the administration of President George W. Bush, which irked scientists with its stance restricting research on embryonic stem cells and climate science. Reversing the Bush administration's strict limits of stem cell science was one of President Barack Obama's first acts in office.
export * from './base-tab/vhd-base-tab.component';
export * from './define-tab/define-tab.component';
export * from './select-tab/select-tab.component';
export * from './value-help-dialog-filter/value-help-dialog-filter.component';
export * from './value-help-dialog-search/value-help-dialog-search.component';
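As a small illustration, a barrel file like this lets consumers import the dialog components from the folder root instead of deep component paths. The import below is hypothetical: the export name 'DefineTabComponent' is an assumed class name chosen only to show the import shape.

// Hypothetical consumer; 'DefineTabComponent' is an assumed export name for illustration.
import { DefineTabComponent } from './components';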
#include <iostream>
#include <gtest/gtest.h>
#include <securities.h>

TEST(securities_test_case, FindSpot)
{
    Securities s;
    s.Refresh("ftx");
    EXPECT_EQ(true, s.FindSpot("BTC/USD", "ftx"));
}

TEST(securities_test_case, FindFuture)
{
    Securities s;
    s.Refresh("ftx");
    EXPECT_EQ(true, s.FindFuture("BTC-PERP", "ftx"));
}

TEST(securities_test_case, SpotSecurity)
{
    Securities s;
    s.Refresh("ftx");
    auto sec = s.SecSpot("BTC/USD", "ftx");
    EXPECT_STREQ("spot", sec.product.c_str());
    EXPECT_STREQ("ftx", sec.source.c_str());
    EXPECT_STREQ("BTC/USD", sec.symbol.c_str());
    EXPECT_STREQ("USD", sec.quoteCurrency.c_str());
    EXPECT_STREQ("BTC", sec.baseCurrency.c_str());

    // Validate Exchange Rules
    EXPECT_EQ(true, sec.enabled);
    EXPECT_EQ(0.0001, sec.minProvideSize);
    EXPECT_EQ(1.0, sec.priceIncrement);
    EXPECT_EQ(0.0001, sec.sizeIncrement);
    EXPECT_EQ(false, sec.postOnly);
    EXPECT_EQ(false, sec.restricted);
}

TEST(securities_test_case, FutureSecurity)
{
    Securities s;
    s.Refresh("ftx");
    auto sec = s.SecFuture("BTC-PERP", "ftx");
    EXPECT_STREQ("future", sec.product.c_str());
    EXPECT_STREQ("ftx", sec.source.c_str());
    EXPECT_STREQ("BTC-PERP", sec.symbol.c_str());

    // Validate Exchange Rules
    EXPECT_EQ(true, sec.enabled);
    EXPECT_EQ(0.001, sec.minProvideSize);
    EXPECT_EQ(1.0, sec.priceIncrement);
    EXPECT_EQ(0.0001, sec.sizeIncrement);
    EXPECT_EQ(false, sec.postOnly);
    EXPECT_EQ(false, sec.restricted);
}
/**
 * Test class for CreateMoodActivity. All the UI tests are written here.
 * The Robotium test framework is used.
 * todo:
 *  * write tests for location picker
 */
public class CreateMoodActivityTest {
    private Solo solo;

    @Rule
    public ActivityTestRule<LoginActivity> rule = new ActivityTestRule<>(LoginActivity.class, true, true);

    @Before
    public void setUp() {
        solo = new Solo(InstrumentationRegistry.getInstrumentation(), rule.getActivity());
        TestHelper.setup(solo);
    }

    /**
     * Clicks on the Fab button for adding moods to go to createMood Activity
     */
    @Test
    public void CreateActivityTest() {
        // wait for activity to change
        solo.sleep(5000);
        solo.assertCurrentActivity("Wrong Activity", MainActivity.class);
        // ensure current fragment is for Mood History
        assertTrue(solo.searchText("Mood History"));
        solo.clickOnView(solo.getView(R.id.mood_history_add_button));
        solo.sleep(5000);
        // wait for activity to change
        assertTrue(solo.waitForActivity(CreateMoodActivity.class));
    }

    /**
     * Tests the adding of location
     * NOTE: Set location to 37.4220, -122.0840 in the android emulator
     */
    @Test
    public void addLocationTest() {
        // wait for activity to change
        solo.sleep(5000);
        solo.assertCurrentActivity("Wrong Activity", MainActivity.class);
        // ensure current fragment is for Mood History
        Assert.assertTrue(solo.searchText("Mood History"));
        // create mood
        solo.clickOnView(solo.getView(R.id.mood_history_add_button));
        // add location by clicking on button
        solo.clickOnView(solo.getView(R.id.create_location_button));
        // wait for activity to launch
        solo.sleep(3000);
        // check if we are on LocationPickerActivity
        solo.assertCurrentActivity("Expected Maps Activity to launch", LocationPickerActivity.class);
        // add location
        solo.clickOnView(solo.getView(R.id.confirmButton));
        solo.sleep(2000);
        // check if we got back to CreateMoodActivity
        solo.assertCurrentActivity("Expected create mood activity to launch", CreateMoodActivity.class);
        // check the coordinates passed back and set into the pick location button text
        Button pickLocationButton = (Button) solo.getView(R.id.create_location_button);
        String actual = pickLocationButton.getText().toString();
        String expected = "1600 Amphitheatre Pkwy, Mountain View, CA 94043, USA";
        assertEquals("Expected coords returned from LocationPickerActivity to match users location. Make sure you set the location in the emulator to 37.4220, -122.0840", expected, actual);
    }
}
/* * Copyright (c) 2010 Cisco Systems, Inc. All rights reserved. * $COPYRIGHT$ * * Additional copyrights may follow * * $HEADER$ */ #include "orte_config.h" #include "orte/constants.h" #include "opal/mca/mca.h" #include "opal/util/output.h" #include "opal/mca/base/base.h" #include "opal/mca/base/mca_base_param.h" #ifdef HAVE_STRING_H #include <string.h> #endif #include "orte/mca/debugger/base/base.h" /* * The following file was created by configure. It contains extern * statements and the definition of an array of pointers to each * component's public mca_base_component_t struct. */ #include "orte/mca/debugger/base/static-components.h" /* * Global variables */ orte_debugger_base_t orte_debugger_base; opal_list_t orte_debugger_base_components_available; orte_debugger_base_module_t orte_debugger; /* instance the standard MPIR interfaces */ struct MPIR_PROCDESC *MPIR_proctable = NULL; int MPIR_proctable_size = 0; volatile int MPIR_being_debugged = 0; volatile int MPIR_debug_state = 0; volatile int MPIR_i_am_starter = 0; volatile int MPIR_partial_attach_ok = 1; volatile char MPIR_executable_path[MPIR_MAX_PATH_LENGTH]; volatile char MPIR_server_arguments[MPIR_MAX_ARG_LENGTH]; volatile int MPIR_forward_output = 0; volatile int MPIR_forward_comm = 0; char MPIR_attach_fifo[MPIR_MAX_PATH_LENGTH]; int MPIR_force_to_main = 0; #if ORTE_DISABLE_FULL_SUPPORT int orte_debugger_base_open(void) { return ORTE_SUCCESS; } #else /** * Function for finding and opening either all MCA components, or the one * that was specifically requested via a MCA parameter. */ int orte_debugger_base_open(void) { int value; /* Debugging / verbose output. Always have stream open, with verbose set by the mca open system... */ orte_debugger_base.output = opal_output_open(NULL); mca_base_param_reg_int_name("orte", "output_debugger_proctable", "Whether or not to output the debugger proctable after launch (default: false)", true, false, 0, &value); orte_debugger_base.dump_proctable = OPAL_INT_TO_BOOL(value); mca_base_param_reg_string_name("orte", "debugger_test_daemon", "Name of the executable to be used to simulate a debugger colaunch (relative or absolute path)", false, false, NULL, &orte_debugger_base.test_daemon); mca_base_param_reg_int_name("orte", "debugger_test_attach", "Test debugger colaunch after debugger attachment", false, false, 0, &value); orte_debugger_base.test_attach = OPAL_INT_TO_BOOL(value); /* Open up all available components */ if (ORTE_SUCCESS != mca_base_components_open("debugger", orte_debugger_base.output, mca_debugger_base_static_components, &orte_debugger_base_components_available, true)) { return ORTE_ERROR; } /* All done */ return ORTE_SUCCESS; } #endif
<reponame>technologiestiftung/kulturdaten-frontend<filename>components/navigation/header/HeaderMenuLink.tsx<gh_stars>1-10 import React, { useContext, useMemo } from 'react'; import Link from 'next/link'; import styled from '@emotion/styled'; import * as feather from 'react-feather'; import { useIsRouteStringActive } from '../../../lib/routing'; import { css } from '@emotion/react'; import { NavigationContext } from '../NavigationContext'; const StyledA = styled.a<{ active?: boolean; disabled?: boolean }>` color: inherit; text-decoration: none; display: flex; align-items: center; justify-content: space-between; width: 100%; padding: calc(0.375rem - 1px) calc(0.75rem - 1px); border: 1px solid var(--grey-400); background: var(--white); border-radius: 0.75rem; font-size: var(--font-size-300); line-height: var(--line-height-300); font-weight: 700; transition: background var(--transition-duration-fast); &:hover { background: var(--grey-400); } ${({ disabled }) => disabled && css` cursor: not-allowed; opacity: 0.3; &:hover { background: var(--white); } `} ${({ active }) => active ? css` background: var(--black); border-color: var(--black); color: var(--white); &:hover { background: var(--black); } ` : ''} `; const StyledHeaderMenuLinkIcon = styled.div` display: flex; align-items: center; svg { width: 1.125rem; height: 1.125rem; } `; export enum MenuLinkType { internal = 'internal', external = 'external', } interface InternalMenuLinkProps { title: string; href: string; active?: boolean; icon?: string; onClick?: () => void; disabled?: boolean; __TYPE?: 'HeaderMenuLink'; } export interface HeaderMenuLinkProps extends InternalMenuLinkProps { type?: MenuLinkType; } const InternalMenuLink: React.FC<InternalMenuLinkProps> = ({ title, href, active, icon, onClick, disabled = false, }: InternalMenuLinkProps) => { const isRouteActive = useIsRouteStringActive(href); const linkIsActive = active !== undefined ? active : isRouteActive; const isDisabled = useMemo(() => disabled && !linkIsActive, [disabled, linkIsActive]); const { setMenuExpanded } = useContext(NavigationContext); const renderedStyledLink = ( <StyledA title={title} active={linkIsActive} onClick={() => { if (onClick) { onClick(); } setMenuExpanded(false); }} as={isDisabled ? 'div' : undefined} disabled={isDisabled} > <span>{title}</span> {icon && ( <StyledHeaderMenuLinkIcon>{React.createElement(feather[icon])}</StyledHeaderMenuLinkIcon> )} </StyledA> ); return !isDisabled ? ( <Link href={href} passHref> {renderedStyledLink} </Link> ) : ( renderedStyledLink ); }; export const HeaderMenuLink: React.FC<HeaderMenuLinkProps> = (props: HeaderMenuLinkProps) => { const { type = MenuLinkType.internal } = props; switch (type) { case MenuLinkType.internal: { return <InternalMenuLink {...props} />; } case MenuLinkType.external: { const { title, href } = props; return ( <StyledA href={href} rel="noopener noreferrer" target="_blank"> {title} </StyledA> ); } default: { throw new Error(`MenuLink type "${type}" is not valid`); } } }; HeaderMenuLink.defaultProps = { __TYPE: 'HeaderMenuLink', };
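As a rough usage sketch (not taken from the repository), the component above can be rendered for both link types as shown below. The menu items, routes, and the 'Home' feather icon key are illustrative assumptions, and the fragment assumes the same React imports as the component file.

// Hypothetical menu fragment; titles, hrefs and the icon key are made up for illustration.
const ExampleHeaderMenu: React.FC = () => (
  <>
    <HeaderMenuLink title="Dashboard" href="/dashboard" icon="Home" />
    <HeaderMenuLink title="Settings" href="/settings" disabled />
    <HeaderMenuLink
      type={MenuLinkType.external}
      title="Documentation"
      href="https://example.org/docs"
    />
  </>
);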
package ru.stqa.pft.sandbox; public class PointMain { public static void main(String[] args) { double d1 = Point.distance(new Point(0, 0), new Point(0, 1)); double d2 = Point.distance(new Point(1, 1), new Point(-1, -1)); double d3 = new Point(0,3).distance(new Point(4,0)); System.out.println(d1); System.out.println(d2); System.out.println(d3); } }
// also remove surrounding quotes, if any : String KillHeadTailWhiteAndQuotes(const char * str) { str = skipwhitespace(str); const char * end = skipwhitebackwards( strend(str) -1,str); if ( end <= str ) return String(); int len = (int) (end - str) + 1; if ( len >= 2 ) { if ( *str == '"' && str[len-1] == '"' ) { str++; len -= 2; } else if ( *str == SingleQuote && str[len-1] == SingleQuote ) { str++; len -= 2; } } return String(String::eSubString,str,len); }
package gobatis import ( "fmt" "testing" ) func TestStaticSqlSource_getBoundSql(t *testing.T) { sss := &staticSqlSource{ sqlStr:"select * from t_gap where id = #{id} and gap = #{gap}", paramMappings:make([]string, 0), } bs := sss.getBoundSql(map[string]interface{}{ "id":1, "gap":10, }) expc := "select * from t_gap where id = ? and gap = ?" assertEqual(bs.sqlStr, expc, "test failed, actual:" + bs.sqlStr) assertEqual(bs.params["id"], 1, "test failed, actual:" + fmt.Sprintf("%d", bs.params["id"])) assertEqual(bs.params["gap"], 10, "test failed, actual:" + fmt.Sprintf("%d", bs.params["gap"])) } func TestDynamicSqlSource_getBoundSql(t *testing.T) { params := map[string]interface{}{ "name": "wenj91", "array": []map[string]interface{}{{"idea": "11"}, {"idea": "22"}, {"idea": "33"}}, "array1": []string{"11", "22", "33"}, "array2": []s{{A: "aa"}, {A: "bb"}, {A: "cc"}}, } msn := &mixedSqlNode{ sqlNodes: []iSqlNode{ &textSqlNode{ content: "select 1 from t_gap where 1 = 1", }, &ifSqlNode{ test: "name == 'wenj91'", sqlNode: &textSqlNode{ content: "and name = #{name}", }, }, &foreachSqlNode{ sqlNode: &mixedSqlNode{ sqlNodes: []iSqlNode{ &textSqlNode{ content: "#{ item.A }", }, }, }, item: "item", open: "and id in (", close: ")", separator: ",", collection: "array2", }, }, } ds := dynamicSqlSource{ sqlNode: msn, } bs := ds.getBoundSql(params) expc := "select 1 from t_gap where 1 = 1 and name = ? and id in ( ? , ? , ? )" assertEqual(bs.sqlStr, expc, "test failed, actual:" + bs.sqlStr) assertEqual(bs.params["name"], "wenj91", "test failed, actual:" + fmt.Sprintf("%d", bs.params["id"])) assertEqual(bs.extParams["_ls_item_p_item0.A"], "aa", "test failed, actual:" + fmt.Sprintf("%s", bs.extParams["_ls_item_p_item0.A"])) assertEqual(bs.extParams["_ls_item_p_item1.A"], "bb", "test failed, actual:" + fmt.Sprintf("%s", bs.extParams["_ls_item_p_item1.A"])) assertEqual(bs.extParams["_ls_item_p_item2.A"], "cc", "test failed, actual:" + fmt.Sprintf("%s", bs.extParams["_ls_item_p_item2.A"])) }
<gh_stars>0 /* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.kie.workbench.common.screens.library.client.screens.project; import javax.enterprise.event.Event; import elemental2.dom.HTMLElement; import org.guvnor.common.services.project.client.context.WorkspaceProjectContext; import org.guvnor.common.services.project.context.WorkspaceProjectContextChangeEvent; import org.guvnor.common.services.project.model.GAV; import org.guvnor.common.services.project.model.Module; import org.guvnor.common.services.project.model.POM; import org.guvnor.common.services.project.model.WorkspaceProject; import org.guvnor.structure.repositories.Branch; import org.jboss.errai.common.client.api.Caller; import org.jboss.errai.ioc.client.api.ManagedInstance; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.kie.workbench.common.screens.defaulteditor.client.editor.NewFileUploader; import org.kie.workbench.common.screens.library.api.LibraryService; import org.kie.workbench.common.screens.library.client.screens.ProjectScreenTestBase; import org.kie.workbench.common.screens.library.client.screens.assets.AssetsScreen; import org.kie.workbench.common.screens.library.client.screens.organizationalunit.contributors.tab.ContributorsListPresenter; import org.kie.workbench.common.screens.library.client.screens.organizationalunit.contributors.tab.ProjectContributorsListServiceImpl; import org.kie.workbench.common.screens.library.client.screens.project.actions.ProjectMainActions; import org.kie.workbench.common.screens.library.client.screens.project.branch.delete.DeleteBranchPopUpScreen; import org.kie.workbench.common.screens.library.client.screens.project.delete.DeleteProjectPopUpScreen; import org.kie.workbench.common.screens.library.client.screens.project.rename.RenameProjectPopUpScreen; import org.kie.workbench.common.screens.library.client.settings.SettingsPresenter; import org.kie.workbench.common.screens.library.client.util.LibraryPermissions; import org.kie.workbench.common.screens.library.client.util.LibraryPlaces; import org.kie.workbench.common.screens.projecteditor.client.validation.ProjectNameValidator; import org.kie.workbench.common.screens.projecteditor.service.ProjectScreenService; import org.kie.workbench.common.widgets.client.handlers.NewResourcePresenter; import org.mockito.Answers; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import org.uberfire.backend.vfs.Path; import org.uberfire.ext.editor.commons.client.file.CommandWithFileNameAndCommitMessage; import org.uberfire.ext.editor.commons.client.file.FileNameAndCommitMessage; import org.uberfire.ext.editor.commons.client.file.popups.CopyPopUpPresenter; import org.uberfire.ext.editor.commons.client.file.popups.CopyPopUpView; import org.uberfire.mocks.CallerMock; import org.uberfire.promise.SyncPromises; import org.uberfire.workbench.events.NotificationEvent; import static org.junit.Assert.assertFalse; import static 
org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class ProjectScreenTest extends ProjectScreenTestBase { private ProjectScreen presenter; @Mock private ProjectScreen.View view; @Mock private LibraryPlaces libraryPlaces; @Mock private AssetsScreen assetsScreen; @Mock private AssetsScreen.View assetsView; @Mock private ContributorsListPresenter contributorsListScreen; @Mock private ProjectMetricsScreen projectMetrictsScreen; @Mock private LibraryPermissions libraryPermissions; @Mock(answer = Answers.RETURNS_DEEP_STUBS) private NewFileUploader newFileUploader; @Mock private NewResourcePresenter newResourcePresenter; @Mock private ManagedInstance<DeleteProjectPopUpScreen> deleteProjectPopUpScreenInstance; @Mock private DeleteProjectPopUpScreen deleteProjectPopUpScreen; @Mock private ManagedInstance<DeleteBranchPopUpScreen> deleteBranchPopUpScreenInstance; @Mock private DeleteBranchPopUpScreen deleteBranchPopUpScreen; @Mock private ManagedInstance<RenameProjectPopUpScreen> renameProjectPopUpScreenInstance; @Mock private RenameProjectPopUpScreen renameProjectPopUpScreen; @Mock private LibraryService libraryService; @Mock private SettingsPresenter settingsPresenter; @Mock private ProjectScreenService projectScreenService; private Caller<ProjectScreenService> projectScreenServiceCaller; @Mock private CopyPopUpPresenter copyPopUpPresenter; @Mock private ProjectNameValidator projectNameValidator; @Mock private Event<NotificationEvent> notificationEvent; @Mock private ProjectContributorsListServiceImpl projectContributorsListService; @Mock private ProjectMainActions projectMainActions; private SyncPromises promises; @Before public void setUp() { projectScreenServiceCaller = new CallerMock<>(projectScreenService); promises = spy(new SyncPromises()); when(assetsScreen.getView()).thenReturn(assetsView); when(deleteProjectPopUpScreenInstance.get()).thenReturn(deleteProjectPopUpScreen); when(deleteBranchPopUpScreenInstance.get()).thenReturn(deleteBranchPopUpScreen); when(renameProjectPopUpScreenInstance.get()).thenReturn(renameProjectPopUpScreen); final WorkspaceProjectContext projectContext = mock(WorkspaceProjectContext.class); when(libraryPlaces.getWorkbenchContext()).thenReturn(projectContext); final ProjectScreen projectScreen = new ProjectScreen(this.view, this.libraryPlaces, this.assetsScreen, this.contributorsListScreen, this.projectMetrictsScreen, this.libraryPermissions, this.settingsPresenter, this.newFileUploader, this.newResourcePresenter, this.deleteProjectPopUpScreenInstance, this.deleteBranchPopUpScreenInstance, this.renameProjectPopUpScreenInstance, new CallerMock<>(this.libraryService), projectScreenServiceCaller, copyPopUpPresenter, projectNameValidator, promises, notificationEvent, projectContributorsListService, projectMainActions); this.presenter = spy(projectScreen); this.presenter.workspaceProject = spy(createProject()); when(libraryPlaces.getActiveWorkspace()).thenReturn(this.presenter.workspaceProject); } @Test public void testInitialize() { presenter.initialize(); verify(view).init(presenter); 
verify(view).setTitle("mainModuleName"); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithPermissionToUpdateProjectOnly() { doReturn(true).when(this.presenter).userCanUpdateProject(); presenter.initialize(); verify(view).setAddAssetVisible(true); verify(view).setImportAssetVisible(true); verify(view).setDuplicateVisible(false); verify(view).setReimportVisible(true); verify(view).setDeleteProjectVisible(false); verify(view).setDeleteBranchVisible(false); verify(view).setActionsVisible(true); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithPermissionToDeleteProjectOnly() { doReturn(true).when(this.presenter).userCanDeleteProject(); presenter.initialize(); verify(view).setAddAssetVisible(false); verify(view).setImportAssetVisible(false); verify(view).setDuplicateVisible(false); verify(view).setReimportVisible(false); verify(view).setDeleteProjectVisible(true); verify(view).setDeleteBranchVisible(false); verify(view).setActionsVisible(true); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithPermissionToBuildProjectOnly() { doReturn(true).when(this.presenter).userCanBuildProject(); presenter.initialize(); verify(view).setAddAssetVisible(false); verify(view).setImportAssetVisible(false); verify(view).setDuplicateVisible(false); verify(view).setReimportVisible(false); verify(view).setDeleteProjectVisible(false); verify(view).setDeleteBranchVisible(false); verify(view).setActionsVisible(true); verify(projectMainActions).setBuildEnabled(eq(true)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithPermissionToDeployProjectOnly() { doReturn(true).when(this.presenter).userCanDeployProject(); presenter.initialize(); verify(view).setAddAssetVisible(false); verify(view).setImportAssetVisible(false); verify(view).setDuplicateVisible(false); verify(view).setReimportVisible(false); verify(view).setDeleteProjectVisible(false); verify(view).setDeleteBranchVisible(false); verify(view).setActionsVisible(true); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(true)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithPermissionToCreateProjectsOnly() { doReturn(true).when(this.presenter).userCanCreateProjects(); presenter.initialize(); verify(view).setAddAssetVisible(false); verify(view).setImportAssetVisible(false); verify(view).setDuplicateVisible(true); verify(view).setReimportVisible(false); verify(view).setDeleteProjectVisible(false); verify(view).setDeleteBranchVisible(false); verify(view).setActionsVisible(true); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithoutAllPermissions() { presenter.initialize(); verify(view).setAddAssetVisible(false); verify(view).setImportAssetVisible(false); verify(view).setDuplicateVisible(false); 
verify(view).setReimportVisible(false); verify(view).setDeleteProjectVisible(false); verify(view).setDeleteBranchVisible(false); verify(view).setActionsVisible(false); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testActionsVisibilityWithPermissionToDeleteProjectInCreatedBranch() { doReturn(true).when(this.presenter).userCanDeleteProject(); doReturn(new Branch("other-branch", mock(Path.class))).when(presenter.workspaceProject).getBranch(); presenter.initialize(); verify(view).setAddAssetVisible(false); verify(view).setImportAssetVisible(false); verify(view).setDuplicateVisible(false); verify(view).setReimportVisible(false); verify(view).setDeleteProjectVisible(true); verify(view).setDeleteBranchVisible(true); verify(view).setActionsVisible(true); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testAddAsset() { { doReturn(false).when(this.presenter).userCanUpdateProject(); this.presenter.addAsset(); verify(this.libraryPlaces, never()).goToAddAsset(); } { doReturn(true).when(this.presenter).userCanUpdateProject(); this.presenter.addAsset(); verify(this.libraryPlaces, times(1)).goToAddAsset(); } } @Test public void testImportAsset() { { doReturn(false).when(this.presenter).userCanUpdateProject(); this.presenter.importAsset(); verify(this.newFileUploader, never()).getCommand(any()); } { doReturn(true).when(this.presenter).userCanUpdateProject(); this.presenter.importAsset(); verify(this.newFileUploader, times(1)).getCommand(any()); } } @Test public void testShowSettings() { SettingsPresenter.View settingsView = mock(SettingsPresenter.View.class); when(settingsView.getElement()).thenReturn(new HTMLElement()); when(this.settingsPresenter.getView()).thenReturn(settingsView); doReturn(promises.resolve()).when(settingsPresenter).setupUsingCurrentSection(); this.presenter.showSettings(); verify(view).setContent(any()); } @Test public void testRename() { { doReturn(false).when(this.presenter).userCanUpdateProject(); this.presenter.rename(); verify(this.renameProjectPopUpScreen, never()).show(any()); } { doReturn(true).when(this.presenter).userCanUpdateProject(); this.presenter.rename(); verify(this.renameProjectPopUpScreen, times(1)).show(any()); } } @Test public void testDuplicate() { { doReturn(false).when(this.presenter).userCanCreateProjects(); this.presenter.duplicate(); verify(this.copyPopUpPresenter, never()).show(any(), any(), any()); } { doReturn(true).when(this.presenter).userCanCreateProjects(); CommandWithFileNameAndCommitMessage duplicateCommand = mock(CommandWithFileNameAndCommitMessage.class); doReturn(duplicateCommand).when(presenter).getDuplicateCommand(); this.presenter.duplicate(); verify(this.copyPopUpPresenter).show(presenter.workspaceProject.getRootPath(), projectNameValidator, duplicateCommand); } } @Test public void testDuplicateCommand() { doNothing().when(projectScreenService).copy(any(), any()); final CopyPopUpView copyPopUpView = mock(CopyPopUpView.class); doReturn(copyPopUpView).when(copyPopUpPresenter).getView(); this.presenter.getDuplicateCommand().execute(new FileNameAndCommitMessage("newFileName", "commitMessage")); verify(copyPopUpView).hide(); verify(view).showBusyIndicator(anyString()); verify(projectScreenService).copy(presenter.workspaceProject, "newFileName"); 
verify(view).hideBusyIndicator(); verify(notificationEvent).fire(any()); verify(promises).resolve(); } @Test public void testReimport() { { doReturn(false).when(this.presenter).userCanUpdateProject(); this.presenter.reimport(); verify(this.copyPopUpPresenter, never()).show(any(), any(), any()); } { doNothing().when(projectScreenService).reImport(any()); doReturn(true).when(this.presenter).userCanUpdateProject(); CommandWithFileNameAndCommitMessage duplicateCommand = mock(CommandWithFileNameAndCommitMessage.class); doReturn(duplicateCommand).when(presenter).getDuplicateCommand(); this.presenter.reimport(); verify(view).showBusyIndicator(anyString()); verify(projectScreenService).reImport(presenter.workspaceProject.getMainModule().getPomXMLPath()); verify(view).hideBusyIndicator(); verify(notificationEvent).fire(any()); verify(promises).resolve(); } } @Test public void canBuild() { doReturn(true).when(libraryPermissions).userCanBuildProject(any(WorkspaceProject.class)); assertTrue(presenter.userCanBuildProject()); } @Test public void notAllowedToBuild() { doReturn(false).when(libraryPermissions).userCanBuildProject(any(WorkspaceProject.class)); assertFalse(presenter.userCanBuildProject()); } @Test public void testContextModuleIsUpdated() { final WorkspaceProject workspaceProject = mock(WorkspaceProject.class); doReturn("module name").when(workspaceProject).getName(); final Module module = mock(Module.class); when(module.getPom()).thenReturn(new POM(new GAV(GROUP_ID, ARTIFACT, VERSION))); when(workspaceProject.getMainModule()).thenReturn(module); presenter.changeProjectAndTitleWhenContextChange(new WorkspaceProjectContextChangeEvent(workspaceProject)); verify(view).setTitle("module name"); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(false)); } @Test public void testContextModuleIsUpdatedSapshot() { final WorkspaceProject workspaceProject = mock(WorkspaceProject.class); doReturn("module name").when(workspaceProject).getName(); final Module module = mock(Module.class); when(module.getPom()).thenReturn(new POM(new GAV(GROUP_ID, ARTIFACT, VERSION + "-SNAPSHOT"))); when(workspaceProject.getMainModule()).thenReturn(module); presenter.changeProjectAndTitleWhenContextChange(new WorkspaceProjectContextChangeEvent(workspaceProject)); verify(view).setTitle("module name"); verify(projectMainActions).setBuildEnabled(eq(false)); verify(projectMainActions).setDeployEnabled(eq(false)); verify(projectMainActions).setRedeployEnabled(eq(true)); } @Test public void shouldNotChangeProjectAndTitleWhenContextChange() { presenter.changeProjectAndTitleWhenContextChange(new WorkspaceProjectContextChangeEvent() { @Override public WorkspaceProject getWorkspaceProject() { return null; } }); verify(view, never()).setTitle(any()); } }
from google_images_download import google_images_download

insectFolder = "~/ai-insects/dataset-collection/insects-dataset/"

# creating the dictionary of arguments (all options go into the dict passed to download())
arguments = {
    "keywords": "mosquitoes",
    "limit": 50,
    "print_urls": True,
    "output_directory": insectFolder,
    "type": "photo",
    "print_size": True,
}

response = google_images_download.googleimagesdownload()
absolute_image_paths = response.download(arguments)
Five “can’t-miss” first round prospects in the 2009 NFL Draft There is no such thing as a “can’t-miss” prospect in the first round of an NFL draft. They just don’t exist. For as much as teams scout and prepare, it’s inevitable that some prospects just aren’t going to pan out, which means they hop the train to Bustville right out of the gates. Before you check to make sure that you’ve taken the correct dosage of your medication today, I realize that the first line of this article directly contradicts the title. That’s because even though there is no such thing as “can’t-miss” prospect in the first round, this is the time of year to have a little fun by making some predictions. Two weeks ago, I complied a ranking of the top 5 potential first round busts in this month’s draft. So I thought it was only appropriate that I construct another top 5 ranking, this time of players I deem to be “can’t-miss” prospects. These are first round prospects that I think are low-risk, high-reward players. Fair warning, don’t flip out when you read names of prospects that are expected to go in the top 10. This isn’t a piece dedicated to first round steals – it’s one that centers around prospects that should have the least amount of risk involved and the highest success rate. Just as I mentioned in my top 5 first round busts piece, I realize that one or two of these prospects might slip to the second round (see Alex Mack). But on average, most pundits would agree that all five of the players are first round talents. 1. Aaron Curry, LB, Wake Forest Some believe Curry is the best prospect in the entire draft and I’m one of them. What’s impressive about the Wake Forest product is that he projects as either a middle or outside linebacker in any defensive scheme. Some linebackers and defensive ends don’t have a true position in the NFL because of lack of size, but Curry is projected to have multiple positions at the next level. He’s also a three-down player who not only can be a force against the run, but he’s so athletic that he can stay on the field in passing situations and cover backs and tight ends in coverage. Curry is the ultimate package and has limited weaknesses, which is why some Lions fans are holding out hope that Detroit will make him the No. 1 pick. 2. Eugene Monroe, OT, Virginia Baylor’s Jason Smith is rated as the best offensive tackle in this month’s draft – and for good reason. He has great size, athleticism and is projected to be the best pass blocker of any offensive lineman in the draft. But while Smith might have a higher ceiling in terms of potential, Monroe is arguably the safest prospect in the entire draft. He doesn’t excel at any one facet of the game, yet is fundamentally sound in both run blocking and pass protection, and is also more athletic than what people give him credit for. Monroe is the prototypical NFL offensive tackle in terms of size (he’s 6’5”, 309 pounds) and has tons of experience (he was a three-year starter and could have played as a true freshman if it wasn’t for D’Brickashaw Ferguson). He also has outstanding footwork, which is often the most important thing when pro scouts are looking at offensive line prospects. Whichever team lands Monroe will probably be able to plug him in at left tackle and forget about having to draft the position again for the next 10 to 12 years. 3. Alex Mack, C, California Some people are going to take one look at Mack’s position and wonder what I smoked right before sitting down to write this piece. A center for a first round pick? 
Believe it. The last center to be selected in the first round was Nick Mangold by the Jets in 2006. While he struggled a bit during his sophomore season, he’s been a quality player in the middle of the Jets’ offensive line and will likely continue to be for years to come. Centers are often the most overlooked position on the field and yet they’re responsible for getting their linemates set, calling out audibles and making pre-snap adjustments so their quarterbacks don’t get killed. They’re often the most intelligent players on the field and prospects like Mack are changing the way teams view centers in the draft. Mack doesn’t come without his flaws (he can be overly aggressive at times), but he’s extremely intelligent, has great technique and he’s an incredibly hard worker. He also comes loaded with experience, is strong and doesn’t have any durability issues. Mack won’t be selected in the top 20 and could fall into the early second round, but he’s the best center prospect to come out in several years and chances are he’s going to be very successful at the next level. 4. B.J. Raji, DT, Boston College Some will argue that since Raji comes with off field baggage (i.e. he was ejected from a game in 2006 for throwing a punch, he missed the entire 2007 college season because of an academic issue and he also tested positive for marijuana before the 2008 season) that he could be one of the bigger first round risks this year. But back in 1995 there was a defensive tackle out of Miami named Warren Sapp that had some off field issues (including a reported failed drug test leading up the draft) too, yet he turned the doubts and criticisms into motivation and wound up being a seven-time Pro Bowler. Raji has a ton of talent and if he’s motivated, he could become one of the more dominant interior defensive linemen in the NFL some day. He’s big enough and quick enough to play either tackle position in a 4-3, and has enough size to play nose in a 3-4. He has outstanding strength, can occupy multiple defenders and flashes good instincts and quickness at the point of attack. If he winds up in a solid organization (i.e. Green Bay), he could turn out to be one of the best first round picks in this draft. He just needs to stay focused and motivated. 5. Rey Maualuga, LB, USC Maualuga isn’t a three-down player. He won’t be very good in coverage if he’s left in obvious passing situations and he’s not Aaron Curry in that he can play multiple ‘backer positions in either a 4-3 or 3-4. Maualuga is a true thumper in every sense of the word and if he winds up playing inside linebacker in a 3-4 or middle linebacker in a 4-3, he’s going to be a force. He has the size and strength to shed blockers and despite not having ideal speed, he’s a solid pass rusher and decent blitzer. The bottom line is that Maualuga has to fit into the right defensive scheme to be completely successful, but if he does, he could become one of the better young ‘backers in the NFL in a short amount of time. Be sure to return to The Scores Report the week of the draft as John Paulsen and I break down what positions are the safest bets in the first round.
/** * Created by Alessandro Di Diego */ public class OpenPcapFragment extends Fragment { private AppCompatButton openPcapButton; private FilePickerDialog dialog; @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup parentViewGroup, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_openpcap, parentViewGroup, false); openPcapButton = (AppCompatButton) rootView.findViewById(R.id.open_pcap_button); DialogProperties properties=new DialogProperties(); properties.selection_mode=DialogConfigs.SINGLE_MODE; properties.selection_type=DialogConfigs.FILE_SELECT; properties.root=new File(Environment.getExternalStorageDirectory() + "/pcaps"); properties.error_dir=new File(DialogConfigs.DEFAULT_DIR); properties.extensions=new String[]{"pcap","cap"}; dialog = new FilePickerDialog(getContext(),properties); dialog.setDialogSelectionListener(new DialogSelectionListener() { @Override public void onSelectedFilePaths(String[] files) { //files is the array of the paths of files selected by the Application User. if(files!=null && files.length>0) { Intent intent = new Intent(getContext(), ParsePcapService.class); intent.putExtra(ParsePcapService.PCAP_PATH, files[0]); getContext().startService(intent); intent = new Intent(getContext(), CaptureActivity.class); startActivity(intent); } } }); openPcapButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { if(dialog!=null) dialog.show(); } }); return rootView; } @Override public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); } @Override public void onPause() { super.onPause(); if(dialog!=null) dialog.dismiss(); } }
Molecular Evolution of Puumala Hantavirus ABSTRACT Puumala virus (PUUV) is a negative-stranded RNA virus in the genusHantavirus, family Bunyaviridae. In this study, detailed phylogenetic analysis was performed on 42 complete S segment sequences of PUUV originated from several European countries, Russia, and Japan, the largest set available thus far for hantaviruses. The results show that PUUV sequences form seven distinct and well-supported genetic lineages; within these lineages, geographical clustering of genetic variants is observed. The overall phylogeny of PUUV is star-like, suggesting an early split of genetic lineages. The individual PUUV lineages appear to be independent, with the only exception to this being the Finnish and the Russian lineages that are closely connected to each other. Two strains of PUUV-like virus from Japan form the most ancestral lineage diverging from PUUV. Recombination points within the S segment were searched for and evidence for intralineage recombination events was seen in the Finnish, Russian, Danish, and Belgian lineages of PUUV. Molecular clock analysis showed that PUUV is a stable virus, evolving slowly at a rate of 0.7 × 10−7 to 2.2 × 10−6 nt substitutions per site per year.
def check_file_compliance(file_list, hlsp_name, known_missions, known_filters, exclude_missions, exclude_filters): n_fields_expected = 8 for ifile in file_list: ifile_base = os.path.basename(ifile) if not ifile_base.islower(): logging.warning("File is not all lowercase: " + ifile) splits = ifile_base.split('_') if len(splits) != n_fields_expected: logging.warning("File does not have " + str(n_fields_expected) + " parts: " + ifile) else: if splits[0] != "hlsp": logging.warning('Field 1 is not "hlsp": ' + ifile) if splits[1] != hlsp_name: logging.warning('Field 2 is not "' + hlsp_name + '": ' + ifile) if not check_in_known_missions(splits[2], known_missions, exclude_missions): logging.warning('Field 3 ("' + splits[2] + '") is not in list ' 'of known missions: ' + ifile) if not check_in_known_filters(splits[5], known_filters, exclude_filters): logging.warning('Field 6 ("' + splits[5] + '") is not in list ' 'of known filters: ' + ifile) if not check_is_version_string(splits[6]): logging.warning('Field 7 does not appear to be a valid version' ' string: ' + ifile) prod_ext_str = splits[7] if splits[7][-3:] == '.gz': prod_ext_str = prod_ext_str.strip('.gz') product_extension_splits = prod_ext_str.split('.') if len(product_extension_splits) < 2: logging.warning('Field 8 does not have <product>.<extension> ' 'format: ' + ifile)
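A minimal usage sketch for check_file_compliance follows. It is an illustration only: the file paths, HLSP name, and mission/filter lists are hypothetical, and the helper functions (check_in_known_missions, check_in_known_filters, check_is_version_string) plus the os and logging imports are assumed to be available alongside the function above.

import logging
import os

logging.basicConfig(level=logging.WARNING)

# Hypothetical 8-field HLSP-style names: hlsp_<name>_<mission>_..._<filter>_<version>_<product>.<ext>
files = [
    "/data/hlsp_myproj_hst_wfc3_target1_f160w_v1.0_img.fits",
    "/data/HLSP_myproj_hst_wfc3_target2_f160w_v1.0_img.fits",  # triggers the lowercase warning
]

check_file_compliance(
    file_list=files,
    hlsp_name="myproj",
    known_missions=["hst", "jwst"],
    known_filters=["f160w", "f444w"],
    exclude_missions=[],
    exclude_filters=[],
)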
declare module "*.png" declare module "*.jpg" declare module "*.jpeg" declare module "*.svg" declare module "*.json" type Token = { assetId?: string balance: number reissuable?: boolean quantity?: number issueTransaction: { type?: number id?: string sender?: string senderPublicKey?: string fee?: number timestamp?: number signature?: string assetId?: string name?: string description?: string quantity?: number decimals: number reissuable?: boolean } } declare module "token" { export interface IToken { token: Token } }
// GetAllMembers retrieves a set of thread // members from the thread with the given id. func GetAllMembers(token auth.Token, threadID snowflake.Snowflake) (members []channel.ThreadMember, err error) { return members, client.GET(client.Request{ Path: fmt.Sprintf("/channels/%s/thread-members", threadID), Token: token, Result: &members, }) }
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *ListenerParameters) DeepCopyInto(out *ListenerParameters) { *out = *in if in.AlpnPolicy != nil { in, out := &in.AlpnPolicy, &out.AlpnPolicy *out = make([]*string, len(*in)) for i := range *in { if (*in)[i] != nil { in, out := &(*in)[i], &(*out)[i] *out = new(string) **out = **in } } } if in.Certificates != nil { in, out := &in.Certificates, &out.Certificates *out = make([]*Certificate, len(*in)) for i := range *in { if (*in)[i] != nil { in, out := &(*in)[i], &(*out)[i] *out = new(Certificate) (*in).DeepCopyInto(*out) } } } if in.Port != nil { in, out := &in.Port, &out.Port *out = new(int64) **out = **in } if in.Protocol != nil { in, out := &in.Protocol, &out.Protocol *out = new(string) **out = **in } if in.SSLPolicy != nil { in, out := &in.SSLPolicy, &out.SSLPolicy *out = new(string) **out = **in } if in.Tags != nil { in, out := &in.Tags, &out.Tags *out = make([]*Tag, len(*in)) for i := range *in { if (*in)[i] != nil { in, out := &(*in)[i], &(*out)[i] *out = new(Tag) (*in).DeepCopyInto(*out) } } } in.CustomListenerParameters.DeepCopyInto(&out.CustomListenerParameters) }
def run_with_batches(self, fetch, dataset, batch_size=None): if batch_size is None or dataset.sample_size <= batch_size: return self.sess.run(fetch, dataset[:]) else: assert dataset.sample_size % batch_size == 0 if type(fetch) is list: out = zeros_from_arrays(fetch) update_func = lambda old, new, coef: add_arrays(old, mul_const_by_arrays(new, coef)) else: shape = fetch._shape_as_list() if shape is None: shape = [None] if None in shape: assert shape[0] is None out = None def update_func(old, new, coef): if old is None: out = new else: out = np.concatenate([old, new]) return out else: out = np.zeros(shape) update_func = lambda old, new, coef: out + new * coef idxs_of_minibatches = get_minibatch_indices(dataset.sample_size, batch_size, append_remainder=False, original_order=True) for idxs in idxs_of_minibatches: coef = batch_size / dataset.sample_size tmp = self.sess.run(fetch, dataset[idxs]) out = update_func(out, tmp, coef) return out
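A sketch of how run_with_batches might be called, assuming the enclosing class holds a TensorFlow session in self.sess and that the dataset object exposes sample_size and slice-style indexing returning a feed dict (as the implementation above requires); model, loss_op and train_data are hypothetical names used only for illustration.

# Hypothetical usage; none of these names come from the original module.
batch_size = 256
assert train_data.sample_size % batch_size == 0  # required by the batched code path

# Averages the scalar fetch over equally weighted mini-batches.
mean_loss = model.run_with_batches(loss_op, train_data, batch_size=batch_size)

# With batch_size=None the fetch is evaluated in a single session.run over the whole dataset.
full_loss = model.run_with_batches(loss_op, train_data, batch_size=None)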
#include <bits/stdc++.h>
using namespace std;

char a[1005][1005];
int v[1005][1005];
int r, c;

// BFS from the exit cell: v[x][y] holds the distance from the exit.
void bfs(int i, int j) {
    queue<pair<pair<int, int>, int>> q;
    q.push({{i, j}, 0});
    memset(v, 0, sizeof(v));
    while (!q.empty()) {
        auto z = q.front();
        q.pop();
        int x = z.first.first;
        int y = z.first.second;
        int w = z.second;
        //cerr<<x<<" "<<y<<" "<<w<<" "<<endl;
        // check bounds before reading v[x][y] or a[x][y]
        if (x >= r || y >= c || x < 0 || y < 0 || v[x][y] || a[x][y] == 'T') {
            continue;
        }
        v[x][y] = w;
        q.push({{x + 1, y}, w + 1});
        q.push({{x, y + 1}, w + 1});
        q.push({{x - 1, y}, w + 1});
        q.push({{x, y - 1}, w + 1});
    }
}

int main(int argc, const char **argv) {
    if (argc >= 2) {
        freopen(argv[1], "r", stdin);
        //freopen(argv[2], "w", stdout);
    }
    cin >> r >> c;
    for (int i = 0; i < r; i++) {
        scanf("%s", a[i]);
    }
    int thresh = 0;
    for (int i = 0; i < r; i++) {
        for (int j = 0; j < c; j++) {
            if (a[i][j] == 'E') {
                bfs(i, j);
            }
        }
    }
    for (int i = 0; i < r; i++) {
        for (int j = 0; j < c; j++) {
            if (a[i][j] == 'S') {
                thresh = v[i][j];
            }
        }
    }
    cerr << "THRESH" << " " << thresh << " " << endl;
    long long ans = 0;
    for (int i = 0; i < r; i++) {
        for (int j = 0; j < c; j++) {
            if (a[i][j] <= '9' && a[i][j] > '0') {
                int z = v[i][j];
                if (z > 0 && z <= thresh) {
                    ans += a[i][j] - '0';
                }
            }
        }
    }
    cout << ans << endl;
}
/** * Do texgen needed for glRasterPos. * \param ctx rendering context * \param vObj object-space vertex coordinate * \param vEye eye-space vertex coordinate * \param normal vertex normal * \param unit texture unit number * \param texcoord incoming texcoord and resulting texcoord */ static void compute_texgen(GLcontext *ctx, const GLfloat vObj[4], const GLfloat vEye[4], const GLfloat normal[3], GLuint unit, GLfloat texcoord[4]) { const struct gl_texture_unit *texUnit = &ctx->Texture.Unit[unit]; GLfloat u[3], two_nu, rx, ry, rz, m, mInv; COPY_3V(u, vEye); NORMALIZE_3FV(u); two_nu = 2.0F * DOT3(normal, u); rx = u[0] - normal[0] * two_nu; ry = u[1] - normal[1] * two_nu; rz = u[2] - normal[2] * two_nu; m = rx * rx + ry * ry + (rz + 1.0F) * (rz + 1.0F); if (m > 0.0F) mInv = 0.5F * _mesa_inv_sqrtf(m); else mInv = 0.0F; if (texUnit->TexGenEnabled & S_BIT) { switch (texUnit->GenModeS) { case GL_OBJECT_LINEAR: texcoord[0] = DOT4(vObj, texUnit->ObjectPlaneS); break; case GL_EYE_LINEAR: texcoord[0] = DOT4(vEye, texUnit->EyePlaneS); break; case GL_SPHERE_MAP: texcoord[0] = rx * mInv + 0.5F; break; case GL_REFLECTION_MAP: texcoord[0] = rx; break; case GL_NORMAL_MAP: texcoord[0] = normal[0]; break; default: _mesa_problem(ctx, "Bad S texgen in compute_texgen()"); return; } } if (texUnit->TexGenEnabled & T_BIT) { switch (texUnit->GenModeT) { case GL_OBJECT_LINEAR: texcoord[1] = DOT4(vObj, texUnit->ObjectPlaneT); break; case GL_EYE_LINEAR: texcoord[1] = DOT4(vEye, texUnit->EyePlaneT); break; case GL_SPHERE_MAP: texcoord[1] = ry * mInv + 0.5F; break; case GL_REFLECTION_MAP: texcoord[1] = ry; break; case GL_NORMAL_MAP: texcoord[1] = normal[1]; break; default: _mesa_problem(ctx, "Bad T texgen in compute_texgen()"); return; } } if (texUnit->TexGenEnabled & R_BIT) { switch (texUnit->GenModeR) { case GL_OBJECT_LINEAR: texcoord[2] = DOT4(vObj, texUnit->ObjectPlaneR); break; case GL_EYE_LINEAR: texcoord[2] = DOT4(vEye, texUnit->EyePlaneR); break; case GL_REFLECTION_MAP: texcoord[2] = rz; break; case GL_NORMAL_MAP: texcoord[2] = normal[2]; break; default: _mesa_problem(ctx, "Bad R texgen in compute_texgen()"); return; } } if (texUnit->TexGenEnabled & Q_BIT) { switch (texUnit->GenModeQ) { case GL_OBJECT_LINEAR: texcoord[3] = DOT4(vObj, texUnit->ObjectPlaneQ); break; case GL_EYE_LINEAR: texcoord[3] = DOT4(vEye, texUnit->EyePlaneQ); break; default: _mesa_problem(ctx, "Bad Q texgen in compute_texgen()"); return; } } }
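As a reference for the GL_SPHERE_MAP branches above: with the reflection vector r = u - 2n(n·u) computed in the code, the OpenGL sphere-map texgen defines (this note is an editorial aside, not part of the original source):

m = 2\sqrt{r_x^2 + r_y^2 + (r_z + 1)^2}, \qquad s = \frac{r_x}{m} + \frac{1}{2}, \qquad t = \frac{r_y}{m} + \frac{1}{2}

The code folds the factor of 2 into mInv, computing mInv = 0.5 / sqrt(rx^2 + ry^2 + (rz + 1)^2), so texcoord[0] = rx * mInv + 0.5 and texcoord[1] = ry * mInv + 0.5 match the expressions above.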
/** * A connection to a remote client is handled in its own thread. The method run() implements the * Runnable interface. It will be started in a seperate thread.<br/> * In run() we handle the actual conversation with a remote client be sending and receiving messages * and also taking the appropriate actions. * * @see Thread#run() */ public void run() { log("Connection started!"); setEndConnection(false); while (!getEndConnectionFlag() && !getSocketErrorFlag() && !isInterrupted()) { setEndConversation(false); send(RemoteProtocol.getGreetingCmd()+" this is ReversiByFrankKopp v"+ Reversi.VERSION); handleGameRequest(); waitASec(); } _server.connectionClosed(); nullConnectionThread(); log("Connection ended!"); }
/* @jsx MD */ import MD from "jsx-md"; import { Badge } from "../Badge"; import { BadgeComponent } from "./utils/BadgeComponent"; /** Display a badge explaining that this repo uses https://github.com/semantic-release/semantic-release */ export const SemanticReleaseBadge: BadgeComponent = ({ pkg }) => { if (pkg.devDependencies?.["semantic-release"] === undefined) { return null; } return ( <Badge link="https://github.com/semantic-release/semantic-release#badge" imageSource="https://img.shields.io/badge/%20%20%F0%9F%93%A6%F0%9F%9A%80-semantic--release-e10079.svg" > semantic release </Badge> ); };
package live.stubs; import live.data.AccountRepository; import live.domain.Account; public class StubAccountRepository implements AccountRepository { @Override public Account getAccount(String username) { return "Mike".equals(username)?new Account(username,"secret"):null; } }
The Archdiocese of Milwaukee and its bankruptcy creditors have failed to reach a settlement after two days of negotiations, but the parties have agreed to continue settlement talks in two weeks. The parties are scheduled to return to the negotiating table for two more days of talks Sept. 22 and 23, said Michael Finnegan, whose St. Paul, Minn., law firm represents most of the 575 men and women who have filed sex abuse claims in the bankruptcy. He declined to comment on the round of talks that concluded Tuesday in Minnesota, or to speculate about prospects for a settlement. Jerry Topczewski, chief of staff for Archbishop Jerome Listecki, who was in Minnesota for the meetings, could not be reached for comment. The archdiocese sought the mediation in hopes of emerging out of its nearly 4-year-old bankruptcy. Legal fees in the case have topped $13 million. The mediation included lawyers for the archdiocese; its creditors committee, which is composed of sex abuse victims; and the $60 million trust created by the archdiocese for maintenance of its cemeteries. U.S. Bankruptcy Judge Susan V. Kelley also urged the archdiocese's insurers to take part, but it was not clear whether they did. The mediation appears to be a last-ditch effort to come up with a negotiated settlement before the U.S. 7th Circuit Court of Appeals issues its ruling on a key question related to the cemetery trust. At issue is whether forcing the archdiocese to tap even $1 of the cemetery trust to fund the bankruptcy estate — and ultimately pay sex abuse settlements — would violate its free exercise of religion. The archdiocese says it would. The creditors committee — which is composed of abuse victims but represents all creditors — rejects that argument. A decision favoring the creditors would likely spawn a new round of costly court battles in the case.
/** * Executes the given Index Operation once, without performing retries. In case of failure with BadOffsetException * (which indicates a conditional update failure), the BTreeIndex is reinitialized to the most up-to-date state. * * @param indexOperation A Function, that, when invoked, returns a CompletableFuture which indicates when the index * operation completes. * @param timer Timer for the operation. * @return A CompletableFuture that will indicate when the operation completes. */ private CompletableFuture<Long> executeConditionallyOnce(Function<Duration, CompletableFuture<Long>> indexOperation, TimeoutTimer timer) { return Futures.exceptionallyCompose( indexOperation.apply(timer.getRemaining()), ex -> { if (Exceptions.unwrap(ex) instanceof BadOffsetException) { BadOffsetException boe = (BadOffsetException) Exceptions.unwrap(ex); if (boe.getExpectedOffset() != this.index.getIndexLength()) { log.warn("{}: Conditional Index Update failed (expected {}, given {}). Reinitializing index.", this.traceObjectId, boe.getExpectedOffset(), boe.getGivenOffset()); return this.index.initialize(timer.getRemaining()) .thenCompose(v -> Futures.failedFuture(ex)); } } return Futures.failedFuture(ex); }); }
Whole-exome sequencing identifies variants in invasive pituitary adenomas Pituitary adenomas exhibit a wide range of behaviors. The prediction of invasion or malignant behavior in pituitary adenomas remains challenging. The objective of the present study was to identify the genetic abnormalities associated with invasion in sporadic pituitary adenomas. In the present study, the exomes of six invasive pituitary adenomas (IPA) and six non-invasive pituitary adenomas (nIPA) were sequenced by whole-exome sequencing. Variants were confirmed by dideoxynucleotide sequencing, and candidate driver genes were assessed in an additional 28 pituitary adenomas. A total of 15 identified variants were mainly associated with angiogenesis, metabolism, cell cycle phase, cellular component organization, cytoskeleton and biogenesis immune at a cellular level, including 13 variants that occurred as single nucleotide variants and 2 that comprised of insertions. The messenger RNA (mRNA) levels of diffuse panbronchiolitis critical region 1 (DPCR1), KIAA0226, myxovirus (influenza virus) resistance, proline-rich protein BstNI subfamily 3, PR domain containing 2, with ZNF domain, RIZ1 (PRDM2), PR domain containing 8 (PRDM8), SPANX family member N2 (SPANXN2), TRIO and F-actin binding protein and zinc finger protein 717 in IPA specimens were 50% decreased compared with nIPA specimens. In particular, DPCR1, PRDM2, PRDM8 and SPANXN2 mRNA levels in IPA specimens were approximately four-fold lower compared with nIPA specimens (P=0.003, 0.007, 0.009 and 0.004, respectively). By contrast, the mRNA levels of dentin sialophospho protein, EGF like domain, multiple 7 (EGFL7), low density lipoprotein receptor-related protein 1B and dynein, axonemal, assembly factor 1 (LRRC50) were increased in IPA compared with nIPA specimens (P=0.041, 0.037, 0.022 and 0.013, respectively). Furthermore, decreased PRDM2 expression was associated with tumor recurrence. The findings of the present study indicate that DPCR1, EGFL7, the PRDM family and LRRC50 in pituitary adenomas are modifiers of tumorigenesis, and most likely contribute to the development of oncocytic change and to the invasive tumor phenotype. Introduction Pituitary adenomas account for 10-15% of all intracranial neoplasms and are incidentally identified in <27% of non-selected autopsies (1). The clinical presentation of pituitary adenomas depends on the structural and functional characteristics of the tumor (2). The World Health Organization (WHO) categorizes pituitary tumors into typical adenomas, atypical adenomas and pituitary carcinomas; of which, typical adenomas constitute the major class. However, the WHO classification does not offer an accurate association between the histopathological findings and the clinical behavior of the tumor (3). An estimated 35-55% of pituitary adenomas demonstrate invasion into bones, dura or adjacent structures, including the cavernous or sphenoid sinuses (4). Clinically defined invasive pituitary adenomas (IPAs) demonstrate earlier and more frequent recurrences, and may be resistant to conventional treatments, such as surgery and radiotherapy (5). Specific biomarkers that distinguish between aggressive and nonaggressive pituitary adenomas have not yet been identified, although certain studies suggest that the Ki-67 proliferation index may be of diagnostic value (3). 
The WHO classification of endocrine tumors indicates that invasion of the surrounding structures, size at presentation, an elevated mitotic index, a Ki-67 labeling index of >3% and extensive tumor protein p53 (p53) expression are indicators of aggressive behavior (6,7). However, Ki-67 and p53 labeling index evaluations demonstrate subjective variability, and the cutoff values are controversial (8). Clinically, endocrine tumors present a challenging management problem, with a high frequency of incomplete resections, tendency for recurrence and notable morbidity (9). Previously, several studies attempted to identify novel molecular markers that require additional validation (10)(11)(12)(13). In a previous study, multivariate Cox regression analysis assessed galectin-3 immunohistochemical expression in ≥30% of neoplastic cells; galectin-3 messenger RNA expression was indicated to be a strong predictive factor of recurrence or tumor progression (P<0.001); and a Ki-67 labeling index of >3% (P=0.019) was indicated in the 81 cases with available follow-up data (12). PTTG expression may be associated with tumor invasiveness and microvessel density of pituitary adenomas (13). Apoptosis and mitoses represent two adverse and asynchronous events that maintain the optimal cell numbers; cytogenetic analysis may, therefore, be useful in defining the biological invasion of pituitary tumors (14). In addition, predicting the subsequent risk of disease invasion or drug sensitivity is challenging. However, mutations in classic oncogenes and tumor-suppressor genes are rarely associated with these tumors (3)(4)(5)(6)(8)(9)(10)(11)(12)(13)(14)(15). Nonfunctioning pituitary adenomas (NFPAs) result in few somatic mutations, which is consistent with the associated low proliferation rates and benign nature; however, mechanisms other than somatic mutation are likely to be involved in the etiology of sporadic NFPAs (16). The majority of mechanisms of endocrine tumorigenesis differ significantly from those associated with haematological malignancies and non-endocrine tumors (17). In addition, the genetic events underpinning the development of invasion or refractory pituitary adenomas are not yet understood (18). In order to identify the genetic events that may be contributing to the invasion of pituitary adenomas, whole-exome sequencing, which has been successfully used to find variants in multiple tumor types, was applied (16)(19)(20)(21). Through stringent variant calling and filtering parameters, 15 identified variants were mainly associated with cell cycle phase, cellular component organization and biogenesis at the cellular level by whole-exome sequencing in combination with homozygosity mapping between IPAs and non-invasive pituitary adenomas (nIPA). The present study supports the role of somatic variants of the PR domain (PRDM) gene family, which is known to control cell proliferation in cancer and in normal development, in IPAs. Materials and methods Patients and specimens. Specimens from six IPAs and six nIPAs were obtained from patients that underwent endoscopic transsphenoidal surgery between December 2009 and January 2010 at Beijing Tiantan Hospital, Beijing, China. Informed consent was obtained from all individuals and ethical approval was obtained from the Institutional Review Board of Beijing Tiantan Hospital Affiliated to Capital Medical University.
Pituitary adenomas, obtained from 12 patients (5 men and 7 women; mean age, 40.7 years; range, 16.0-63.0 years) that did not have a family history of endocrine neoplasia, were characterized based on presurgical clinical and biochemical findings, including a pituitary hormone test. This tested for 12 types of pituitary hormone: Growth hormone, adrenocorticotropic hormone, follicle-stimulating hormone, luteinizing hormone, estradiol, progesterone, human growth hormone, cortisol, total triiodothyronine, total thyroxine, thyroid-stimulating hormone and prolactin (PRL) levels (4 patients in normal range, 8 patients with increased PRL levels; normal range, 2.5-17 ng/ml). Pituitary adenomas were also characterized based on morphological and immunohistochemical analysis of removed tissue samples (Table I). Cases with multiple hormonal changes according to the clinical and pathological data were excluded. The following IPA diagnostic criteria were adopted: i) Knosp classification grade III-IV tumors and Hardy classification invasive adenomas; ii) tumor cells confirmed via pathology as invading the sphenoid bone or adjacent dura mater; iii) tumor cells invading the sphenoid sinus cavity or peripheral vascular and nerve; iv) Ki-67 labeling index of >3% (22). The tumors did not have atypical features, and constituted the 'discovery' set of tumors for exome capture and DNA sequence analysis. An additional 28 pituitary adenomas, histologically confirmed, were obtained from 13 women and 15 men (mean age, 61 years; range, 17-71 years), and these constituted the 'validation' set. For histological analysis, the tumor specimens were divided into two sections. One was stored in liquid nitrogen and the other was fixed in 4% paraformaldehyde for 24 h (Sinopharm Chemical Reagent Beijing Co., Ltd., Beijing, China) within 0.5 h of surgery. After washing for 6 h in flowing water, the specimens underwent gradient dehydration in alcohol, were embedded in paraffin wax (Leica Biosystems Richmond Inc., Richmond, IL, USA) and sectioned at a thickness of 5 µm. Sections were incubated with primary mouse anti-human monoclonal PRDM2 antibody (catalog no., ab3791; dilution, 1:200; Abcam, Cambridge, MA, USA) at 4˚C overnight. Next, sections were washed three times with phosphate-buffered saline (PBS; ZSGB-BIO), then incubated with DyLight-conjugated AffiniPure secondary antibody (goat anti-mouse IgG H+L; catalog no., ZF-0313; ZSGB-BIO, Beijing, China) with fluorescence was added at room temperature for 1 h followed by 3 washes with PBS (5 min each). Streptavidin-Biotin Complex (ZSGB-BIO) was added for 20 min and then the sections were washed with PBS. Next, sections were mounted with ProLong Gold Antifade reagent (ZSGB-BIO) with DAPI (Invitrogen; Thermo Fisher Scientific, Inc., Waltham, MA USA). Staining was visualized using a LEICA-TCS-SP5II microscope (Leica, Wetzlar, Germany). The percentage of DAPI-stained cells exhibiting PRDM2 immunoreactivity was analyzed in 5 randomly selected high power fields. Specimen preparation, exome capture, DNA sequencing and bioinformatics analysis. Total DNA was extracted from pituitary adenomas using the QIAamp DNA Mini Kit (Qiagen GmbH, Hilden, Germany). An aliquot containing 5 µg of genomic DNA was purified and quantified from each specimen. Exome enrichment was performed by using an ABI SOLiD optimized SureSelect Human All Exon kit (Agilent Technologies, Inc., Santa Clara, CA, USA), which included the exonic sequences of ~18,000 genes, covering a total of 42 Mb of genomic sequences. 
The enriched exome libraries were then amplified by emulsion polymerase chain reaction (ePCR; Ion PI™ Hi-Q™ OT2 200 kit; cat no. a26434), according to the manufacturer's instructions (Thermo Fisher Scientific, Inc.), and based on a library concentration of 0.5 pM. The PCR products were then sequenced on a SOLiD5500 sequencer (Thermo Fisher Scientific, Inc.), and one quadrant of a SOLiD sequencing slide was required for each sample. Color-space reads were mapped to the hg19 reference human genome using SOLiDBioScope software (5500 W Series Genetic Analyzer V2.0; Thermo Fisher Scientific, Inc.), which is suitable for a repetitive mapping approach. Single-nucleotide polymorphisms (SNPs) were then called using the diBayes algorithm with conservative default call stringency. Known SNPs available from the Single Nucleotide Polymorphism Database (dbSNP) version 130, which is maintained by the National Center for Biotechnology Information, were excluded. [Table II: methylation validation primers, listing the gene, forward primer and reverse primer.] Mutation validation. Primer3 software (version 0.4.0; http://frodo.wi.mit.edu/primer3/) was used to generate primers for the PCR amplification of variants identified via exome sequencing or exons covered in additional screening using a SOLiD5500xl sequencer (Thermo Fisher Scientific, Inc.; Table II). The DNA ladder (DL1000; Takara Bio, Inc., Otsu, Japan) and ethidium bromide were purchased from Takara Bio, Inc. Amplification products of an appropriate size were identified using agarose gel electrophoresis (100 V, 30 min). Amplicons from 3 normal pituitary and 28 pituitary tumor DNA molecules coupled with leukocyte were sequenced using forward and reverse primers. Variants were confirmed by at least two independent sequences from various primers. RNA extraction and reverse transcription-quantitative PCR (RT-qPCR). Total RNA was extracted from frozen normal pituitary and pituitary adenomas (~50 mg) using the TRIzol Reagent (Invitrogen; Thermo Fisher Scientific, Inc.). RT-qPCR was performed as described previously (23), using the Applied Biosystems 7500 Fast System (Thermo Fisher Scientific, Inc.) and the primers indicated in Table III. The fold-change in differential expression for each gene was calculated using the comparative Cq method (also known as the 2^-ΔΔCq method; the standard form of this calculation is noted after this passage), as previously described (24). Statistical analysis. Binary multivariate regression was used to identify the independent factors associated with pituitary adenoma recurrence. P<0.05 was used to indicate a statistically significant difference. Results Identification of variant genes by whole-exome sequencing. For the identification of tumor-specific somatic variants, whole-exome capture using DNA from the discovery set of six IPAs and six nIPAs yielded excellent target region coverage, with ~72% of the exome covered to a depth of at least 30-fold between the somatic variant calling algorithm and confirmatory sequencing.
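Editorial note referenced above: the comparative Cq (2^-ΔΔCq) fold-change calculation mentioned in the RT-qPCR methods takes the standard form, with the target gene normalized to a reference gene and the tumor sample compared against the control:

\Delta C_q = C_{q,\mathrm{target}} - C_{q,\mathrm{reference}}, \qquad \Delta\Delta C_q = \Delta C_{q,\mathrm{tumor}} - \Delta C_{q,\mathrm{control}}, \qquad \text{fold change} = 2^{-\Delta\Delta C_q}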
Several prioritization steps were taken to decrease the number of genetic variants and to find the potentially pathogenic variants, as follows: i) Variants should have a deleterious effect on protein function (as predicted by protein prediction software, such as PolyPhen-2, MutationTaster and SIFT); ii) variants should be present at sufficient allele frequency to represent likely heterozygous or homozygous changes (i.e., present from early in the tumorigenic process), although deviation from the expected heterozygous or homozygous allele frequencies may represent either contamination with normal tissue or the preference of the sequence and alignment process for the wild-type allele, as previously reported (25,26); and iii) variants should be involved in biological processes relevant to tumorigenesis (27). Approximately 90% of single-nucleotide variants (SNVs) resulted in missense amino-acid changes, whereas the remaining (~10%) were synonymous changes. Over 70% of the SNVs occurred as C:G-T:A transitions, and <30% were transversions. Using stringent variant calling and filtering parameters (16), 233 variants were identified in the specimens. In addition, five variants (C8orf79, chr8:12879694; FSHD region gene 1 family member B, pseudogene, chr20:29632674; mucin 2, oligomeric mucus/gel-forming, chr11:1092715; mucin 6, oligomeric mucus/gel-forming, chr11:1018092; and solute carrier family 5 member 3, chr21:35467473) were present in all specimens, and 47 were detected in either the IPA or nIPA. Of these, 15 were somatic variants confirmed by dideoxynucleotide sequencing. Of the 15 confirmed variants, 13 occurred as SNVs, including three synonymous SNVs, and two comprised insertions (Table IV). The genes with variants were generally associated with angiogenesis, metabolism, cell cycle phase, cellular component organization, cytoskeleton and biological immunity at a cellular level. The genes include: EGF like domain, multiple 7 (EGFL7), associated with angiogenesis; low density lipoprotein receptor-related protein 1B (LRP1B) and maltase-glucoamylase (α-glucosidase) associated with cell metabolism; dentin sialophospho protein (DSPP), PR domain containing 2, with ZNF domain, RIZ1 (PRDM2), PR domain containing 8 (PRDM8) and zinc finger protein 717 (ZNF717) associated with cell proliferation; dynein, axonemal, assembly factor 1 (LRRC50), microtubule associated serine/threonine kinase and TRIO and F-actin binding protein (TRIOBP) associated with cytoskeleton; myxovirus (influenza virus) resistance (MX2) associated with cell cycle phase; diffuse panbronchiolitis critical region 1 (DPCR1), proline-rich protein BstNI subfamily 3 (PRB3) and SPANX family member N2 (SPANXN2) associated with immune response; and KIAA0226 associated with vesicle trafficking. Furthermore, three variants were indicated in PRDM2 in five separate IPA specimens, including two synonymous and one frame shift. One mutation was indicated in PRDM8 (missense, N102D) and PR domain containing 10 (missense, S1018R) in a tumor separately (data not shown), and four variants (R246Q, G272C, S501F and A1247G) were indicated in PR domain containing 16 in three separate IPA specimens (data not shown).
[Table V (legend): association between PRDM2 mRNA levels (high vs. low) and clinical parameters, reported with χ2 and P-values. The median expression level was used as the cutoff between the high and low mRNA levels; low PRDM2 mRNA levels were defined as values below the 50th percentile of the 12 patients, and values at or above the 50th percentile were classified as high levels. Recurrence was defined as the discovery of an elevated PRL level at any time in the postoperative surveillance period after an initial remission. PRDM2, PR domain containing 2, with ZNF domain, RIZ1; mRNA, messenger RNA.]
PRDM2 levels are associated with recurrence in pituitary adenomas. The usual morphological signs of tumor aggression are poorly associated with the invasive potential of pituitary tumors, proliferation capacity, tendency of post-surgical recurrence and global biological behavior (28). PRDM2 contains a PR domain that demonstrates histone H3 lysine 9 methylation activity (29). Therefore, whether decreased levels of PRDM family mRNA in pituitary adenomas was associated with certain clinical parameters was assessed (Table V). No significant association was indicated between PRDM2 mRNA levels and age, gender or tumor size. However, decreased PRDM2 protein levels were more frequently observed in recurrent tumors (Fig. 3). Furthermore, binary multivariate regression revealed that decreased levels of PRDM2 were independently associated with tumor recurrence (odds ratio, 0.065; 95% confidence interval, 0.050-0.832; P=0.036).
A previous study indicated that the differentially expressed genes involved in this pathway in plurihormonal pituitary adenomas were delta-like 1 homolog, C-terminal binding protein 2, hes family bHLH transcription factor 1 (HES1), HES5 and E1A binding protein p300 (41). Another study used RT-qPCR assays and western blot analyses to observe upregulated NOTCH3 and jagged 1 (JAG1) in human NFPAs; furthermore, NOTCH3 was positively associated with JAG1 at the mRNA and protein levels (42). The elevated expression of EGFL7 in IPAs may therefore be associated with invasive behavior via activation of the NOTCH pathway. Increasing evidence has demonstrated the primary roles of tumor suppressors, oncogenes and cell cycle abnormalities in pituitary tumorigenesis. The PRDM group of proteins is an evolutionarily conserved protein family with 17 predicted members in humans, of which few have been characterized (49). Numerous studies suggest that PRDM family proteins interact with a number of chromatin modifying proteins, and act primarily as negative regulators of transcription (50)(51)(52)(53)(54). PRDM2/RIZ is a binding partner of the retinoblastoma tumor suppressor protein and is a frequent target for inactivation in a variety of human tumors, including breast, liver and colon cancers (55). Its tumor suppressor function is directly confirmed by the tumorigenic phenotype of mice deficient for RIZ1, the PR-containing isoform of PRDM2. In the present study, PRDM2 and PRDM8 mRNA levels were approximately five-fold lower in IPA specimens compared with nIPA specimens (P=0.007 and 0.009, respectively). In addition, binary multivariate regression revealed that decreased levels of PRDM2 were independently associated with tumor recurrence. Exome sequencing allows the comprehensive testing of coding variation in an unbiased manner. The results of the current study demonstrate that whole-exome sequencing will be particularly valuable for the identification of genes under conditions in which mapping has been confounded by locus heterogeneity and uncertainty about the boundaries of diagnostic classification. Whole-exome sequencing may be useful in the future for a wide range of applications in medicine.
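As a rough illustration of the analysis described above (a median split of PRDM2 expression followed by a regression model for recurrence), a minimal sketch in Python is given below. This is not the pipeline used in the study: the toy expression values, the column names, the pandas/statsmodels dependencies and the univariable model are all assumptions made for the demonstration; a real analysis would use the actual expression measurements and the other covariates of the multivariate model.

import numpy as np
import pandas as pd
import statsmodels.api as sm

# Hypothetical table: one row per patient, with a relative PRDM2 mRNA level
# and a 0/1 recurrence indicator. All values below are made up.
df = pd.DataFrame({
    "prdm2_mrna": [0.21, 0.35, 0.42, 0.55, 0.60, 0.71,
                   0.80, 0.95, 1.10, 1.30, 1.55, 1.80],
    "recurrence": [1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0],
})

# Median split: values below the 50th percentile are "low", the rest "high".
cutoff = df["prdm2_mrna"].median()
df["prdm2_high"] = (df["prdm2_mrna"] >= cutoff).astype(int)

# Logistic regression of recurrence on the high/low indicator.
X = sm.add_constant(df[["prdm2_high"]])
fit = sm.Logit(df["recurrence"], X).fit(disp=False)

# Odds ratio and 95% confidence interval for the expression term.
or_est = np.exp(fit.params["prdm2_high"])
ci_low, ci_high = np.exp(fit.conf_int().loc["prdm2_high"])
print(f"OR for high PRDM2 = {or_est:.3f} (95% CI {ci_low:.3f}-{ci_high:.3f})")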
/// A light row, a dark row and a lightning bolt below.
mod dataframe_map_row {
    use super::*;

    ensogl::define_shape_system! {
        (style:Style) {
            let weak_color   = style.get_color(theme::preparation::weak);
            let strong_color = style.get_color(theme::preparation::strong);
            let weak_row     = table(3,1).translate(((-6.5).px(),3.0.px())).fill(weak_color);
            let strong_row   = table(3,1).translate(((-6.5).px(),(-1.0).px())).fill(strong_color);
            let lightning    = lightning_bolt().rotate((PI/2.0).radians());
            let lightning    = lightning.translate_y((-5.25).px()).fill(strong_color);
            let shape        = weak_row + strong_row + lightning;
            let shape        = shape.shrink(SHRINK_AMOUNT.px());
            shape.into()
        }
    }
}
Different shaped stress balls, including a cube, a star, and a sphere.

Molyneux's problem is a thought experiment in philosophy concerning immediate recovery from blindness. It was first formulated by William Molyneux, and notably referred to in John Locke's An Essay Concerning Human Understanding (1689). The problem can be stated in brief: "if a man born blind can feel the differences between shapes such as spheres and cubes, could he, if given the ability to see, distinguish those objects by sight alone, in reference to the tactile schemata he already possessed?"

Original correspondence

The question was originally posed to Locke by philosopher William Molyneux, whose wife was blind:[1]

Suppose a man born blind, and now adult, and taught by his touch to distinguish between a cube and a sphere of the same metal, and nighly of the same bigness, so as to tell, when he felt one and the other, which is the cube, which is the sphere. Suppose then the cube and the sphere placed on a table, and the blind man made to see: query, Whether by his sight, before he touched them, he could now distinguish and tell which is the globe, which the cube? To which the acute and judicious proposer answers: 'Not. For though he has obtained the experience of how a globe, and how a cube, affects his touch; yet he has not yet attained the experience, that what affects his touch so or so, must affect his sight so or so...'

To which Locke responds in An Essay Concerning Human Understanding (emphasis added):[2]

I shall here insert a problem of that very ingenious and studious promoter of real knowledge, the learned and worthy Mr. Molyneux, which he was pleased to send me in a letter some months since; and it is this:—"Suppose a man born blind, and now adult, and taught by his touch to distinguish between a cube and a sphere of the same metal, and nighly of the same bigness, so as to tell, when he felt one and the other, which is the cube, which the sphere. Suppose then the cube and sphere placed on a table, and the blind man be made to see: quaere, whether by his sight, before he touched them, he could now distinguish and tell which is the globe, which the cube?" To which the acute and judicious proposer answers, "Not. For, though he has obtained the experience of how a globe, how a cube affects his touch, yet he has not yet obtained the experience, that what affects his touch so or so, must affect his sight so or so; or that a protuberant angle in the cube, that pressed his hand unequally, shall appear to his eye as it does in the cube."—I agree with this thinking gentleman, whom I am proud to call my friend, in his answer to this problem; and am of opinion that the blind man, at first sight, would not be able with certainty to say which was the globe, which the cube, whilst he only saw them; though he could unerringly name them by his touch, and certainly distinguish them by the difference of their figures felt. This I have set down, and leave with my reader, as an occasion for him to consider how much he may be beholden to experience, improvement, and acquired notions, where he thinks he had not the least use of, or help from them. And the rather, because this observing gentleman further adds, that "having, upon the occasion of my book, proposed this to divers very ingenious men, he hardly ever met with one that at first gave the answer to it which he thinks true, till by hearing his reasons they were convinced."
Responses

In 1709, in A New Theory of Vision, George Berkeley also concluded that there was no necessary connection between a tactile world and a sight world—that a connection between them could be established only on the basis of experience. He speculated:

the objects to which he had hitherto used to apply the terms up and down, high and low, were such as only affected or were in some way perceived by touch; but the proper objects of vision make a new set of ideas, perfectly distinct and different from the former, and which can in no sort make themselves perceived by touch — (sect. 95).

In 1749, Denis Diderot wrote Letter on the blind for the benefit of those who see as a criticism of our knowledge of ultimate reality. A similar problem was also addressed earlier, in the 12th century, by Ibn Tufail (Abubacer) in his philosophical novel, Hayy ibn Yaqdhan (Philosophus Autodidactus). His version of the problem, however, dealt mainly with colors rather than shapes:[3][4]

If you want a comparison that will make you clearly grasp the difference between the perception, such as it is understood by that sect [the Sufis] and the perception as others understand it, imagine a person born blind, endowed however with a happy natural temperament, with a lively and firm intelligence, a sure memory, a straight sprite, who grew up from the time he was an infant in a city where he never stopped learning, by means of the senses he did dispose of, to know the inhabitants individually, the numerous species of beings, living as well as non-living, there, the streets and sidestreets, the houses, the steps, in such a manner as to be able to cross the city without a guide, and to recognize immediately those he met; the colors alone would not be known to him except by the names they bore, and by certain definitions that designated them. Suppose that he had arrived at this point and suddenly, his eyes were opened, he recovered his view, and he crosses the entire city, making a tour of it. He would find no object different from the idea he had made of it; he would encounter nothing he didn't recognize, he would find the colors conformable to the descriptions of them that had been given to him; and in this there would only be two new important things for him, one the consequence of the other: a clarity, a greater brightness, and a great voluptuousness.

Regarding Molyneux's problem, the authors Asif A. Ghazanfar and Hjalmar K. Turesson (2008) have more recently noted:

Production of speech is seen as a pure motor act, involving muscles and the neurons controlling them, while perception of speech is seen as purely sensory, involving the ear and the auditory pathway. This parcellation of the systems appears intuitive and clear, but recent studies [beginning with Taine 1870!] ... suggest that such divisions may be fundamentally wrong. Rather than separate processes for motor outputs and individual sensory modalities, adaptive action seems to use all the available context-specific information. That is, neural representations across the brain may be centered on specific actions. This view on neural representations puts 'Molyneux's Problem' in a new light.
Unisensory signals are fused into multisensory motor representations unified by an action, but since Molyneux does not suggest any action, his 'problem' may be better viewed as an ill-posed question—at least from a neuroscientific perspective.[5]

One reason that Molyneux's Problem could be posed in the first place is the extreme dearth of human subjects who gain vision after extended congenital blindness. Alberto Valvo estimated that fewer than twenty such cases have been known in the last 1000 years.[6] Ostrovsky et al.[7] studied a woman who gained sight at the age of 12 when she underwent surgery for dense bilateral congenital cataracts. They report that the subject could recognize family members by sight six months after surgery, but took up to a year to recognize most household objects purely by sight. In 2003, Pawan Sinha, a professor at the Massachusetts Institute of Technology, set up a program in the framework of the Project Prakash[8] and eventually had the opportunity to find five individuals who satisfied the requirements for an experiment aimed at answering Molyneux's question experimentally. Prior to treatment, the subjects (aged 8 to 17) were only able to discriminate between light and dark, with two of them also being able to determine the direction of a bright light. The surgical treatments took place between 2007 and 2010, and quickly brought each subject from total congenital blindness to full sight. A carefully designed test was administered to each subject within the next 48 hours. Based on its results, the experimenters concluded that the answer, in short, to Molyneux's problem is "no". Although after restoration of sight the subjects could distinguish between objects visually almost as effectively as they would by touch alone, they were unable to form the connection between an object perceived using the two different senses. The correlation was barely better than if the subjects had guessed. They had no innate ability to transfer their tactile shape knowledge to the visual domain. However, the experimenters could test three of the five subjects on later dates (5 days, 7 days, and 5 months after, respectively) and found that performance in the touch-to-vision case improved significantly, reaching 80–90%.[9][10][11]
/**
 * This test loosely checks that errors are reported as expected. It
 * checks that the error code given at construction time is reported
 * either by an exception thrown from the execute method or during the
 * processing of single files in the SVGConverterController handler.
 */
class ConfigErrorTest extends AbstractTest implements SVGConverterController {

    String errorCode;
    String foundErrorCode = null;

    public static final String ERROR_DID_NOT_GET_ERROR
        = "ConfigErrorTest.error.did.not.get.error";
    public static final String ERROR_UNEXPECTED_ERROR_CODE
        = "ConfigErrorTest.error.unexpected.error.code";
    public static final String ENTRY_KEY_EXPECTED_ERROR_CODE
        = "ConfigErrorTest.entry.key.expected.error.code";
    public static final String ENTRY_KEY_GOT_ERROR_CODE
        = "ConfigErrorTest.entry.key.got.error.code";

    public ConfigErrorTest(String expectedErrorCode) {
        this.errorCode = expectedErrorCode;
    }

    public String getName() {
        return getId();
    }

    public TestReport runImpl() throws Exception {
        SVGConverter c = new SVGConverter(this);
        c.setDestinationType(DestinationType.PNG);
        c.setSources(new String[]{ "samples/anne.svg" });

        configure(c);

        try {
            c.execute();
        } catch (SVGConverterException e) {
            e.printStackTrace();
            foundErrorCode = e.getErrorCode();
        }

        if (foundErrorCode == null) {
            TestReport report = reportError(ERROR_DID_NOT_GET_ERROR);
            report.addDescriptionEntry(ENTRY_KEY_EXPECTED_ERROR_CODE, errorCode);
            return report;
        }

        if (foundErrorCode.equals(errorCode)) {
            return reportSuccess();
        }

        TestReport report = reportError(ERROR_UNEXPECTED_ERROR_CODE);
        report.addDescriptionEntry(ENTRY_KEY_EXPECTED_ERROR_CODE, errorCode);
        report.addDescriptionEntry(ENTRY_KEY_GOT_ERROR_CODE, foundErrorCode);
        return report;
    }

    protected void configure(SVGConverter c) {
    }

    public boolean proceedWithComputedTask(Transcoder transcoder, Map hints,
                                           List sources, List dest) {
        System.out.println("==================> Starting to process Task <=========================");
        return true;
    }

    public boolean proceedWithSourceTranscoding(SVGConverterSource source, File dest) {
        System.out.print("Transcoding " + source + " to " + dest + " ... ");
        return true;
    }

    public boolean proceedOnSourceTranscodingFailure(SVGConverterSource source, File dest,
                                                     String errorCode) {
        System.out.println(" ... FAILURE");
        foundErrorCode = errorCode;
        return true;
    }

    public void onSourceTranscodingSuccess(SVGConverterSource source, File dest) {
        System.out.println(" ... SUCCESS");
    }
}
How Do They Sound? The voice of keyboard instruments has evolved over the last five hundred years, changed by the way that the strings are made to sing, and by the body of the instrument itself. Innovators sought to increase the instrument's volume and expressiveness. When your finger strikes the keyboard on a harpsichord, a quill plucks a string. All notes are of equal duration and volume, yet it can be played very fast. Strike a key on a fortepiano and a hammer raps the string with the force you used, allowing subtle variations in volume and expression. The modern piano amplifies the qualities of the fortepiano, with even greater control and volume. The powerful sound is mostly achieved by strengthening the hammers, strings and frame. The marvelous subtlety of the modern piano comes from improved control of string vibration through dampers and pedals, all in direct response to the player's touch. Explore the history of the piano through the evolution of seven different keyboards on display at The Schubert Club Museum. Listen to a sample piece, and compare the same piece of music, Mozart's "Twinkle, Twinkle", being played on seven different instruments.

17th Century Italian Harpsichord
The harpsichord was the uncontested queen of 16th- and 17th-century European music. Developed in the 1400s in Italy, it is essentially a plucked string instrument like the lute and harp. Because the strings are plucked, not struck, the sound is crisp. Think of the music of Bach: each note stands out strongly, in spite of the dizzyingly fast playing. Typical of its time, this keyboard is constructed entirely out of wood except for the strings and plucking mechanism.
Twinkle Twinkle http://schubert.org/wp-content/uploads/2013/09/italian_mozart.mp3
Poglietti http://schubert.org/wp-content/uploads/2013/09/italian_poglietti.mp3

Copy of 1726 Cristofori Fortepiano, Sutherland (1997)
Bartolomeo Cristofori invented the fortepiano around 1700, while under the patronage of the Medici family in Florence. Unlike the plucked string of the harpsichord, the hammered string of the fortepiano allows the performer to express subtle variations in volume. Its name, "fortepiano," means "loud-soft." Today, only three Cristofori fortepianos survive. The Schubert Club commissioned this copy to allow us to hear his revolutionary innovation.
http://schubert.org/wp-content/uploads/2013/09/cristofori_mozart1.mp3 Twinkle Twinkle
http://schubert.org/wp-content/uploads/2013/09/cristofori_galuppi.mp3 Galuppi

Taws 1791 Square Piano
The square piano originated in Germany around 1742 and, unlike Cristofori's fortepiano, almost immediately became the most popular piano style from St. Petersburg to London, and beyond. The maker of this piano, Charles Taws, emigrated from Scotland and settled in Philadelphia in 1787. This instrument is thought to be the earliest American piano both signed and dated by its maker. The first square pianos were inexpensive and promoted domestic music-making.
By mid-century they were being mass produced by several US companies. "It has long been the custom with wretched Forte Piano makers in the city, to bribe needy or sordid musicians, for the purpose of recommending their bad instruments to the incautious buyer." Charles Taws and Sons advertisement in Poulson's American Daily Advertiser, February 21, 1816
http://schubert.org/wp-content/uploads/2013/09/taws_mozart.mp3 Twinkle, Twinkle
http://schubert.org/wp-content/uploads/2013/09/taws_sicard.mp3 Sicard

Broadwood 1795 Grand Piano
John Broadwood, a Scottish cabinetmaker, took over Burkat Shudi's harpsichord company in 1771, and the business went on to become the world's largest piano manufacturer. The first grand appeared in 1781. Broadwood developed the "English grand action," more powerful than the "Viennese action" in French and German pianos. He also enhanced the dynamic capability of the fortepiano by revising the pedals and stringing. This and heavier hammers accounted for the distinctive touch. In 1818, Broadwood, a master of self-promotion, presented Beethoven with a grand piano, causing the composer to exclaim: "I shall regard it as an altar upon which I shall place the most beautiful offerings of my spirit to the divine Apollo."
http://schubert.org/wp-content/uploads/2013/09/broadwood_mozart.mp3 Twinkle, Twinkle
http://schubert.org/wp-content/uploads/2013/09/broadwood_beethoven.mp3 Beethoven

1830 Kisting Fortepiano (first half of the 19th century)
This piano was originally owned by Adolph von Menzel, one of the most celebrated German painters of the 19th century, who regularly held musical recitals at his home in Berlin. Among those who played on this instrument were Robert and Clara Schumann, Johannes Brahms, and Felix Mendelssohn. The piano has a light Viennese action, well suited to the music of the era. Donated to the Schubert Club in 1972, this instrument was the beginning of the museum's keyboard collection. Christian Heinrich Kisting began piano manufacture in Potsdam in 1799. In 1815 he moved his business to Berlin, giving himself the title 'Court instrument maker.' In 1828 his son joined the firm. Kisting pianos are mentioned in the writings of E.T.A. Hoffmann and other German writers.
http://schubert.org/wp-content/uploads/2013/07/kisting_mendelssohn.mp3 Mendelssohn
http://schubert.org/wp-content/uploads/2013/07/kisting_mendelssohn.mp3 Beethoven

Stein 1830 Square Piano
This square piano is a product of a piano dynasty. The patriarch, Johann Andreas Stein, pioneered the Viennese action, distinct from Broadwood's English. Its small, leather-covered hammers have a very light, responsive touch. Stein's daughter, Nannette, inherited the piano gene. She moved to Vienna in 1794, and married piano-maker Johann Andreas Streicher, her business partner. Like the Broadwood, the Stein piano appealed to the burgeoning middle class. This piano touted the affluence and sophistication of its owners, while adding the bright sound of music to the domestic scene. "Should a piano player seek nourishment for the soul, and should he be fond not only of precise but also gentle, melting play, he could choose no better instrument than one by Stein." From Musical Yearbook of Vienna and Prague, 1796
http://schubert.org/wp-content/uploads/2013/09/kisting_mozart.mp3 Twinkle, Twinkle

Erard 1844 Upright Piano
Sebastien Erard patented his double escapement action in 1821, which combined the "Viennese" and "English" actions—a marriage of responsiveness with force that allows rapid repetition of notes.
His instruments soon gained high regard among performers. Hungarian-born Franz Liszt was associated with Erard pianos from the start of his career. Liszt's father and Erard struck a business deal: young Liszt would play Erard's pianos if the maker shipped them to concerts. On the inside of this upright, Liszt wrote in French in 1844, "I confess to having played wrong notes and scribbled wretched music on this charming instrument."
http://schubert.org/wp-content/uploads/2013/09/erard_mozart.mp3 Twinkle, Twinkle
http://schubert.org/wp-content/uploads/2013/09/erard_chopin.mp3 Chopin

Streicher 1869 Grand Piano
Johann Baptist Streicher continued the piano-building business of his mother Nannette and grandfather Johann Andreas Stein, taking over the firm in 1833. This piano represents the height of 19th-century Viennese piano construction, responding to the ongoing demand, especially from composer-performers, for an increase in volume and power from the relatively new grand piano. Streicher gave a nearly identical piano to Brahms, who used it until the end of his life. Its smooth, clear tone and fast action are especially complementary to his style. To his friend Clara Schumann, Brahms wrote, "I have a beautiful grand piano from Streicher. With it he wants to demonstrate [his] latest achievements to me, and I believe that if he made a similar one for you, you would be pleased [with it]."
http://schubert.org/wp-content/uploads/2013/09/streicher_mozart.mp3 Twinkle, Twinkle
http://schubert.org/wp-content/uploads/2013/09/streicher_brahms.mp3 Brahms

Bechstein 1878 Grand Piano
With its sturdy iron frame and velvety tone, Bechstein's concert grands were preferred by leading pianists in Europe. This piano, from the Austrian summer house of music patron John DeBodo, was played by Anton Rubinstein, Franz Liszt, Gustav Mahler, Johannes Brahms, Béla Bartók, Zoltán Kodály, and Ernő von Dohnányi. After World War II, the son of the original owner moved to Pennsylvania, where he taught piano. His favorite student, Margaret Baxtresser, inherited the piano on DeBodo's death in 1967 and bequeathed it to The Schubert Club in 2006. Ms. Baxtresser taught at Kent State University, where many prominent performers played on her extraordinary Bechstein piano, including Malcolm Frager, André Watts, Garrick Ohlsson, Ruth Laredo, Rudolf Firkusny and Lorin Maazel.
All sound recordings by James Callahan with narration by Melissa Ousley.
//
//  ViewController.h
//  HanCamera
//
//  Created by 韩畅 on 14/11/6.
//  Copyright (c) 2014 韩畅. All rights reserved.
//

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

// Supported capture aspect-ratio modes.
typedef enum : NSUInteger {
    hcCameraMode9to16,
    hcCameraMode1to1,
    hcCameraMode3to4,
} hcCameraMode;

@interface ViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate>

@property (nonatomic, retain) AVCaptureSession *session;
@property AVCaptureStillImageOutput *imageOutput;
@property AVCaptureVideoPreviewLayer *previewSubLayer;
@property hcCameraMode cameraMode;

#pragma mark Actions
- (IBAction)clickSnapImageButton:(id)sender;
- (IBAction)clickCameraRollButton:(id)sender;
- (IBAction)clickSwitchCameraButton:(id)sender;
- (IBAction)clickFlashlightButton:(id)sender;
- (IBAction)clickProportionButton:(id)sender;

@end
//*** <<< Use Configuration Wizard in Context Menu >>> ***

// <o> SPI Flash <0=>W25X80 <1=>W25Q64
#define TYPE_SPI 0

//*** <<< end of configuration section >>> ***

/* 256 sectors x 4 kB = 1 MB (8 Mbit, W25X80); 2048 sectors x 4 kB = 8 MB (64 Mbit, W25Q64). */
#if TYPE_SPI == 0
  #define FLASH_SECTOR_COUNT ((uint32_t)256)      /* Number of sectors */
#else
  #if TYPE_SPI == 1
    #define FLASH_SECTOR_COUNT ((uint32_t)256*8)  /* Number of sectors */
  #else
    #define FLASH_SECTOR_COUNT ((uint32_t)256)    /* Number of sectors */
  #endif
#endif

#define FLASH_SECTOR_SIZE  ((uint32_t)0x1000)     /* Sector size: 4 kB */
#define FLASH_PAGE_SIZE_   ((uint32_t)256)        /* Programming page size in bytes */
#define FLASH_PROGRAM_UNIT ((uint32_t)1)          /* Smallest programmable unit in bytes */
#define FLASH_ERASED_VALUE ((uint8_t)0xFF)        /* Contents of erased memory */
def t_star(self, s):
    """Minimize f(t) = delta * k * p_func(t / k) + c(s - t) over [-1, s],
    then clamp the minimizer at zero.

    Relies on scipy.optimize.fminbound being imported in the enclosing module.
    """
    delta, k, c, n = self._delta, self._k, self._c, self._n
    f = lambda t: delta * k * self.p_func(t / k) + c(s - t)
    return max(fminbound(f, -1, s), 0)
Women’s Education May Be Even Better Than We Thought: Estimating the Gains from Education When Schooling Ain’t Learning
Women’s schooling has long been regarded as one of the best investments in development. Using two different cross-nationally comparable data sets, which both contain measures of schooling, assessments of literacy, and life outcomes for more than 50 countries, we show that the association of women’s education (defined as schooling and the acquisition of literacy) with four life outcomes (fertility, child mortality, empowerment, and financial practices) is much larger than the standard estimates of the gains from schooling alone. First, estimates of the association of outcomes with schooling alone cannot distinguish between the association of outcomes with schooling that actually produces increased learning and schooling that does not. Second, typical estimates do not address attenuation bias from measurement error. Using the new data on literacy to partially address these deficiencies, we find that the associations of women’s basic education (completing primary schooling and attaining literacy) with child mortality, fertility, and women’s empowerment, and the associations of men’s and women’s basic education with positive financial practices, are three to five times larger than standard estimates. For instance, our country-aggregated OLS estimate of the association of women’s empowerment with primary schooling versus no schooling is 0.15 of a standard deviation of the index, but the estimated association for women with primary schooling and literacy, using IV to correct for attenuation bias, is 0.68, 4.6 times bigger. Our findings raise two conceptual points. First, if the causal pathway through which schooling affects life outcomes is, even partially, through learning, then estimates of the impact of schooling will underestimate the impact of education. Second, decisions about how to invest to improve life outcomes necessarily depend on estimates of the relative impacts and relative costs of schooling (e.g., grade completion) versus learning (e.g., literacy) on life outcomes. Our results share the limitation of all previous observational results that the associations cannot be given a causal interpretation, and much more work will be needed to make reliable claims about causal pathways.
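The attenuation-bias and IV argument above can be made concrete with a small, self-contained simulation. The sketch below is only a generic errors-in-variables illustration, not the paper's estimator or data: two noisy measures of the same latent education variable are generated, OLS on one measure is biased toward zero, and using the second measure as an instrument recovers the true coefficient. The effect size, noise levels and variable names are all assumed for the illustration.

import numpy as np

rng = np.random.default_rng(0)
n = 100_000
beta = 0.5                                 # assumed true effect of education on the outcome

educ = rng.normal(size=n)                  # latent education
y = beta * educ + rng.normal(size=n)       # outcome

# Two independent, noisy measures of the same latent variable
# (think: reported schooling and an assessed-literacy proxy).
m1 = educ + rng.normal(scale=1.0, size=n)
m2 = educ + rng.normal(scale=1.0, size=n)

# OLS of y on the noisy measure m1 is attenuated toward zero,
# roughly beta * var(educ) / (var(educ) + var(noise)) = 0.25 here.
b_ols = np.cov(y, m1)[0, 1] / np.var(m1)

# IV estimate using m2 as an instrument for m1: because the two
# measurement errors are independent, the ratio of covariances recovers beta.
b_iv = np.cov(y, m2)[0, 1] / np.cov(m1, m2)[0, 1]

print(f"true beta = {beta}, OLS = {b_ols:.3f}, IV = {b_iv:.3f}")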
/**
 * This method is mainly responsible for the colors of the board.
 * @param s The String that will determine what kind of board will be implemented.
 * @param bC the color of the board.
 * @param pC the Color of the pits.
 */
private void pickColorScheme(String s, Color bC, Color pC) {
    if (s.equals("Simple")) {
        Board b = new Board(m, new SimpleBoard(bC, pC));
        generateGame(b, 3);
        m.attach(b);
    } else if (s.equals("Rigid")) {
        Board b = new Board(m, new RigidBoard(bC, pC));
        generateGame(b, 3);
        m.attach(b);
    }
}
Train of thought. There's a science to getting people from point A to point B, and Cities in Motion 2 takes a very technical, not particularly user-friendly approach to its simulation of urban mass transit. Despite significant strides in creative freedom over its predecessor, this is still largely a puzzle about imposing a successful public transportation system on a city that initially doesn't support one. Such a transition isn't easy, of course, and Cities in Motion 2 seems intent on reminding you of that every step of the way, but it's a rewarding puzzle to solve. Indeed, it sometimes feels less like a game than some staid city-planning simulator tool at the city hall of a major metropolis. Gone, for instance, is the colorful and seemingly hand-drawn art style that graced the original; in its place, developer Colossal Order has carpeted the sequel with drab, blocky cityscapes that sacrifice personality for greater realism. Paradoxically, this look has made it seem like an older game. In the first few hours, even the streets and sidewalks are largely empty of pedestrians and significant traffic, and the combined effect of this initial desolation and an earthy color palette that would look at home in Fallout 3 is not unlike watching a flyover of Pyongyang, North Korea. But it'd be wrong to deny that there's beauty here as well. Zoom in all the way to street level, and you can follow individual citizens (known here as "Cims," humorously enough) going about their daily commutes to gain clues as to where you'll need a new bus or tram stop. Other little details add spice to the faded palette, such as the way fire trucks plow through red lights or the way you can hear the wind rustling through the trees and the water lapping on the shore when you zoom in on a lakeside forest. Those of us in the United States aren't so accustomed to Cities in Motion 2's European vocabulary of trams, water buses, and metros, but seeing them in action quickly gets the point across. It's also a roomier world, with maps four times larger than we saw in 2011. On a practical level, the map sizes justify the construction of lengthy and lucrative train lines between fare zones. It also allows room for your city to grow, which is handy, because perhaps the greatest addition to Cities in Motion 2 over the first is the ability for your cities to grow at all. While the focus on transportation means you have little or no control over the specific types of building that arise (as you do in SimCity), it means we get to construct dozens of rail and road types with adjustable elevations, and it's always fun to see a new row of buildings slowly pop up along them. I was initially disappointed with the mere six maps that all feature the same Seattle-meets-New York aesthetic, until I realized that there's already a small but blossoming selection of player-made cities on CimExchange.com, and that I could always use the powerful map editor to build a city more to my liking. Or I could, if I were willing to put up with the editor's unwieldiness – a UI design problem that plagues most of Cities in Motion 2. I gave up on my ambitious recreation of Chicago's transit system once I realized how time-consuming it was to lay straight roads in the otherwise-welcome absence of a grid.
The undo option apparently missed the bus during CiM2's development (SimCity has the same problem), so my early attempts left the avenues of my business districts stitched with tram tracks that went nowhere while I adjusted to the grueling block-by-block, lane-by-lane process of laying tracks. The tutorial, consisting of several heavy blocks of text that demand careful study, wasn't much help, and was all but useless while trying to figure out how to lay a subway line at the right depth with a camera that merely lets you see above and below ground with few hints as to elevation. The menus, too, are the antithesis of intuitiveness. Assigning different purchasable buses or rail cars to selected routes almost always resulted in a mess of pop-up windows, each of which needed to be individually closed. Even navigation presents its own small challenges, as moving about the map requires cumbersome juggling of the WASD and arrow keys for direction and height, and occasionally holding down Control for more fluid movements. It's not something you can't get used to (and a patch that allows edge scrolling is allegedly on the way after considerable protests), but I still found myself struggling after many hours. Still, it's a credit to the strengths of Cities in Motion 2 that it managed to keep me playing happily, to the tune of a soundtrack that sounds like it was lifted from a 1980s educational program. Much of its appeal lies in its commitment to the day-to-day workings of contemporary fictional cities over Cities in Motion's 100-year timelines for mostly unalterable maps of Berlin, Helsinki, Vienna, and Amsterdam – particularly in the way traffic patterns correspond to a day-and-night cycle and the way you can expect bursty rush hours on Monday mornings and Friday evenings. These additions provide a richer and more realistic experience that extends far beyond mere cosmetics; they add meaning, for instance, to the new abilities to finely tune transit timetables and to set fares at both a daily and monthly level. Here we have a world in constant flux, where neighborhoods shift from white to blue collar and where every stop matters, and the resulting juggle of timetable tweaks, graph study, and fare adjustments delivers a meaty challenge for micromanagers. Yet the new commitment to realism also means that the pacing might seem unbearably slow, even with accelerated time. Trying to start out with conservative bus lines rather than taking out massive loans and building pricy tram lines, for instance, made the campaign's first surprisingly daunting task of extending 15% coverage to a massive city a real drag. Its realism stumbles, too, in its insistence on matching every line with a depot, and if you're overzealous in your route creation (as I initially was), then Cities in Motion 2 quickly starts looking like City of Depots. It's all great fun when it works, but enough bugs exist to make the experience frustrating at times. Sometimes mission objectives in the 10-hour campaign mode don't update--and CiM2's pacing is such that it may take you a while to realize it--and I had the misfortune after a crash (I only experienced one) to discover that there's no autosave function. Cities in Motion 2 also suffers from a lack of variety in the meager assortment of vehicles, which reveals an all-too-obvious drive toward DLC. Even so, it excels in the way it lets you approach it at your own pace and in one of three difficulty modes.
The campaign mode isn't the only mode, fortunately, and I found it best to spend some time in the sandbox mode with unlimited cash so I'd have a better idea of how to use my tools. There's also a multiplayer mode that allows for up to six players to play both cooperatively and competitively – I found the former useful both for showing a friend some of the techniques I'd learned and for speeding up objectives that might have taken me hours on my own. Playing competitively was less appealing, as the few public matches I could find quickly devolved into races to see who could mess up another player's track first. I won't deny that I had fun in the process, but it was short-lived. I think it's safe to say that multiplayer is unlikely to have the same appeal as the single-player modes unless you can gather together several like-minded buddies.
// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT. package benchmarks import ( json "encoding/json" easyjson "github.com/mailru/easyjson" jlexer "github.com/mailru/easyjson/jlexer" jwriter "github.com/mailru/easyjson/jwriter" ) // suppress unused package warning var ( _ *json.RawMessage _ *jlexer.Lexer _ *jwriter.Writer _ easyjson.Marshaler ) func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks(in *jlexer.Lexer, out *StructModelSl) { isTopLevel := in.IsStart() if in.IsNull() { in.Skip() *out = nil } else { in.Delim('[') if *out == nil { if !in.IsDelim(']') { *out = make(StructModelSl, 0, 0) } else { *out = StructModelSl{} } } else { *out = (*out)[:0] } for !in.IsDelim(']') { var v1 StructModel (v1).UnmarshalEasyJSON(in) *out = append(*out, v1) in.WantComma() } in.Delim(']') } if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks(out *jwriter.Writer, in StructModelSl) { if in == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v2, v3 := range in { if v2 > 0 { out.RawByte(',') } (v3).MarshalEasyJSON(out) } out.RawByte(']') } } // MarshalJSON supports json.Marshaler interface func (v StructModelSl) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v StructModelSl) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *StructModelSl) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *StructModelSl) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks1(in *jlexer.Lexer, out *StructModelPtrSl) { isTopLevel := in.IsStart() if in.IsNull() { in.Skip() *out = nil } else { in.Delim('[') if *out == nil { if !in.IsDelim(']') { *out = make(StructModelPtrSl, 0, 0) } else { *out = StructModelPtrSl{} } } else { *out = (*out)[:0] } for !in.IsDelim(']') { var v4 StructModelPtr (v4).UnmarshalEasyJSON(in) *out = append(*out, v4) in.WantComma() } in.Delim(']') } if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks1(out *jwriter.Writer, in StructModelPtrSl) { if in == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v5, v6 := range in { if v5 > 0 { out.RawByte(',') } (v6).MarshalEasyJSON(out) } out.RawByte(']') } } // MarshalJSON supports json.Marshaler interface func (v StructModelPtrSl) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks1(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v StructModelPtrSl) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks1(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *StructModelPtrSl) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks1(&r, v) return r.Error() } 
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *StructModelPtrSl) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks1(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks2(in *jlexer.Lexer, out *StructModelPtr) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "A": if in.IsNull() { in.Skip() out.A = nil } else { if out.A == nil { out.A = new(string) } *out.A = string(in.String()) } case "B": if in.IsNull() { in.Skip() out.B = nil } else { if out.B == nil { out.B = new(string) } *out.B = string(in.String()) } case "C": if in.IsNull() { in.Skip() out.C = nil } else { if out.C == nil { out.C = new(string) } *out.C = string(in.String()) } case "D": if in.IsNull() { in.Skip() out.D = nil } else { if out.D == nil { out.D = new(string) } *out.D = string(in.String()) } case "E": if in.IsNull() { in.Skip() out.E = nil } else { if out.E == nil { out.E = new(string) } *out.E = string(in.String()) } case "F": if in.IsNull() { in.Skip() out.F = nil } else { if out.F == nil { out.F = new(string) } *out.F = string(in.String()) } case "G": if in.IsNull() { in.Skip() out.G = nil } else { if out.G == nil { out.G = new(string) } *out.G = string(in.String()) } case "H": if in.IsNull() { in.Skip() out.H = nil } else { if out.H == nil { out.H = new(string) } *out.H = string(in.String()) } case "I": if in.IsNull() { in.Skip() out.I = nil } else { if out.I == nil { out.I = new(string) } *out.I = string(in.String()) } case "J": if in.IsNull() { in.Skip() out.J = nil } else { if out.J == nil { out.J = new(string) } *out.J = string(in.String()) } case "K": if in.IsNull() { in.Skip() out.K = nil } else { if out.K == nil { out.K = new(string) } *out.K = string(in.String()) } case "L": if in.IsNull() { in.Skip() out.L = nil } else { if out.L == nil { out.L = new(string) } *out.L = string(in.String()) } case "M": if in.IsNull() { in.Skip() out.M = nil } else { if out.M == nil { out.M = new(string) } *out.M = string(in.String()) } case "N": if in.IsNull() { in.Skip() out.N = nil } else { if out.N == nil { out.N = new(string) } *out.N = string(in.String()) } case "O": if in.IsNull() { in.Skip() out.O = nil } else { if out.O == nil { out.O = new(string) } *out.O = string(in.String()) } case "P": if in.IsNull() { in.Skip() out.P = nil } else { if out.P == nil { out.P = new(string) } *out.P = string(in.String()) } case "Q": if in.IsNull() { in.Skip() out.Q = nil } else { if out.Q == nil { out.Q = new(string) } *out.Q = string(in.String()) } case "R": if in.IsNull() { in.Skip() out.R = nil } else { if out.R == nil { out.R = new(string) } *out.R = string(in.String()) } case "S": if in.IsNull() { in.Skip() out.S = nil } else { if out.S == nil { out.S = new(string) } *out.S = string(in.String()) } case "T": if in.IsNull() { in.Skip() out.T = nil } else { if out.T == nil { out.T = new(string) } *out.T = string(in.String()) } default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks2(out *jwriter.Writer, in StructModelPtr) { out.RawByte('{') first := true _ = first { const prefix string = ",\"A\":" out.RawString(prefix[1:]) if in.A == nil { out.RawString("null") } else { out.String(string(*in.A)) } } { const prefix string = 
",\"B\":" out.RawString(prefix) if in.B == nil { out.RawString("null") } else { out.String(string(*in.B)) } } { const prefix string = ",\"C\":" out.RawString(prefix) if in.C == nil { out.RawString("null") } else { out.String(string(*in.C)) } } { const prefix string = ",\"D\":" out.RawString(prefix) if in.D == nil { out.RawString("null") } else { out.String(string(*in.D)) } } { const prefix string = ",\"E\":" out.RawString(prefix) if in.E == nil { out.RawString("null") } else { out.String(string(*in.E)) } } { const prefix string = ",\"F\":" out.RawString(prefix) if in.F == nil { out.RawString("null") } else { out.String(string(*in.F)) } } { const prefix string = ",\"G\":" out.RawString(prefix) if in.G == nil { out.RawString("null") } else { out.String(string(*in.G)) } } { const prefix string = ",\"H\":" out.RawString(prefix) if in.H == nil { out.RawString("null") } else { out.String(string(*in.H)) } } { const prefix string = ",\"I\":" out.RawString(prefix) if in.I == nil { out.RawString("null") } else { out.String(string(*in.I)) } } { const prefix string = ",\"J\":" out.RawString(prefix) if in.J == nil { out.RawString("null") } else { out.String(string(*in.J)) } } { const prefix string = ",\"K\":" out.RawString(prefix) if in.K == nil { out.RawString("null") } else { out.String(string(*in.K)) } } { const prefix string = ",\"L\":" out.RawString(prefix) if in.L == nil { out.RawString("null") } else { out.String(string(*in.L)) } } { const prefix string = ",\"M\":" out.RawString(prefix) if in.M == nil { out.RawString("null") } else { out.String(string(*in.M)) } } { const prefix string = ",\"N\":" out.RawString(prefix) if in.N == nil { out.RawString("null") } else { out.String(string(*in.N)) } } { const prefix string = ",\"O\":" out.RawString(prefix) if in.O == nil { out.RawString("null") } else { out.String(string(*in.O)) } } { const prefix string = ",\"P\":" out.RawString(prefix) if in.P == nil { out.RawString("null") } else { out.String(string(*in.P)) } } { const prefix string = ",\"Q\":" out.RawString(prefix) if in.Q == nil { out.RawString("null") } else { out.String(string(*in.Q)) } } { const prefix string = ",\"R\":" out.RawString(prefix) if in.R == nil { out.RawString("null") } else { out.String(string(*in.R)) } } { const prefix string = ",\"S\":" out.RawString(prefix) if in.S == nil { out.RawString("null") } else { out.String(string(*in.S)) } } { const prefix string = ",\"T\":" out.RawString(prefix) if in.T == nil { out.RawString("null") } else { out.String(string(*in.T)) } } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v StructModelPtr) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks2(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v StructModelPtr) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks2(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *StructModelPtr) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks2(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *StructModelPtr) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks2(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks3(in *jlexer.Lexer, out *StructModelInternSl) { isTopLevel := in.IsStart() 
if in.IsNull() { in.Skip() *out = nil } else { in.Delim('[') if *out == nil { if !in.IsDelim(']') { *out = make(StructModelInternSl, 0, 0) } else { *out = StructModelInternSl{} } } else { *out = (*out)[:0] } for !in.IsDelim(']') { var v7 StructModelIntern (v7).UnmarshalEasyJSON(in) *out = append(*out, v7) in.WantComma() } in.Delim(']') } if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks3(out *jwriter.Writer, in StructModelInternSl) { if in == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v8, v9 := range in { if v8 > 0 { out.RawByte(',') } (v9).MarshalEasyJSON(out) } out.RawByte(']') } } // MarshalJSON supports json.Marshaler interface func (v StructModelInternSl) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks3(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v StructModelInternSl) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks3(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *StructModelInternSl) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks3(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *StructModelInternSl) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks3(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks4(in *jlexer.Lexer, out *StructModelIntern) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "A": out.A = string(in.StringIntern()) case "B": out.B = string(in.StringIntern()) case "C": out.C = string(in.StringIntern()) case "D": out.D = string(in.StringIntern()) case "E": out.E = string(in.StringIntern()) case "F": out.F = string(in.StringIntern()) case "G": out.G = string(in.StringIntern()) case "H": out.H = string(in.StringIntern()) case "I": out.I = string(in.StringIntern()) case "J": out.J = string(in.StringIntern()) case "K": out.K = string(in.StringIntern()) case "L": out.L = string(in.StringIntern()) case "M": out.M = string(in.StringIntern()) case "N": out.N = string(in.StringIntern()) case "O": out.O = string(in.StringIntern()) case "P": out.P = string(in.StringIntern()) case "Q": out.Q = string(in.StringIntern()) case "R": out.R = string(in.StringIntern()) case "S": out.S = string(in.StringIntern()) case "T": out.T = string(in.StringIntern()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks4(out *jwriter.Writer, in StructModelIntern) { out.RawByte('{') first := true _ = first { const prefix string = ",\"A\":" out.RawString(prefix[1:]) out.String(string(in.A)) } { const prefix string = ",\"B\":" out.RawString(prefix) out.String(string(in.B)) } { const prefix string = ",\"C\":" out.RawString(prefix) out.String(string(in.C)) } { const prefix string = ",\"D\":" out.RawString(prefix) out.String(string(in.D)) } { const prefix string = ",\"E\":" out.RawString(prefix) out.String(string(in.E)) } { const prefix string = ",\"F\":" 
out.RawString(prefix) out.String(string(in.F)) } { const prefix string = ",\"G\":" out.RawString(prefix) out.String(string(in.G)) } { const prefix string = ",\"H\":" out.RawString(prefix) out.String(string(in.H)) } { const prefix string = ",\"I\":" out.RawString(prefix) out.String(string(in.I)) } { const prefix string = ",\"J\":" out.RawString(prefix) out.String(string(in.J)) } { const prefix string = ",\"K\":" out.RawString(prefix) out.String(string(in.K)) } { const prefix string = ",\"L\":" out.RawString(prefix) out.String(string(in.L)) } { const prefix string = ",\"M\":" out.RawString(prefix) out.String(string(in.M)) } { const prefix string = ",\"N\":" out.RawString(prefix) out.String(string(in.N)) } { const prefix string = ",\"O\":" out.RawString(prefix) out.String(string(in.O)) } { const prefix string = ",\"P\":" out.RawString(prefix) out.String(string(in.P)) } { const prefix string = ",\"Q\":" out.RawString(prefix) out.String(string(in.Q)) } { const prefix string = ",\"R\":" out.RawString(prefix) out.String(string(in.R)) } { const prefix string = ",\"S\":" out.RawString(prefix) out.String(string(in.S)) } { const prefix string = ",\"T\":" out.RawString(prefix) out.String(string(in.T)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v StructModelIntern) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks4(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v StructModelIntern) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks4(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *StructModelIntern) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks4(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *StructModelIntern) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks4(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks5(in *jlexer.Lexer, out *StructModel) { isTopLevel := in.IsStart() if in.IsNull() { if isTopLevel { in.Consumed() } in.Skip() return } in.Delim('{') for !in.IsDelim('}') { key := in.UnsafeFieldName(false) in.WantColon() if in.IsNull() { in.Skip() in.WantComma() continue } switch key { case "A": out.A = string(in.String()) case "B": out.B = string(in.String()) case "C": out.C = string(in.String()) case "D": out.D = string(in.String()) case "E": out.E = string(in.String()) case "F": out.F = string(in.String()) case "G": out.G = string(in.String()) case "H": out.H = string(in.String()) case "I": out.I = string(in.String()) case "J": out.J = string(in.String()) case "K": out.K = string(in.String()) case "L": out.L = string(in.String()) case "M": out.M = string(in.String()) case "N": out.N = string(in.String()) case "O": out.O = string(in.String()) case "P": out.P = string(in.String()) case "Q": out.Q = string(in.String()) case "R": out.R = string(in.String()) case "S": out.S = string(in.String()) case "T": out.T = string(in.String()) default: in.SkipRecursive() } in.WantComma() } in.Delim('}') if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks5(out *jwriter.Writer, in StructModel) { out.RawByte('{') first := true _ = first { const prefix string = ",\"A\":" out.RawString(prefix[1:]) 
out.String(string(in.A)) } { const prefix string = ",\"B\":" out.RawString(prefix) out.String(string(in.B)) } { const prefix string = ",\"C\":" out.RawString(prefix) out.String(string(in.C)) } { const prefix string = ",\"D\":" out.RawString(prefix) out.String(string(in.D)) } { const prefix string = ",\"E\":" out.RawString(prefix) out.String(string(in.E)) } { const prefix string = ",\"F\":" out.RawString(prefix) out.String(string(in.F)) } { const prefix string = ",\"G\":" out.RawString(prefix) out.String(string(in.G)) } { const prefix string = ",\"H\":" out.RawString(prefix) out.String(string(in.H)) } { const prefix string = ",\"I\":" out.RawString(prefix) out.String(string(in.I)) } { const prefix string = ",\"J\":" out.RawString(prefix) out.String(string(in.J)) } { const prefix string = ",\"K\":" out.RawString(prefix) out.String(string(in.K)) } { const prefix string = ",\"L\":" out.RawString(prefix) out.String(string(in.L)) } { const prefix string = ",\"M\":" out.RawString(prefix) out.String(string(in.M)) } { const prefix string = ",\"N\":" out.RawString(prefix) out.String(string(in.N)) } { const prefix string = ",\"O\":" out.RawString(prefix) out.String(string(in.O)) } { const prefix string = ",\"P\":" out.RawString(prefix) out.String(string(in.P)) } { const prefix string = ",\"Q\":" out.RawString(prefix) out.String(string(in.Q)) } { const prefix string = ",\"R\":" out.RawString(prefix) out.String(string(in.R)) } { const prefix string = ",\"S\":" out.RawString(prefix) out.String(string(in.S)) } { const prefix string = ",\"T\":" out.RawString(prefix) out.String(string(in.T)) } out.RawByte('}') } // MarshalJSON supports json.Marshaler interface func (v StructModel) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks5(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v StructModel) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks5(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *StructModel) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks5(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *StructModel) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks5(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks6(in *jlexer.Lexer, out *MapModelSl) { isTopLevel := in.IsStart() if in.IsNull() { in.Skip() *out = nil } else { in.Delim('[') if *out == nil { if !in.IsDelim(']') { *out = make(MapModelSl, 0, 8) } else { *out = MapModelSl{} } } else { *out = (*out)[:0] } for !in.IsDelim(']') { var v10 MapModel (v10).UnmarshalEasyJSON(in) *out = append(*out, v10) in.WantComma() } in.Delim(']') } if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks6(out *jwriter.Writer, in MapModelSl) { if in == nil && (out.Flags&jwriter.NilSliceAsEmpty) == 0 { out.RawString("null") } else { out.RawByte('[') for v11, v12 := range in { if v11 > 0 { out.RawByte(',') } (v12).MarshalEasyJSON(out) } out.RawByte(']') } } // MarshalJSON supports json.Marshaler interface func (v MapModelSl) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks6(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON 
supports easyjson.Marshaler interface func (v MapModelSl) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks6(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MapModelSl) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks6(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MapModelSl) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks6(l, v) } func easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks7(in *jlexer.Lexer, out *MapModel) { isTopLevel := in.IsStart() if in.IsNull() { in.Skip() } else { in.Delim('{') *out = make(MapModel) for !in.IsDelim('}') { key := string(in.String()) in.WantColon() var v13 string v13 = string(in.String()) (*out)[key] = v13 in.WantComma() } in.Delim('}') } if isTopLevel { in.Consumed() } } func easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks7(out *jwriter.Writer, in MapModel) { if in == nil && (out.Flags&jwriter.NilMapAsEmpty) == 0 { out.RawString(`null`) } else { out.RawByte('{') v14First := true for v14Name, v14Value := range in { if v14First { v14First = false } else { out.RawByte(',') } out.String(string(v14Name)) out.RawByte(':') out.String(string(v14Value)) } out.RawByte('}') } } // MarshalJSON supports json.Marshaler interface func (v MapModel) MarshalJSON() ([]byte, error) { w := jwriter.Writer{} easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks7(&w, v) return w.Buffer.BuildBytes(), w.Error } // MarshalEasyJSON supports easyjson.Marshaler interface func (v MapModel) MarshalEasyJSON(w *jwriter.Writer) { easyjsonD2b7633eEncodeGithubComMacroPowerMarshalBenchGoBenchmarks7(w, v) } // UnmarshalJSON supports json.Unmarshaler interface func (v *MapModel) UnmarshalJSON(data []byte) error { r := jlexer.Lexer{Data: data} easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks7(&r, v) return r.Error() } // UnmarshalEasyJSON supports easyjson.Unmarshaler interface func (v *MapModel) UnmarshalEasyJSON(l *jlexer.Lexer) { easyjsonD2b7633eDecodeGithubComMacroPowerMarshalBenchGoBenchmarks7(l, v) }
// messageMAC returns the hex-decoded HMAC tag from the signature.
func messageMAC(signature string) ([]byte, error) {
	if signature == "" {
		return nil, &errors.Error{
			Category: errors.AUTHENTICATIONERROR,
			Message:  "missing signature",
		}
	}
	buf, err := hex.DecodeString(signature)
	if err != nil {
		return nil, &errors.Error{
			Category: errors.AUTHENTICATIONERROR,
			Message:  fmt.Sprintf("error decoding signature %q: %v", signature, err),
		}
	}
	return buf, nil
}
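A minimal usage sketch for the helper above: once the hex tag is decoded, it would typically be compared in constant time against a freshly computed HMAC. The SHA-256 choice and the validateSignature/key/body names below are assumptions for illustration, not part of the snippet above.

package main

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// validateSignature is a hypothetical caller of a decoder like messageMAC:
// it hex-decodes the tag and compares it, in constant time, against the
// HMAC-SHA256 of the request body. key and body are illustrative inputs.
func validateSignature(signature string, body, key []byte) error {
	got, err := hex.DecodeString(signature) // same role as messageMAC above
	if err != nil {
		return fmt.Errorf("error decoding signature %q: %v", signature, err)
	}
	mac := hmac.New(sha256.New, key)
	mac.Write(body)
	if !hmac.Equal(got, mac.Sum(nil)) { // constant-time comparison
		return fmt.Errorf("signature mismatch")
	}
	return nil
}

func main() {
	key := []byte("example-key")
	body := []byte(`{"ok":true}`)
	mac := hmac.New(sha256.New, key)
	mac.Write(body)
	sig := hex.EncodeToString(mac.Sum(nil))
	fmt.Println(validateSignature(sig, body, key)) // prints <nil>
}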
// Unites two binomial heaps into a single binomial heap.
node* binomialheapunion(node* h1, node* h2)
{
    node* finalheap = binomialmerge(h1, h2);   /* merge root lists by degree */
    if (finalheap == NULL)
        return finalheap;

    node* x = finalheap;           /* current root */
    node* nextx = x->sibling;      /* root following x */
    node* prevx = NULL;            /* root preceding x */

    while (nextx != NULL) {
        nextx = x->sibling;
        if (nextx == NULL)
            break;
        if ((x->degree != nextx->degree) ||
            (nextx->sibling != NULL && (nextx->sibling)->degree == x->degree)) {
            /* Degrees differ, or three consecutive roots share a degree:
               leave x in place and advance. */
            prevx = x;
            x = nextx;
        } else {
            if (x->key <= nextx->key) {
                /* x has the smaller key: make nextx a child of x. */
                x->sibling = nextx->sibling;
                binomiallink(nextx, x);
            } else {
                /* nextx has the smaller key: make x a child of nextx. */
                if (prevx == NULL) {
                    finalheap = nextx;
                } else {
                    prevx->sibling = nextx;
                }
                binomiallink(x, nextx);
                x = nextx;
            }
        }
    }
    return finalheap;
}
Southampton Fire Department took the trophy for best fire truck in the Southampton Village Parade of Lights on Saturday for a truck with a Frozen theme, while a second Southampton Fire Department truck in the parade was decorated to look like Olaf, the living snowman from the Disney film. The windshield served as the eyes, and a carrot nose was added to the front of the truck, which was decorated by the Southampton Hose Company. Olaf’s three points of hair stuck up from the top of the cab, and white and orange Christmas lights gave the Olaf truck its glow for the Parade of Lights. Firefighters dressed as the Frozen characters Elsa, Olaf and Anna. Frozen fever continues in Southampton Village: on Sunday, December 14, kids can meet Frozen characters Elsa, Anna and Hans at Southampton Arts Center, 25 Jobs Lane. Shows are at 10 a.m., 12:30 p.m. and 2:30 p.m. Admission is $30. Visit southamptonchamber.com for tickets.
import java.util.ArrayList;
import java.util.Scanner;

public class test {
    public static void main(String args[]) {
        Scanner sc = new Scanner(System.in);
        int n = sc.nextInt();
        int[] horizontal = new int[n * n];
        int[] vertical = new int[n * n];
        // Rows and columns already claimed by a selected point.
        ArrayList<Integer> h = new ArrayList<>();
        ArrayList<Integer> v = new ArrayList<>();
        for (int i = 0; i < n * n; i++) {
            horizontal[i] = sc.nextInt();
            vertical[i] = sc.nextInt();
        }
        // Greedily pick each point whose row and column are both still unused.
        for (int i = 0; i < n * n; i++) {
            if (!h.contains(horizontal[i]) && !v.contains(vertical[i])) {
                System.out.println(i + 1 + " ");
                h.add(horizontal[i]);
                v.add(vertical[i]);
            }
        }
    }
}
/**
 * Adds a new media action listener and immediately notifies it of the
 * current set of media actions.
 */
public void addListener(ActionListener listener) {
    if (!mListeners.contains(listener)) {
        mListeners.add(listener);
        listener.onMediaActionsChanged(getMediaActions());
    }
}
#!/usr/bin/python
# http://codeforces.com/contest/520/problem/A
# solved by Benegripe
# Pangram check: print YES if the string uses all 26 letters of the alphabet.
n = int(raw_input())  # declared length of the string (not needed for the check)
s = raw_input()
alpha = set()
for x in s:
    alpha.add(x.lower())  # set semantics make an explicit "not in" check redundant
if 26 == len(alpha):
    print "YES"
else:
    print "NO"
/**
 * Executes the menu action: asks the user for a name and a description,
 * then adds the new task to the tracker.
 * @param input interface used to ask the user for input
 * @param tracker tracker the new task is added to
 */
@Override
public void execute(Input input, Tracker tracker) {
    String name = input.ask("item name: ");
    String description = input.ask("item description: ");
    tracker.add(new Task(name, description));
}
// NewMySQLFileRepo creates a MySQL-backed implementation of FileRepo.
func NewMySQLFileRepo(Conn *sql.DB) FileRepo {
	return &mysqlFileRepo{
		Conn: Conn,
	}
}
<filename>src/components/Divider.tsx
import React from 'react'

export type DividerProps = {
  colour?: string
}

export const Divider = (props: DividerProps) => {
  const { colour } = props
  return (
    <div
      className="bottom-auto top-0 left-0 right-0 w-full absolute pointer-events-none overflow-hidden -mt-20"
      style={{ height: '80px', transform: 'translateZ(0)' }}
    >
      <svg
        className="absolute bottom-0 overflow-hidden"
        xmlns="http://www.w3.org/2000/svg"
        preserveAspectRatio="none"
        version="1.1"
        viewBox="0 0 2560 100"
        x="0"
        y="0"
      >
        <polygon
          className={`text-${colour} fill-current`}
          points="2560 0 2560 100 0 100"
        ></polygon>
      </svg>
    </div>
  )
}

Divider.defaultProps = {
  colour: 'gray-400',
}
<filename>reactfx/src/main/java/org/reactfx/util/TriPredicate.java package org.reactfx.util; @FunctionalInterface public interface TriPredicate<A, B, C> { boolean test(A a, B b, C c); }
<filename>sia4e-P1_Core_Spring-C03_Advanced_wiring-04_scope/src/test/java/com/myapp/ShoppingCartTest.java package com.myapp; import static org.junit.Assert.assertNotEquals; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.myapp.Config; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(classes = Config.class) public class ShoppingCartTest { @Autowired private StoreService storeService1; @Autowired private StoreService storeService2; @Test public void shopTest() { System.out.println(storeService1); System.out.println(storeService2); assertNotEquals(storeService1, storeService2); } }
{-# LANGUAGE CPP #-} module JS.DOM.EventTarget.Node.Element.HTML ( -- | constructor not exported HTMLElement , IHTMLElement(..) ) where import Control.Monad.IO.Class import JS.Data import JS.DOM.EventTarget.Node.Element import JS.DOM.EventTarget.Node.Element.HTML.Internal import Prelude hiding (id) -- | https://developer.mozilla.org/en-US/docs/Web/API/HTMLElement class IElement j => IHTMLElement j where focus :: MonadIO m => j -> m () focus = liftIO . js_focus . toJS blur :: MonadIO m => j -> m () blur = liftIO . js_blur . toJS instance IHTMLElement HTMLElement #ifdef __GHCJS__ foreign import javascript unsafe "$1.focus();" js_focus :: JSVal -> IO () foreign import javascript unsafe "$1.blur();" js_blur :: JSVal -> IO () #else js_focus :: JSVal -> IO () js_focus _ = pure () js_blur :: JSVal -> IO () js_blur _ = pure () #endif
def parse_value(self, value, type, tag):
    raise NotImplementedError
CORRELATIONS BETWEEN MUSCULAR STRENGTH AND INDUSTRIAL WORK PERFORMANCE IN MENTALLY RETARDED PERSONS
An important task with mentally retarded persons is to place them in an occupation which they can manage without difficulties. With this goal in mind, a group of young adult mentally retarded persons, presented in a previous report (2), underwent evaluation, at the workshops of the ALA centre, Uppsala, of their performance in industrial work. It was considered of interest to determine whether this performance was influenced by muscular strength. In the present study, measurements were therefore made of the isometric maximal muscular strength in essentially the same group of persons, and the different components of the test battery used were correlated, by multiple regression analysis, with their performance in industrial work. The studies in this group were made on 42 male and 31 female persons, aged 19–39 years, with different degrees of mental retardation.
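The multiple regression step mentioned above can be sketched as follows; every number and predictor name in this example is an invented placeholder used only to show the shape of the analysis, not data from the study.

import numpy as np

# Illustrative sketch of regressing work performance on strength-test
# components. The values are made-up placeholders; only the form of the
# multiple regression is shown.
rng = np.random.default_rng(0)
n = 73                                    # 42 male + 31 female participants
grip = rng.normal(300, 50, n)             # hypothetical strength components
elbow_flexion = rng.normal(250, 40, n)
trunk_extension = rng.normal(600, 90, n)
performance = 0.02 * grip + 0.01 * elbow_flexion + rng.normal(0, 1, n)

X = np.column_stack([np.ones(n), grip, elbow_flexion, trunk_extension])
coef, residuals, rank, _ = np.linalg.lstsq(X, performance, rcond=None)
print("intercept and partial regression coefficients:", coef)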
Factor-specific deactivation of leucocyte chemotaxis in vivo. This report analyses the locomotory capacity of polymorphonuclear leucocytes (PMNL) isolated from 7 patients with bacterial meningitis. Seven healthy control subjects were also investigated in parallel. It was found that PMNL from the patients suffering from meningitis, isolated both from peripheral blood and from the cerebrospinal fluid, had lost their ability to respond chemotactically to activated serum but not to the chemotactic peptide f-Met-Leu-Phe. The normal chemotactic responsiveness of blood PMNL was restored once the patients recovered from infection.
<gh_stars>0 package org.progress.web.util; public class ParamName { public final static String APARTAMENTS_ID = "id"; public final static String TYPE_OF_SALES = "typeOfSales"; public final static String CITY_NAME = "cityName"; public final static String STREET_NAME = "streetName"; public final static String HOUSE_NUMBER = "houseNumber"; public final static String BUILDING_NUMBER = "buildingNumber"; public final static String KLADR_ID = "kladrId"; public final static String SHORT_ADDRESS = "shortAddress"; public final static String APARTAMENT_LAN = "apartamentLan"; public final static String APARTAMENT_LON = "apartamentLon"; public final static String ROOMS = "rooms"; public final static String DWELLING_TYPE = "dwellingType"; public final static String PRICE = "price"; public final static String CITY_DISTRICT = "cityDistrict"; public final static String FLOOR = "floor"; public final static String FLOORS = "floors"; public final static String ROOM_NUMBER = "roomNumber"; public final static String MATERIAL = "material"; public final static String SIZE_APARTAMENT = "sizeApartament"; public final static String SIZE_LIVING = "sizeLiving"; public final static String SIZE_KITCHEN = "sizeKitchen"; public final static String BALCONY = "balcony"; public final static String LOGGIA = "loggia"; public final static String YEAR_OF_CONSTRUCTION = "yearOfConstruction"; public final static String DESCRIPTION = "description"; public final static String PURE_SALE = "pureSale"; public final static String MORTGAGE = "mortgage"; public final static String EXCHANGE = "exchange"; public final static String RENT = "rent"; public final static String RE_PLANNING = "rePlanning"; public final static String WORKER_ID_TARGET = "idWorkerTarget"; public final static String ID_CUSTOMER = "idCustomer"; public final static String STATUS = "status"; public final static String AD = "isAd"; public final static String CUSTOMERS_ID = "customersId"; public final static String CUSTOMERS_FNAME = "customersFname"; public final static String CUSTOMERS_LNAME = "customersLname"; public final static String CUSTOMERS_MNAME = "customersMname"; public final static String CUSTOMERS_DATE_OF_BIRTHDAY = "customersDateOfBirthday"; public final static String CUSTOMERS_SEX = "customersSex"; public final static String CUSTOMERS_PHONE = "customersPhone"; public final static String CUSTOMERS_EMAIL = "customersEmail"; public final static String CUSTOMERS_ADDRESS = "customersAddress"; public final static String CUSTOMERS_EXTRA = "customersExtra"; public final static String CUSTOMERS_STATUS = "customersStatus"; public final static String WORKER_ID = "userId"; public final static String WORKER_FIRST_NAME = "workerFName"; public final static String WORKER_LAST_NAME = "workerLName"; public final static String WORKER_MIDDLE_NAME = "workerMName"; public final static String WORKER_EMAIL = "workerEmail"; public final static String WORKER_PASSWORD = "<PASSWORD>"; public final static String GROUP_NAME = "groupName"; public final static String NEWS_ID = "newsId"; public final static String NEWS_HEADER = "newsHeader"; public final static String NEWS_TEXT = "newsText"; public final static String PLANNER_ID = "plannerTaskId"; public final static String PLANNER_TARGET_OBJECT_UUID = "plannerTargetObjectUUID"; public final static String PLANNER_TASK_TYPE = "plannerTaskType"; public final static String PLANNER_TASK_COLOR = "plannerTaskColor"; public final static String PLANNER_TASK_TITLE = "plannerTaskTitle"; public final static String PLANNER_TASK_DESCRIPTION = 
"plannerTaskDescription"; public final static String PLANNER_TASK_START_DATE = "plannerTaskStartDate"; public final static String PLANNER_TASK_END_DATE = "plannerTaskEndDate"; }
<gh_stars>1-10 package net.dugged.cutelessmod.mixins; import net.dugged.cutelessmod.CutelessMod; import net.minecraft.client.gui.GuiButton; import net.minecraft.client.gui.GuiIngameMenu; import net.minecraft.client.gui.GuiScreen; import net.minecraft.client.multiplayer.GuiConnecting; import net.minecraft.client.resources.I18n; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; @Mixin(GuiIngameMenu.class) public abstract class MixinGuiIngameMenu extends GuiScreen { @Inject(method = "updateScreen", at = @At("HEAD")) public void addReloadButton(final CallbackInfo ci) { if (!this.mc.isIntegratedServerRunning()) { CutelessMod.currentServer = this.mc.getCurrentServerData(); if (GuiScreen.isShiftKeyDown()) { (this.buttonList.get(0)).displayString = I18n.format("text.cutelessmod.relog"); } else (this.buttonList.get(0)).displayString = I18n.format("menu.disconnect"); } } @Inject(method = "actionPerformed", at = @At("RETURN")) public void handleRelogButton(final GuiButton button, final CallbackInfo ci) { if (!this.mc.isIntegratedServerRunning() && GuiScreen.isShiftKeyDown() && button.id == 1) { this.mc.displayGuiScreen(new GuiConnecting(this, this.mc, CutelessMod.currentServer)); } } }
These are incredibly quick, incredibly easy and incredibly good! I keep all the ingredients made up in my freezer and refrigerator so that such lunches are a snap for me! These little gems can’t be picked up and eaten like regular pizza as they are too wet/messy, so get out a fork to enjoy these. This recipe is not suitable until you reach the nuts rung of the Phase 2 OWL carb ladder. They are OK for Primal diners, but not Paleo. These would be lovely served with a nice green salad. These can also be made with ground beef, or with a bit of shredded, cooked chicken. More delicious low-carb recipes can be at your fingertips with your very own set of Jennifer Eloff and friends’ best-selling cookbooks LOW CARBING AMONG FRIENDS. She has collaborated with famous low-carb Chef George Stella and several other talented chefs to bring you a wealth of delicious recipes you are going to want to try. Even a few of my recipes are in her cookbooks! Order your 5-volume set TODAY! (available individually) from Amazon or: http://amongfriends.us/order.php DISCLAIMER: I do not get paid for this book promotion or for the inclusion of my recipes therein. I do so merely because they are GREAT cookbooks any low-carb cook would be proud to add to their cookbook collection INGREDIENTS: 12 oz. breakfast sausage (I make homemade sausage) 1 egg, beaten ¼ tsp. each fennel seed and dried oregano leaves (or ½ tsp. Italian seasoning) ¼ c. low-carb spaghetti sauce (I use Lucini basil pesto sauce) ¼ c. pesto sauce (or use commercial pesto) 1 c. grated mozzarella cheese (about 4 oz.) ½ oz. thin sliced red or green bell pepper, cut into strips DIRECTIONS: Preheat oven to 350º. In a medium mixing bowl, well mix the sausage, egg, fennel and oregano with a fork or your hands. Spoon about 1 tablespoon of meat into each of 12 muffin cups and press down with the back of the spoon. I used a silicone muffin pan. Pop into your 350º preheated oven for 15 minutes. Remove, tilt and blot off excess grease with paper toweling. Top each meat pattie evenly with 1 tsp. spaghetti sauce. Next spread 1 tsp. pesto sauce on top. Sprinkle each with about 1T. of the mozzarella cheese, going back and using it all up evenly. Top with a piece of bell pepper. Pop back into oven and bake for an additional 10-15 minutes. Cool slightly and gently lift them out with a fork onto your serving platter. NUTRITIONAL INFO: Makes 12 mini-meatzas, each contains: 161 calories 13.4 g fat 1.44 g carbs, .36 g fiber, 1.08 g NET CARBS 8.5 g protein 136 mg sodium
Paul Finebaum is not on board with Nick Saban's antics as of late, and is especially confused by Saban blocking Maurice Smith's transfer to Georgia. (2:01) Alabama reserve defensive back Maurice Smith said Monday that he's "willing to go as far as it takes" in his ongoing battle with the Crimson Tide, who have blocked his request to join SEC rival Georgia as a graduate transfer. In an interview with AL.com, Smith made his first public comments about his battle with the school, saying he's determined to see the process through because he believes it is "bigger than me." "I believe that this will not only help me but help anyone else who comes into my situation in the future," Smith, a senior who graduated from Alabama in three years, said in the interview. "That's why I say it's almost bigger than me because not only am I stepping out on faith but I'm trying to get a point across that if you do everything right you should be awarded what is better for you or best for you if that's something you choose to do. "There's really not a deadline. I'm willing to go as far as it takes to play the season at the school that I desire, which is Georgia." "I believe that this will not only help me but help anyone else who comes into my situation in the future," said Maurice Smith, who is seeking to transfer from Alabama to SEC rival Georgia. Mark J. Rebilas/USA TODAY Sports In mid-June, AL.com reported that Smith was seeking to leave Alabama as a graduate transfer, which would make him eligible to play immediately at another school. Saban addressed the issue last week, citing an SEC rule that restricts players from transferring to another SEC school, except under unique circumstances. "I know there's a lot of talk out there and interest in the Maurice Smith deal,'' Saban said. "Nothing has changed. We support the SEC rule of not granting guys' release to go to other SEC schools. It has been our policy here not to do that unless there was a special circumstance.'' Saban also said Smith had opportunities to go to schools outside the SEC, but that he and the staff had told Smith he would be a significant contributor this season if he stayed. Alabama opened camp last Wednesday. "We have told him, 'Because we support the SEC rule, it would not be in your best interest to have to sit out. If you want to go someplace else [outside the SEC], we'd be glad to help you do that,'" Saban said. In the AL.com interview, Smith expressed disappointment that what he felt was going to be a simple transfer has "transpired into something far greater." "There's a lot of adversity that's been thrown at me, the university, coach [Nick] Saban, and it's really making it something that it's really not," Smith said to AL.com. "I believe it was something that was just as simple as a transfer. But now it's kind of transpired into something far greater, and a lot of bad things have been coming out of this. It's just very disappointing because I never meant to hurt anyone. ... I just feel like I've earned the right to better myself my last year and go into this season without any doubts. "Honestly, it's not what everyone is making it out to be. It's not personal. I feel like it's what's best for me for my senior year. And this is a hard decision, of course, because I have so much love for all my teammates and all of the staff and everyone for the university, especially all of the love that they've shown me over these last three years. And it's really a business decision. 
It's not meant to cause any hurt or harm to the university, to the coaches or to make anyone look bad appearance-wise."
package conf import ( "gopkg.in/ini.v1" "HFish/utils/log" "container/list" ) var cfg *ini.File func init() { c, err := ini.Load("./config.ini") if err != nil { log.Pr("HFish", "127.0.0.1", "打开配置文件失败", err) } c.BlockMode = false cfg = c } func Get(node string, key string) string { val := cfg.Section(node).Key(key).String() return val } func GetInt(node string, key string) int { val, _ := cfg.Section(node).Key(key).Int() return val } func Contains(l *list.List, value string) (bool, *list.Element) { for e := l.Front(); e != nil; e = e.Next() { if e.Value == value { return true, e } } return false, nil } func GetCustomName() []string { names := cfg.SectionStrings() var existConfig []string rpcStatus := Get("rpc", "status") // 判断 RPC 是否开启 1 RPC 服务端 2 RPC 客户端 if rpcStatus == "1" || rpcStatus == "0" { existConfig = []string{ "DEFAULT", "rpc", "admin", "api", "plug", "web", "deep", "ssh", "redis", "mysql", "telnet", "ftp", "mem_cache", "http", "tftp", "elasticsearch", "vnc", } } else if rpcStatus == "2" { existConfig = []string{ "DEFAULT", "rpc", "api", "plug", "web", "deep", "ssh", "redis", "mysql", "telnet", "ftp", "mem_cache", "http", "tftp", "elasticsearch", "vnc", } } for i := 0; i < len(names); i++ { for j := 0; j < len(existConfig); j++ { if names[i] == existConfig[j] { names = append(names[:i], names[i+1:]...) } } } return names }
import { Component, OnInit, OnDestroy } from '@angular/core'; import { FormGroup, FormBuilder } from '@angular/forms'; import { View } from '../../../../shared/view.shared'; import { ClientService, Client } from '../../../../service/client.service'; import { UserService, User } from '../../../../service/user.service'; import stateCity from "./state-city.type"; import { Subject } from 'rxjs'; import { map } from 'rxjs/operators'; import { take } from 'rxjs/operators'; import { takeUntil } from 'rxjs/operators' @Component({ selector: 'alter-perfil-of-client', templateUrl: './alter-perfil-of-client.component.html', styleUrls: ['./alter-perfil-of-client.component.css'], }) export class AlterPerfilOfClientComponent implements OnInit, OnDestroy { private unsubscribe$ = new Subject(); public clientForm: FormGroup = this.createForm(this.clientService.client); public stateCity = stateCity; private clientImageIconUrl = { uploadImage : false } public active = { text: "", message: false, }; public imageNew: Blob | Uint8Array | ArrayBuffer; public imageDisplay: any = localStorage.getItem("avatar"); public phoneMask: any = { mask: "(00) 0000-0000", lazy: false, }; public cellPhoneMask: any = { mask: "(00) 0 0000-0000", lazy: false, }; public birth: any = { mask: "00/00/0000", lazy: false, }; public cepMask: any = { mask: "00000-000", lazy: false, }; constructor( private view:View, private clientService:ClientService, private userService:UserService, private fb: FormBuilder ){} private createForm (client: Client): FormGroup { return this.fb.group (client); } private updateForm(client: Partial<Client>): void { this.clientForm.patchValue(client) } private getForm():Client { return this.clientForm.value } public ngOnInit(){ window.scroll(0,0); this.userService.getUserInState().pipe(takeUntil(this.unsubscribe$)).subscribe((user)=>{ if(Object.keys(user).length != 0){ this.getClient() } }) } public getClient(){ let ClientByForeignKey:Client = this.clientService.client ClientByForeignKey.FOREIGN_KEY_USER = this.userService.pullUserInState().PRIMARY_KEY this.clientService.getClientByForeignKeyUserInApi(ClientByForeignKey).pipe(takeUntil(this.unsubscribe$)).subscribe(c=>{ if(Object.keys(c).length != 0){ let client:Client = c[0] this.convertURLtoFile(client.imageIconUrl) if(client.email == ''){ client.email = this.userService.pullUserInState().email } if(client.stateFederal == ''){ client.stateFederal = 'AC' } if(client.city == ''){ client.city = 'Acrelândia' } if(client.country == ''){ client.country = 'Brazil' } if(client.sex == ''){ client.sex = 'male' } this.updateForm(client) this.view.putLoader() } }) } public identifyMyselfAs(sex:String){ if(sex == 'other'){ this.clientForm.patchValue({sex:''}) setTimeout(() => {document.getElementById("identifyMyselfAsSex").focus(),100}); } if(sex == 'removeSexOther'){ this.clientForm.patchValue({sex:'male'}) } } public setCity(value){ for (const key in stateCity) { if (stateCity[key].sigla == value){ this.clientForm.patchValue({city:stateCity[key].cidades[0]}) } } } public async validateCep(caracter) { let caracterUnderline = caracter.replace(/_/g, ""); let caracterSubtrair = caracterUnderline.replace(/-/g, ""); let cep = caracterSubtrair; if (cep.length == 8) { const postal = await fetch(`https://viacep.com.br/ws/${cep}/json/`).then((resp) => resp.json()); if (postal.erro) { this.clientForm.get("cep").markAsTouched(); } else { this.setCep(postal); } } } public setCep(postal) { this.clientForm.patchValue({ neighborhood: postal.bairro, city: postal.localidade, street: 
postal.logradouro, stateFederal: postal.uf, }); this.clientForm.get("neighborhood").markAsTouched(); this.clientForm.get("city").markAsTouched(); this.clientForm.get("street").markAsTouched(); this.clientForm.get("stateFederal").markAsTouched(); this.clientForm.get("cep").markAsTouched(); } public uploadImage(event: Event) { var file = (<HTMLInputElement>event.target).files[0]; if ((<HTMLInputElement>event.target).files[0] && <HTMLInputElement>event.target) { var reader = new FileReader(); reader.onload = () => { this.imageDisplay = reader.result; this.imageNew = file; this.clientImageIconUrl.uploadImage = true }; reader.readAsDataURL((<HTMLInputElement>event.target).files[0]); } } public async convertURLtoFile(url){ let response = await fetch(url); let data = await response.blob(); let metadata = { type: 'image/jpeg' }; const time = new Date(); let file = new File([data], `${time.getFullYear()}${time.getMonth()+1}${time.getDate()}${time.getHours()}${time.getMinutes()}${time.getSeconds()}${time.getMilliseconds()}.jpg`, metadata); var reader = new FileReader(); reader.onload = () => { this.imageDisplay = reader.result; this.imageNew = file; }; reader.readAsDataURL(file); } public validateForm() { this.active.message = false; this.clientForm.get("name").markAsTouched(); this.clientForm.get("sex").markAsTouched(); if (this.clientForm.get("sex").invalid) { this.active.text = "*Me identifico como"; this.active.message = true; } if (this.clientForm.get("name").invalid) { this.active.text = "*Qual o seu nome?"; this.active.message = true; } if (this.clientForm.valid && this.imageNew) { this.view.setLoader(true); this.salve() console.log(this.clientForm.value); } window.scroll(0, 0); } public async salve(){ await this.alterarNameOfUser() if(this.clientImageIconUrl.uploadImage){ if(this.clientForm.get("imageIconPath").value == 'google' || this.clientForm.get("imageIconPath").value == 'email') await this.sendImagemStorage() await this.putImagemStorage() } const client:Client = this.clientForm.value await this.clientService.putClientByUidInApi(client) this.view.setLoader(false); } public async putImagemStorage(){ await this.clientService.putImagemStorageInApi(this.clientForm.get("imageIconPath").value,this.imageNew).then(async (url: any) => { this.clientForm.patchValue({ imageIconUrl : await url }) }); } public async sendImagemStorage(){ const data = new Date(); this.clientForm.patchValue({ imageIconPath: `icon/${data.getFullYear()}${data.getMonth()+1}${data.getDate()}${data.getHours()}${data.getMinutes()}${data.getSeconds()}${data.getMilliseconds()}.jpg`, }); await this.clientService.sendImagemStorageInApi(this.clientForm.get("imageIconPath").value,this.imageNew).then(async (url: any) => { this.clientForm.patchValue({ imageIconUrl : await url }) }); } public async alterarNameOfUser(){ if(this.userService.pullUserInState().name != this.clientForm.value.name){ let user:User = this.userService.pullUserInState() user.name = this.clientForm.value.name delete user.email delete user.password delete user.retypePassword delete user.terms delete user.type //await this.userService.putUserByUidInApi(user) } } public ngOnDestroy(){ this.unsubscribe$.next(); this.unsubscribe$.complete(); } }
def update_general(self, engine):
    changed = False
    # Each feature follows the same pattern: only touch the engine when the
    # desired value is set and differs from the current status.
    if self.default_nat is not None:
        status = engine.default_nat.status
        if not status and self.default_nat:
            engine.default_nat.enable()
            changed = True
        elif status and not self.default_nat:
            engine.default_nat.disable()
            changed = True
    if self.file_reputation is not None:
        status = engine.file_reputation.status
        if not status and self.file_reputation:
            engine.file_reputation.enable()
            changed = True
        elif status and not self.file_reputation:
            engine.file_reputation.disable()
            changed = True
    if self.antivirus is not None:
        status = engine.antivirus.status
        if not status and self.antivirus:
            engine.antivirus.enable()
            changed = True
        elif status and not self.antivirus:
            engine.antivirus.disable()
            changed = True
    if self.domain_server_address:
        dns = [d.value for d in engine.dns]
        if set(dns) ^ set(self.domain_server_address):
            engine.data.update(domain_server_address=[])
            engine.dns.add(self.domain_server_address)
            changed = True
    return changed
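The three feature toggles above follow an identical pattern, so they could plausibly be collapsed into a loop. The sketch below assumes the attribute names on self and engine are exactly those used in the original method; it is an illustration of the refactor, not the module's actual code.

def update_general(self, engine):
    # Sketch of a loop-based refactor; assumes self.<name> and engine.<name>
    # use exactly the attribute names seen in the original method.
    changed = False
    for name in ("default_nat", "file_reputation", "antivirus"):
        desired = getattr(self, name)
        if desired is None:
            continue
        feature = getattr(engine, name)
        if desired and not feature.status:
            feature.enable()
            changed = True
        elif feature.status and not desired:
            feature.disable()
            changed = True
    if self.domain_server_address:
        dns = [d.value for d in engine.dns]
        if set(dns) ^ set(self.domain_server_address):
            engine.data.update(domain_server_address=[])
            engine.dns.add(self.domain_server_address)
            changed = True
    return changed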
def Repeat(t, max=4294967295, ctx=None):
    """Return a tactic that keeps applying `t` until no subgoal is modified
    by it, or the maximum number of iterations `max` is reached."""
    t = _to_tactic(t, ctx)
    return Tactic(Z3_tactic_repeat(t.ctx.ref(), t.tactic, max), t.ctx)
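A brief usage sketch, assuming this is the standard z3py Repeat tactic combinator; the goal and the split-clause/skip tactics are just an illustrative example.

from z3 import Reals, Goal, Or, Repeat, OrElse, Tactic

# Split all disjunctions in the goal by repeating split-clause until it no
# longer applies; OrElse with 'skip' keeps Repeat from failing on subgoals
# with nothing left to split.
x, y = Reals('x y')
g = Goal()
g.add(Or(x == 0, x == 1), Or(y == 0, y == 1), x + y > 0)

t = Repeat(OrElse(Tactic('split-clause'), Tactic('skip')))
for subgoal in t(g):
    print(subgoal)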
package com.gwtjs.icustom.common.htmlarea.service; import static org.junit.Assert.assertNotNull; import java.util.ArrayList; import java.util.Date; import java.util.List; import javax.inject.Inject; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.junit4.SpringRunner; import com.gwtjs.icustom.entity.PageVO; import com.gwtjs.icustom.entity.PagedResult; import com.gwtjs.icustom.entity.ResultWrapper; import com.gwtjs.icustom.htmlarea.HtmlAreaApplication; import com.gwtjs.icustom.htmlarea.service.IHtmlAreaService; import com.gwtjs.icustom.htmlarea.vo.HtmlAreaVO; import com.gwtjs.icustom.log.ICustomLogger; import com.gwtjs.icustom.log.ICustomLoggerFactory; import com.gwtjs.icustom.util.json.JsonUtil; @RunWith(SpringRunner.class) @SpringBootTest(classes = HtmlAreaApplication.class) public class IHtmlAreaSaveOrUpdateTests { private static final ICustomLogger log = ICustomLoggerFactory.getLogger(IHtmlAreaSaveOrUpdateTests.class); @Inject private IHtmlAreaService htmlAreaService; @Test public void contextLoads() { assertNotNull(htmlAreaService); log.info("htmlAreaService", htmlAreaService); } @Test public void testSaveOrUpdateHTML() { String content = "{\"name\":\"中文试一下\",\"title\":\"中文试一下\",\"mappingUrl\":\"fasdfasdf\",\"content\":\"asdfasdf中文试一下\",\"description\":\"中文试一下\",\"programFilterEnable\":true,\"category\":\"\",\"validFromDate\":\"2017-12-04\",\"validToDate\":\"2017-12-04\",\"htmlAreaId\":\"0\"}"; HtmlAreaVO vo = (HtmlAreaVO) JsonUtil.getDTO(content, HtmlAreaVO.class); ResultWrapper result = htmlAreaService.saveOrUpdate(vo); log.info("saveOrUpdate result", result); } }
/** * @author Santhosh Kumar Tekuri */ public class ReadMessage extends Task{ private final long maxHeadSize; private final MessageParser parser; protected ReadMessage(long maxHeadSize, MessageParser parser){ super(OP_READ); this.maxHeadSize = maxHeadSize; this.parser = parser; } @Override protected boolean process(int readyOp) throws IOException{ while(true){ int read; try{ read = in.read(buffer); }catch(IOException ex){ if(consumed+buffer.position()==0 && message instanceof Request) throw IGNORABLE_EOF_EXCEPTION; throw ex; } if(read==0){ in.addReadInterest(); return false; } if(read==-1){ if(consumed==0 && message instanceof Request) throw IGNORABLE_EOF_EXCEPTION; throw message.badMessage("Unexpected EOF"); } buffer.flip(); int pos = buffer.position(); parser.consumed = consumed; boolean done = parser.parse(buffer, false); consumed += buffer.position()-pos; if(maxHeadSize>0 && (done ? consumed>maxHeadSize : consumed>=maxHeadSize)){ if(message instanceof Request) throw Status.REQUEST_HEADER_FIELDS_TOO_LARGE; else throw message.badMessage("Response Header Fields Too Large"); } if(done) break; else buffer.compact(); } if(buffer.hasRemaining()){ in = new BufferInput(in, buffer); // unread buffer = Reactor.current().allocator.allocate(); } if(HTTP){ println("readMessage(){"); Debugger.println(message); } // init payload -------------------------------- keepAlive = message.isKeepAlive(); long contentLength = -1; List<Encoding> encodings = null; if(!emptyPayload){ if(message instanceof Request){ Request request = (Request)message; if(!request.method.requestPayloadAllowed) emptyPayload = true; }else{ Response response = (Response)message; if(response.status.payloadNotAllowed) emptyPayload = true; } } if(!emptyPayload){ if(message.isChunked()){ HeadersParser trailersParser = new HeadersParser(); trailersParser.resetForTrailers(message); in = new ChunkedInput(in, trailersParser); }else{ Header clHeader = message.headers.get(Message.CONTENT_LENGTH); if(clHeader!=null){ if(clHeader.getValue().length()==0) throw message.badMessage("Empty Content-Length"); if(clHeader.getValue().charAt(0)=='-') throw message.badMessage("Negative Content-Length"); contentLength = Util.parseLong(clHeader.getValue()); if(contentLength==0) emptyPayload = true; else in = new FixedLengthInput(in, contentLength); }else{ encodings = message.getContentEncodings(); if(keepAlive || !(message instanceof Response)){ if(!encodings.isEmpty()) in = encodings.remove(encodings.size()-1).wrap(in); else if(message instanceof Request) emptyPayload = true; else keepAlive = false; } } } } if(!emptyPayload){ if(encodings==null) encodings = message.getContentEncodings(); message.setPayload(new SocketPayload(contentLength, message.headers.value(Message.CONTENT_TYPE), in, encodings)); } if(HTTP){ if(emptyPayload) println("payload = empty"); else println("payload = "+in); println("}"); } return true; } private Message message; private ByteBuffer buffer; private long consumed = 0; private boolean keepAlive; private boolean emptyPayload; public void reset(Message message, boolean emptyPayload){ this.message = message; if(buffer==null) buffer = Reactor.current().allocator.allocate(); else buffer.clear(); consumed = 0; parser.reset(message); keepAlive = false; this.emptyPayload = emptyPayload; } public Message getMessage(){ return message; } public long consumed(){ return consumed; } public boolean keepAlive(){ return keepAlive; } public void dispose(){ if(buffer!=null){ Reactor.current().allocator.free(buffer); buffer = null; } } @Override public 
String toString(){ return "ReadMessage"; } public static final EOFException IGNORABLE_EOF_EXCEPTION = new EOFException(){ @Override public Throwable fillInStackTrace(){ return this; } @Override public String toString(){ return "IGNORABLE_EOF"; } }; }
/** * Test that closing all incognito tabs successfully handles the base tab and * its preview tab opened in incognito mode. This makes sure an incognito profile * shared by the tabs is destroyed safely. */ @Test @MediumTest @Feature({"PreviewTab"}) public void testCloseAllIncognitoTabsClosesPreviewTab() throws Throwable { Assert.assertFalse("Test should have started without any Preview Tab", mEphemeralTabCoordinator.isOpened()); mActivityTestRule.loadUrlInNewTab(mTestServer.getServer().getURL(BASE_PAGE), /*incognito=*/true); mActivityTestRule.getActivity().getTabModelSelector().selectModel(true); ChromeActivity activity = mActivityTestRule.getActivity(); Tab tab = activity.getActivityTab(); Assert.assertTrue(tab.isIncognito()); ContextMenuUtils.selectContextMenuItem(InstrumentationRegistry.getInstrumentation(), activity, tab, PREVIEW_TAB_DOM_ID, R.id.contextmenu_open_in_ephemeral_tab); endAnimations(); BottomSheetController bottomSheet = activity.getRootUiCoordinatorForTesting().getBottomSheetController(); TestThreadUtils.runOnUiThreadBlocking(() -> { bottomSheet.expandSheet(); endAnimations(); IncognitoTabHostUtils.closeAllIncognitoTabs(); endAnimations(); }); Assert.assertEquals(SheetState.HIDDEN, bottomSheet.getSheetState()); }