#![feature(test)] #[cfg(test)] mod tests { extern crate test; use std::rc::Rc; use test::{black_box, Bencher}; /// /// A length function that takes ownership of the input variable /// fn length(s: String) -> usize { s.len() } /// /// The same length function, taking ownership of an Rc /// fn rc_length(s: Rc<String>) -> usize { s.len() // calls to the wrapped object require no additions } #[bench] fn bench_string_clone(b: &mut Bencher) { let s: String = (0..100_000).map(|_| 'a').collect(); b.iter(|| { black_box(length(s.clone())); }); } #[bench] fn bench_string_rc(b: &mut Bencher) { let s: String = (0..100_000).map(|_| 'a').collect(); let rc_s = Rc::new(s); b.iter(|| { black_box(rc_length(rc_s.clone())); }); } #[test] fn cloning() { let s = "abcdef".to_owned(); assert_eq!(length(s), 6); // s is now "gone", we can't use it anymore // therefore we can't use it in a loop either! // ... unless we clone s - at a cost! (see benchmark) let s = "abcdef".to_owned(); for _ in 0..10 { // clone is typically an expensive deep copy assert_eq!(length(s.clone()), 6); } } #[test] fn refcounting() { let s = Rc::new("abcdef".to_owned()); // we can clone Rc (reference counters) with low cost assert_eq!(rc_length(s.clone()), 6); for _ in 0..10 { // cloning an Rc is cheap: it only increments the reference count, no deep copy assert_eq!(rc_length(s.clone()), 6); } } }
Structures, Systems and Organizational Communication Processes in Franchises in times of crisis: the Spanish case. The current economic crisis, together with the Internet revolution, has had direct impacts on the franchise sector in Spain, in particular on its unique communication network. The aim of this research is to analyse how Spanish franchise companies have adapted to these changes through their corporate communications management. We want to determine whether the management of communications is conducive to the growth and consolidation of companies in the market. Corporate communications plans and organizational structures were analyzed to verify whether or not information technology (i.e. the use of the Internet) is maximized, the communications aspect being a critical area of company growth. We found that most franchise companies surveyed had adapted well to the changes in information technology, despite economic challenges. However, the Internet as a communications tool has been limited in its utility to that of a "bulletin board" for information. The marketing advantage of Internet communication, or its use as an avenue for customer exchange and the exchange of goods and services, has yet to be maximized. Future research may look into the details of how companies can maximize the communications-marketing advantage that the Internet can contribute to the franchise sector.
// StartServices reloads, restarts, or starts ATS as necessary, // according to the changed config files and run mode. // Returns nil on success or any error. func (r *TrafficOpsReq) StartServices(syncdsUpdate *UpdateStatus) error { serviceNeeds, err := checkReload(r.Cfg.RunMode, r.getPluginPackagesInstalled(), r.changedFiles) if err != nil { return errors.New("determining if service needs restarted - not reloading or restarting! : " + err.Error()) } log.Infof("t3c-check-reload returned '%+v'\n", serviceNeeds) if (serviceNeeds == t3cutil.ServiceNeedsRestart || serviceNeeds == t3cutil.ServiceNeedsReload) && !r.IsPackageInstalled("trafficserver") { return errors.New("trafficserver needs " + serviceNeeds.String() + " but is not installed.") } svcStatus, _, err := util.GetServiceStatus("trafficserver") if err != nil { return errors.New("getting trafficserver service status: " + err.Error()) } switch r.Cfg.RunMode { case t3cutil.ModeBadAss: startStr := "restart" if svcStatus != util.SvcRunning { startStr = "start" } if _, err := util.ServiceStart("trafficserver", startStr); err != nil { return errors.New("failed to restart trafficserver") } log.Infoln("trafficserver has been " + startStr + "ed") if *syncdsUpdate == UpdateTropsNeeded { *syncdsUpdate = UpdateTropsSuccessful } return nil case t3cutil.ModeReport: if serviceNeeds == t3cutil.ServiceNeedsRestart { log.Errorln("ATS configuration has changed. The new config will be picked up the next time ATS is started.") } else if serviceNeeds == t3cutil.ServiceNeedsReload { log.Errorln("ATS configuration has changed. 'traffic_ctl config reload' needs to be run") } return nil case t3cutil.ModeSyncDS: fallthrough case t3cutil.ModeRevalidate: if serviceNeeds == t3cutil.ServiceNeedsRestart { if *syncdsUpdate == UpdateTropsNeeded { *syncdsUpdate = UpdateTropsSuccessful } log.Errorln("ATS configuration has changed. The new config will be picked up the next time ATS is started.") } else if serviceNeeds == t3cutil.ServiceNeedsReload { log.Infoln("ATS configuration has changed, Running 'traffic_ctl config reload' now.") if _, _, err := util.ExecCommand(config.TSHome+config.TrafficCtl, "config", "reload"); err != nil { if *syncdsUpdate == UpdateTropsNeeded { *syncdsUpdate = UpdateTropsFailed } return errors.New("ATS configuration has changed and 'traffic_ctl config reload' failed, check ATS logs: " + err.Error()) } if *syncdsUpdate == UpdateTropsNeeded { *syncdsUpdate = UpdateTropsSuccessful } log.Infoln("ATS 'traffic_ctl config reload' was successful") } if *syncdsUpdate == UpdateTropsNeeded { *syncdsUpdate = UpdateTropsSuccessful } return nil } return errors.New("Unknown run mode '" + r.Cfg.RunMode.String() + "'! Not reloading or restarting!") }
# Copyright (c) 2020 The Regents of the University of Michigan # All rights reserved. # This software is licensed under the BSD 3-Clause License. import signac project = signac.init_project('lj-signac-example') sps = [{ 'epsilon': 1.0, 'sigma': 1.0, 'kT': kT, 'tau': 0.1, 'r_cut': 3.0, } for kT in (0.1, 1.0, 1.5)] for sp in sps: project.open_job(sp).init()
import { ApolloError } from '@apollo/client'; import { Button, Grid, makeStyles, Typography } from '@material-ui/core'; import clsx from 'clsx'; import { isPlainObject } from 'lodash'; import React, { isValidElement, ReactNode } from 'react'; import { getErrorInfo } from '../../api/error.types'; import { ButtonLink, StatusCode, useNavigate } from '../Routing'; import { ErrorRenderers, renderError } from './error-handling'; const useStyles = makeStyles(({ spacing }) => ({ page: { overflow: 'auto', padding: spacing(4, 0, 0, 4), }, header: { marginBottom: spacing(4), }, buttons: { marginTop: spacing(3), }, })); export interface ErrorProps { /** * The error body to display. * Strings can be given as a shortcut and converted to appropriate typography. * * An object can also be given to specify react nodes or render functions for * each error code. */ children: ReactNode | ErrorRenderers; /** * The error. This is used to determine if component should show * and it's given to the children error rendering functions */ error?: ApolloError; /** Force rendering on/off instead of being based on error existence */ show?: boolean; /** Style as a complete page (more spacing) */ page?: boolean; /** Turn off back & home buttons */ disableButtons?: boolean; } /** * Display errors. * * @example * <Error error={error}> * {{ * NotFound: (e) => <div>Couldn't find it. {e.message}</div>, * Default: 'Something went wrong', * }} * </Error> */ export const Error = ({ error, children, show, page, disableButtons, }: ErrorProps) => { const classes = useStyles(); const navigate = useNavigate(); if (!(show ?? error)) { return null; } const node = isPlainObject(children) && !isValidElement(children) ? renderError(error, children as ErrorRenderers) : children; const rendered = typeof node !== 'string' ? ( node ) : ( <Typography variant="h2">{node}</Typography> ); const statusCode = error && getErrorInfo(error).codes.includes('NotFound') ? 404 : 500; return ( <div className={clsx(page && classes.page)}> {/* Default status code to be helpful for the most common ones. The children can still override this by rendering <StatusCode /> themselves */} <StatusCode code={statusCode} /> <Typography gutterBottom>Oops, Sorry.</Typography> {rendered} {!disableButtons && ( <Grid container spacing={3} className={classes.buttons}> <Grid item> <Button onClick={() => navigate(-1)} variant="contained" color="secondary" > Back </Button> </Grid> <Grid item> <ButtonLink to="/" variant="contained" color="secondary"> Home </ButtonLink> </Grid> </Grid> )} </div> ); };
Did you ever think you’d hear Nebraska being bandied about as a “toss up” state in a Presidential election? Mitt Romney won the state by 23 points in 2012. Well thanks to Donald Trump, Nebraska has now gone from a Solid Republican state to a Toss Up in the latest Cook Report scorecard ratings. That’s not all of it. Twelve other states have shifted as well with Donald Trump becoming the presumptive nominee and the shift is all in favor of the Democrats. Read it and weep: This has been an exceedingly unpredictable year. Although we remain convinced that Hillary Clinton is very vulnerable and would probably lose to most other Republicans, Donald Trump’s historic unpopularity with wide swaths of the electorate – women, millennials, independents and Latinos – make him the initial November underdog. As a result, we are shifting 13 ratings on our Electoral Vote scorecard, almost all of them favoring Democrats. Our assessments are based on publicly available polling, data on demographic change and private discussions with a large number of pollsters in both parties. Much could change, but undecided voters begin more hostile to Trump than Clinton. Here is the map and the states as they show now: Of course, things could change. Trump has shown at times he has good political instincts. The problem is, the media that has helped him is now going to put greater scrutiny on him. He will not be attacking fellow Republicans anymore but a Democrat and that will likely be Hillary Clinton. I am not confident things will change. The GOP could be headed for a historic defeat.
Lost in Translation: Detecting Tax Shelter Activity in Financial Statements Whether financial statements of public U.S. corporations provide sufficient information to the public to determine a corporation's tax payment to the U.S. Treasury and its involvement in "tax shelter" transactions has been much debated since the well publicized collapses of Enron Corporation and WorldCom, Inc. In this paper, we use specific examples to demonstrate how "income tax note" data can be analyzed to answer these two questions and, in so doing, point out the limitations of using financial accounting information to address tax-related issues. We conclude with suggestions to increase the transparency of a corporation's tax activities through enhanced disclosure.
Shipping Shipping rates vary. USA: Ground shipping in the US is $3.50 and takes 5-9 business days. I recommend Priority shipping in the US. Priority shipping is guaranteed to arrive within 2-3 business days for $5.50; this also includes an online tracking number and insurance. Overnight shipping can get pricey. It is $23 but arrives the next business day with insurance and a tracking number. CANADA: First class shipping to Canada costs $8.00 and does not include tracking or insurance. It also takes 1-2 WEEKS to arrive. Priority shipping to Canada is $25 but includes tracking and insurance. Priority will arrive in 2-5 business days. Message me for overnight rates. NETHERLANDS: First class shipping to the Netherlands costs $11.88 and does not include tracking or insurance. It also takes 1-2 WEEKS to arrive. Priority shipping to the Netherlands is $25 but includes tracking and insurance. Priority will arrive in 2-5 business days. Message me for overnight rates. UK: First class shipping to the UK costs $11.88 and does not include tracking or insurance. It also takes 1-2 WEEKS to arrive. Priority shipping to the UK is $25 but includes tracking and insurance. Priority will arrive in 2-5 business days. Message me for overnight rates. EVERYWHERE ELSE: Ground shipping is $18.00 with no tracking and no insurance. To ensure the best rate, please send me a convo before purchasing. THANK YOU
//--------------------------------------------------------------------------- /// Book histograms and create ordered collection for easy manipulation void StTofpMatchMaker::bookHistograms(void){ mHitPosHistNames = new TOrdCollection; hTofpHitMap1 = new TH2D("tofpHitMap1","valid hit positions", 500,-250,0, 120,-1.23, -1.08); mHitPosHistNames->AddLast(hTofpHitMap1); hTofpHitMap2 = new TH2D("tofpHitMap2","valid hit positions", 500,-250,0, 120,-1.23, -1.08); mHitPosHistNames->AddLast(hTofpHitMap2); hTofpHitMap3 = new TH2D("tofpHitMap3","valid hit positions", 500,-250,0, 120,-1.23, -1.08); mHitPosHistNames->AddLast(hTofpHitMap3); hTofpHitMap4 = new TH2D("tofpHitMap4","valid hit positions", 500,-250,0, 120,-1.23, -1.08); mHitPosHistNames->AddLast(hTofpHitMap4); hTofpSlatHitVecSize = new TH1D("SlatMult","Slat Mult per Track",10,0,10); mHitPosHistNames->AddLast(hTofpSlatHitVecSize); hTofpSlatIdA0 = new TH1D("tofpSlatIdA0","events per slat",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdA0); hTofpSlatIdA1 = new TH1D("tofpSlatIdA1","valid slat",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdA1); hTofpSlatIdB1 = new TH1D("tofpSlatIdB1","#tracks match valid slat",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdB1); hTofpSlatIdD1 = new TH1D("tofpSlatIdD1","track match per valid slat",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdD1); hTofpSlatIdD2 = new TH1D("tofpSlatIdD2","single track match per slat",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdD2); hTofpSlatIdE1 = new TH1D("tofpSlatIdE1","one slat for one track match",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdE1); hTofpSlatIdE2 = new TH1D("tofpSlatIdE2","recovered from hitprof-weight",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdE2); hTofpSlatIdE3 = new TH1D("tofpSlatIdE3","recovered from ss",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdE3); hTofpSlatIdE4 = new TH1D("tofpSlatIdE4","recovered from closest hitplane",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdE4); hTofpSlatIdE5 = new TH1D("tofpSlatIdE5","total recovered slat per track match",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdE5); hTofpSlatIdF1 = new TH1D("tofpSlatIdF1","primary track match per slat",41,0.5,41.5); mHitPosHistNames->AddLast(hTofpSlatIdF1); mTrackHistNames = new TOrdCollection; hTofpNumberOfTrackHits = new TH1D("tofpNumberOfTrackHits","numberOfTrackHits",80,0,80); mTrackHistNames->AddLast(hTofpNumberOfTrackHits); hTofpPtTrack = new TH1D("tofpPtTrack","ptTrack",250,0.,10); mTrackHistNames->AddLast(hTofpPtTrack); hTofpDCATrackprimVertex = new TH1D("tofpDCATrackprimVertex","DCA distribution",6000,-30.,30.); mTrackHistNames->AddLast(hTofpDCATrackprimVertex); mOccupancyHistNames = new TOrdCollection; hTofpNumberOfValidAdc = new TH1D("tofpNumberOfValidTdc","numberOfValidTdc",41,0,41); mOccupancyHistNames->AddLast(hTofpNumberOfValidAdc); hTofpNumberOfValidTdc = new TH1D("tofpNumberOfValidAdc","numberOfValidAdc",41,0,41); mOccupancyHistNames->AddLast(hTofpNumberOfValidTdc); hTofpNumberOfValidSlats = new TH1D("tofpNumberOfValidSlats","numberOfValidSlats",41,0,41); mOccupancyHistNames->AddLast(hTofpNumberOfValidSlats); hTofpNumberOfGlobalTracks = new TH1D("tofpNumberOfGlobalTracks","numberOfGlobalTracks",50,0,50); mOccupancyHistNames->AddLast(hTofpNumberOfGlobalTracks); hTofpNumberOfHitSlats = new TH1D("tofpNumberOfHitSlats","numberOfHitSlats",50,0,50); mOccupancyHistNames->AddLast(hTofpNumberOfHitSlats); hTofpNumberOfSingleHitTracks = new TH1D("tofpNumberOfSingleHitTracks","numberOfSingleHitTracks",50,0,50); 
mOccupancyHistNames->AddLast(hTofpNumberOfSingleHitTracks); hTofpNumberOfSingleValidHitTracks = new TH1D("tofpNumberOfSingleValidTracks","numberOfSingleValidHitTracks",50,0,50); mOccupancyHistNames->AddLast(hTofpNumberOfSingleValidHitTracks); mMatchHistNames = new TOrdCollection; char buf[20]; for (int i=0;i<NTOFP;i++){ sprintf(buf,"tofpSlathit_%d",i+1); hTofpMatchHit[i] = new TH2D(buf,buf,5,-2.5,2.5,5,-2.5,2.5); hTofpMatchHit[i]->SetXTitle("iEta"); hTofpMatchHit[i]->SetYTitle("iPhi"); mMatchHistNames->AddLast(hTofpMatchHit[i]); sprintf(buf,"tofpSlatnohit_%d",i+1); hTofpMatchNoHit[i] = new TH2D(buf,buf,5,-2.5,2.5,5,-2.5,2.5); hTofpMatchNoHit[i]->SetXTitle("iEta"); hTofpMatchNoHit[i]->SetYTitle("iPhi"); mMatchHistNames->AddLast(hTofpMatchNoHit[i]); } return; }
Upregulated expression level of the growth factor, progranulin, is associated with the development of primary Sjögren’s syndrome The aim of the present study was to investigate the expression and effect of progranulin (PGRN) in patients with primary Sjögren’s syndrome (pSS). In total, 26 newly diagnosed pSS patients and 26 healthy subjects were enrolled in this study. The serum levels of PGRN and the inflammatory factor, interleukin-6 (IL-6), were detected using ELISA. In addition, the mRNA expression levels of these molecules were detected by quantitative polymerase chain reaction. The serum levels of PGRN and IL-6 in the pSS patients increased significantly compared with the healthy controls (P<0.05). During the remission stages, the levels of PGRN and IL-6 were comparable to those of the healthy controls. The serum level of PGRN in the pSS patients was shown to correlate with that of IL-6 in the pre-treatment and post-treatment stages. PGRN was upregulated in the pSS patients, indicating a possible role of PGRN in the pathogenesis and development of pSS. Introduction Sjögren's syndrome is a common chronic autoimmune disease characterized by lymphocytic infiltration of glands and ocular and oral dryness, which primarily affects the salivary and lacrimal glands. This syndrome may occur as a primary Sjögren's syndrome (pSS), or in association with other systemic autoimmune diseases, such as rheumatoid arthritis and systemic lupus erythematosus (1). Sjögren's syndrome may manifest within a wide spectrum of diseases, ranging from a limited, organ-specific autoimmune exocrinopathy to a systemic disease with widespread autoimmune manifestations and pronounced immunological features (2). pSS is characterized by polyclonal B cell activation, leading to chronic hypergammaglobulinemia, increased levels of β2-microglobulin and the concomitant presence of a variety of autoantibodies (3). Multiple factors, including viral infection, hormonal balance and genetic background, are involved in the pathogenesis of pSS. The presence of T and B cells, macrophages and dendritic cells varies according to the severity of the lesion (4). The influence of abnormal cytokine production in this disease has attracted considerable attention (5). Progranulin (PGRN) is an autocrine growth factor with multiple physiological and pathological functions. PGRN can bind to tumor necrosis factor receptors and is therapeutic against inflammatory arthritis in mice (6). Therefore, PGRN is a potential target for the treatment of autoimmune diseases. However, the changes in PGRN expression in pSS patients remain unclear. In the present study, the serum levels of PGRN in the peripheral blood of pSS patients and healthy controls were examined to investigate the possible role of PGRN in the pathogenesis and development of pSS. Materials and methods Patients. In total, 26 newly diagnosed pSS patients were recruited for the study. All patients met the criteria revised by the American College of Rheumatology in 1997 for the classification of pSS (7). None of the patients had been treated with immunosuppressive drugs prior to specimen collection. The patients received symptomatic and supportive treatment, as well as immunosuppressive therapy, within a period of 21 consecutive days. Peripheral blood samples were collected from the patients. The control group included 26 healthy volunteers, matching the gender and ages of the pSS patients (female, 25; male, 1; age range, 24-65 years; median age, 44.8±10.96 years).
All the subjects signed informed consent forms prior to entering the study. Ethical approval for the research was obtained from the Medical Ethical Committee of Qilu Hospital, Shandong University (Jinan, China). ELISA. Coagulated blood (5 ml) was collected from each patient and control subject prior to and following the administration of prednisone. The blood was centrifuged (5000 x g for 10 min at 4˚C) and the serum specimens were stored at -80˚C. The serum levels of PGRN and the inflammatory factor, interleukin-6 (IL-6), were measured using a commercial ELISA kit (Yonghui Company, Beijing, China), according to the manufacturer's instructions. Quantitative polymerase chain reaction (qPCR). Peripheral blood mononuclear cells were separated using red blood cell lysis buffer (Pharmacia Diagnostics, Uppsala, Sweden) and the total RNA was isolated using TRIzol reagent (Invitrogen Life Technologies, Carlsbad, CA, USA), according to the manufacturer's instructions. An Eppendorf Biophotometer (Brinkmann Instruments, Westbury, NY, USA) was used to determine the RNA concentration, and the concentration was adjusted to 1 µg/ml for reverse transcription. The RNA was reverse-transcribed to form cDNA using a ReverTra Ace qPCR RT kit (Toyobo Corporation, Osaka, Japan). qPCR was performed using the Light Cycler TaqMan Master kit (Toyobo Corporation), according to the manufacturer's instructions, on a Bio-Rad IQ5 detection system (Bio-Rad Laboratories, Hercules, CA, USA). Fluorescence qPCR was performed using SYBR Green (Toyobo Corporation). Each sample was determined in triplicate, and the qPCR products were run on agarose gels to confirm the expected size of the samples. Melting-curve analysis was also performed to ensure the specificity of the products. The relative mRNA expression levels of IL-6 were determined using the comparative Ct method, using arithmetic formulae from the relative expression software tool (Bio-Rad Laboratories). The relative expression of PGRN was calculated using the ΔΔCt method. The expression of mRNA was normalized against the expression of the GAPDH gene. Immunoblot analysis. Total proteins were harvested from the blood collected from the patients and control group. The proteins were separated using 10% SDS/PAGE, and subjected to immunoblot analyses. The primary mouse anti-human PGRN monoclonal antibody (clone 296628) was purchased from R&D Systems (Minneapolis, MN, USA), while the primary mouse anti-GAPDH monoclonal and secondary horseradish peroxidase-conjugated goat anti-mouse antibodies were purchased from Santa Cruz Biotechnology, Inc. (Santa Cruz, CA, USA). Bound antibodies were quantified using an enhanced chemiluminescence system (Pierce Biotechnology, Inc., Rockford, IL, USA). The experiments were performed three times. Statistical analysis. Statistical analysis was performed using SPSS 17.0 software (SPSS, Inc., Chicago, IL, USA). The data are presented as the median ± interquartile range and were analyzed with the Mann-Whitney U test. Comparisons among the pre-treated, post-treated and control groups were performed with an independent sample non-parametric test. In addition, correlations between PGRN and IL-6 levels were assessed using Spearman's rank correlation. P<0.05 was considered to indicate a statistically significant difference. Results. mRNA and protein expression levels of PGRN are increased in pSS patients.
In this study, 26 newly diagnosed pSS patients were enrolled (Table I). The control group included 26 healthy volunteers, matching the gender and ages of the pSS patients. Among the pSS patients (Table I), 25 were female and one was male, with an age range of 24-65 years (median age, 44.8±10.96 years). The course of the disease from the initial appearance of symptoms to the enrollment in the study varied between 2 and 98 months (median disease course, 20.4±22.0 months). To determine the mRNA expression level of PGRN, peripheral blood mononuclear cells from the healthy controls and pSS patients were separated prior to and following treatment with prednisone. The total mRNA was isolated and the mRNA expression level of PGRN was investigated with qPCR. Using IQ5 software, the data are presented as the fold change in the gene expression normalized against GAPDH. As shown in Fig. 1A, there was a 3.45-fold increase in the relative mRNA expression of PGRN in the pSS patients prior to treatment with prednisone (10 mg) when compared with the healthy controls (P<0.05; Fig. 1A). Following treatment with prednisone, the mRNA expression level of PGRN showed only a 1.6-fold increase when compared with the healthy controls (P<0.05). The difference in the expression levels before and after treatment with prednisone was statistically significant (P<0.05). In order to determine the protein expression levels of PGRN, peripheral blood mononuclear cells were separated and the total proteins were isolated and determined by immunoblotting. The protein expression level of PGRN was increased in the pSS patients prior to treatment when compared with the healthy controls (P<0.05; Fig. 1B). Following treatment, the protein expression levels of PGRN decreased (Fig. 1B). These results indicated that PGRN expression may be positively associated with the development of pSS. Serum PGRN levels are increased in pSS patients. ELISA was performed to investigate the serum levels of PGRN (Table II), and IL-6 served as the cytokine control. As demonstrated in Table II, the levels of PGRN in pSS patients were significantly upregulated when compared with the healthy controls (P<0.05; Table II). In addition, the difference between the PGRN levels prior to and following prednisone treatment was statistically significant (P<0.05). Following treatment, the serum levels of PGRN were significantly downregulated, but remained higher than the healthy control levels (P<0.05; Table II). The IL-6 levels were higher in pSS patients prior to treatment when compared with the healthy control (P<0.05) and post-treatment patient groups (P<0.05; Table II). Therefore, the PGRN level in the patients was altered based on the development of pSS. PGRN levels correlate with IL-6 in pSS patients. To examine the association between the serum levels of PGRN and the pSS-related inflammatory factor, IL-6, Spearman's rank correlation analysis was performed in pSS patients prior to and following treatment. The results demonstrated that the serum level of PGRN in the pre-treatment group correlated with the level of IL-6 (r=0.617, P=0.001; Fig. 2A). Similarly, the serum level of PGRN in the post-treatment group correlated with the level of IL-6 (Fig. 2B). Discussion PGRN is an autocrine growth factor containing 7.5 repeats of a cysteine-rich motif in the order, P-G-F-B-A-C-D-E, where P is the half motif (8).
PGRN is predominantly expressed in epithelial and immune cells, neurons (9) and chondrocytes (10), and high expression levels of PGRN are found in a variety of human cancer types (11). Several studies have revealed that PGRN plays an important role in a number of pathological processes, including early embryonic development, wound healing and inflammation (12)(13)(14)(15)(16)(17). PGRN also functions as a regulator of cartilage development and degradation (18). PGRN, binding directly to the tumor necrosis factor receptor, is involved in a number of physiological and pathological functions. Upregulation of PGRN has been reported in chemotherapy-induced amenorrhea (19). The present study demonstrated that the levels of PGRN in the peripheral blood were upregulated in the pre-treated and post-treated pSS patients when compared with the healthy controls, indicating that PGRN may be involved in the development of pSS. In the pre-treated pSS patients, the levels of IL-6 were higher compared with the control and post-treated patient groups. In addition, the IL-6 levels were shown to linearly correlate with the levels of PGRN (P<0.05). IL-6 has been identified as an important factor in the pathogenesis of pSS (20), and murine lupus models have demonstrated the involvement of IL-6 in B-cell hyperactivation and the onset of systemic lupus erythematosus (21,22). In conclusion, the present study demonstrated that PGRN is upregulated in pSS patients, indicating a possible role of PGRN in the pathogenesis and development of pSS.
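A note on the relative quantification mentioned in the Methods above: in the standard ΔΔCt calculation (a generic sketch, not data from this study), ΔCt = Ct(PGRN) − Ct(GAPDH) for each sample, ΔΔCt = ΔCt(patient) − ΔCt(control), and the fold change is 2^(−ΔΔCt). For example, with hypothetical values ΔCt(patient) = 3.0 and ΔCt(control) = 4.8, ΔΔCt = −1.8 and the fold change is 2^1.8 ≈ 3.5, on the order of the 3.45-fold increase reported above for pre-treatment patients.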
A second order approximation for the light scattering by uniform nanoegg dimers in the quasistatic regime We study the scattering of light by nanoegg dimers using a computational scheme that allows for their efficient simulation. We utilize the quasistatic approximation combined with translation relations for spherical harmonic functions, which allow an easier transition between different coordinate systems and make the scattering problem easier to solve. We use this scheme to simulate the field enhancement and resonance properties of nanoegg dimers and to study the effect of the various parameters on these properties.
import { HttpClient } from '@angular/common/http'; import { Injectable } from '@angular/core'; import { ECPair, networks, payments } from 'bitcoinjs-lib'; import { Observable, of, ReplaySubject } from 'rxjs'; import { environment } from '../../environments/environment'; import { EnderecoModel } from '../models/endereco.model'; /** * Service responsible for managing the wallet. */ @Injectable({ providedIn: 'root' }) export class CarteiraService { // addresses registered for the miner private cacheEnderecosMinerador: EnderecoModel[]; // addresses registered for the wallet private cacheEnderecosCarteira: EnderecoModel[]; constructor( private httpClient: HttpClient, ) { } /** * Lists the addresses registered for the miner. */ listarEnderecosMinerador(): Observable<EnderecoModel[]> { // use cached data if available if( this.cacheEnderecosMinerador ){ return of(this.cacheEnderecosMinerador); } // request the list of addresses const result = this.httpClient .get<EnderecoModel[]>(`${environment.urlApiServer}/api/v1/enderecos/minerador`); // cache the received response result.subscribe(response => { this.cacheEnderecosMinerador = response; }); // return the result of the request return result; } /** * Lists the addresses registered for the wallet. */ listarEnderecosCarteira(): Observable<EnderecoModel[]> { // use cached data if available if( this.cacheEnderecosCarteira ){ return of(this.cacheEnderecosCarteira); } // request the list of addresses const result = this.httpClient .get<EnderecoModel[]>(`${environment.urlApiServer}/api/v1/enderecos/carteira`); // cache the received response result.subscribe(response => { this.cacheEnderecosCarteira = response; }); // return the result of the request return result; } /** * Generates a new random address valid for the Bitcoin TestNet. */ gerarNovoEndereco() { // key pair data let keyPair = ECPair.makeRandom() // generate an address valid for the testnet network let { address } = payments.p2pkh({ pubkey: keyPair.publicKey, network: networks.testnet }); // return the generated address return address; } /** * Saves the given address in the list of addresses registered for the wallet. */ salvarNovoEndereco(dados: EnderecoModel): boolean { // sanitize the new data dados.endereco = dados && dados.endereco && dados.endereco.length > 34 ? dados.endereco.substr(0, 34) : dados.endereco; dados.rotulo = dados && dados.rotulo && dados.rotulo.length > 20 ? dados.rotulo.substr(0, 20): dados.rotulo; // save the address this.cacheEnderecosCarteira.push(dados); // report success return true; } }
use crate::errors::{ChassisError, ChassisResult}; use crate::model::{Block, ComponentTrait, Module}; use crate::parse::{eq_attr_name, parse_component, parse_module}; use proc_macro2::{Group, Punct, Spacing, TokenStream, TokenTree}; use std::fs::File; use std::path::PathBuf; use std::{env, fs}; use syn::Item; pub fn export_module(module: Module) -> ChassisResult<()> { let module_json_path = PathBuf::from(env!("OUT_DIR")) .join("chassis-modules") .join(env::var("CARGO_CRATE_NAME").unwrap()) .join(format!("{}.json", module.name)); // create the target directory and write the module description as JSON fs::create_dir_all(module_json_path.parent().unwrap()) .expect("failed to create chassis-modules directory"); serde_json::to_writer(File::create(&module_json_path).unwrap(), &module) .expect("failed to serialize chassis module"); eprintln!("Created chassis module at {:?}", module_json_path); Ok(()) }
Blood transfusion in deceased donor kidney transplantation Background Given the unpredictable timing of deceased donor organs and the need for blood transfusion, this study was carried out to determine the rate and risk factors for transfusion in order to identify a low-risk cohort in the face of a critical blood shortage. Methods This retrospective chart review examined 306 consecutive deceased solitary kidney transplant recipients from January 2006 to August 2012. Results Records show that 80 (26.1%) patients were transfused with a total of 300 units (0.98 units/transplant) during their first hospital stay. Transfusions were higher in patients on warfarin (8/14, 57%, 5.1 units/transplant) and antiplatelet agents (46/136, 33.8%, 1.1 units/transplant) compared to no anticoagulants (26/156, 16.7%, 0.47 units/transplant). In a multivariable logistic regression analysis, warfarin (odds ratio (OR) 8.2, 95% confidence interval (CI) 2.5–27, P=0.001), antiplatelet agents (OR 2.9, 95% CI 1.6–5.3, P=0.001), recipient age ≥55 years (OR 2.2, 95% CI 1.2–3.9, P=0.008), male recipient (OR 0.36, 95% CI 0.2–0.64, P=0.001) and preop hemoglobin ≥115 g/L (OR 0.32, 95% CI 0.18–0.57, P<0.001) were independent predictors of blood transfusion. Lower bleeding cohorts with transfusion rates <5% could not be identified. Conclusion The need for blood is significantly higher in subjects on either warfarin or antiplatelet agents. These patients might be avoided if kidney transplantation is to occur during a critical blood shortage. Unfortunately, even patients not on anticoagulation are at some risk. Background The need for blood transfusion after kidney transplantation is not well studied. Rates in one study approached 50% in those receiving deceased donor organs, a rate that seems quite high. The need for blood transfusion with respect to anticoagulation has been addressed in two studies. One reported that patients receiving anticoagulation were not at increased risk of bleeding, the second concluded the use of warfarin preoperatively was not associated with adverse outcomes in a small case-control series. These observations seem counter to the experience at our center. Lastly, the Canadian Committee on Blood and Blood Products issued a framework document on rationing blood during a shortage. The document specifically addressed transplantation and requested that centers collect current rates of blood transfusion in order to categorize high- and low-risk procedures and groups. Knowing risks would aid in individualizing informed consent. The document recommended delaying live donation procedures but allowed deceased donation surgery with informed consent. The purpose of this study was to examine the prevalence of blood transfusion in deceased organ donor solitary kidney transplantation at our center, to identify risk factors for transfusion need, in particular anticoagulation status, and to examine the level of hemoglobin that triggered transfusion. Ideally a low-risk cohort could be identified that could be transplanted in the face of a critical blood shortage to avoid organ waste. Methods Consecutive deceased donor organ transplant recipients of solitary kidneys were identified in the Program's electronic database from January 2006 until November 2012. Combined organ and living donor transplant procedures were excluded. Data were extracted from our electronic health records which came into existence in January 2006. Approval for this retrospective study was obtained from our institution's research ethics board.
All patients received induction with either basiliximab or antithymocyte globulin (ATG) and all received preoperative solumedrol (500 mg) intravenously. ATG was reserved for those who were repeat transplant recipients, deceased cardiac donor recipients, and the highly sensitized. Postoperative patients received oral tacrolimus, mycophenolate, and prednisone. All patients were treated with sulfamethoxazole-trimethoprim for pneumocystis prophylaxis and oral ranitidine for gastrointestinal ulcer prophylaxis. Patients on a proton pump inhibitor pre-transplant were continued on this agent in place of ranitidine. All patients were to receive subcutaneous heparin 5000 units twice or thrice daily for venous thromboembolism prophylaxis. Patients on warfarin received vitamin K and fresh frozen plasma to reduce their INR to <1.5. However, two patients received a prothrombin complex to reduce their INR. No therapy was given to patients on antiplatelet agents. Data extracted included packed red cells transfused, hemoglobin at transfusion, anticoagulation on admission (warfarin, antiplatelet use, or both), erythropoiesis stimulating agents, cause of ESRD, diabetes mellitus, age, gender, delayed graft function (DGF, defined as the need for dialysis post transplantation), days in hospital, cold ischemic time, CMV status, HLA sensitization as defined by most recent panel reactive antibody (PRA,%), reversal of anticoagulation therapy (in the case of warfarin treated patients, postoperative prophylaxis). Patient data for transfusions were limited to the first hospitalization or to 30 days, whichever came first. Variables associated with transfusion need were examined by binary logistic regression analysis. Variables statistically associated with transfusion in a univariable analysis were studied in a multivariable model. For age and pre-transplantation hemoglobin, receiver operating characteristic curves were used to identify values with the best sensitivity and specificity. Significance was assumed at 5%. Inappropriate transfusion was defined as initiation of therapy for hemoglobin >90 g/L. Statistical analysis was performed using IBM SPSS Statistics software version 20.0. Results There were 306 patients in the cohort. Blood transfusion occurred in 80 (26.1%) patients for a total of 300 units (0.98 units/transplant). Characteristics associated with the need for blood are shown in Table 1. Transfusion rates (Table 2) were higher in patients on warfarin (8/14, 57%, 5.1 units/transplant) and antiplatelet agents (46/136, 33.8%, 1.14 units/transplant) compared to those not on anticoagulants (26/156, 16.7%, 0.47 units/transplant). Overall, transfused patients received 3.8±3.8 units. Slightly more than half (44/80) of patients transfused received either 1 (n=14) or 2 (n=30) units. Only 18 (5.9%) patients received ≥5 units and six received ≥10 units. Of the 18 patients requiring 5 or more units, 15 were either on antiplatelet agents or warfarin. Therefore only three (1.9%) patients on no anticoagulants, nine (6.6%) on antiplatelet agents, and four (29%) on warfarin had a large transfusion requirement (≥5 units). Of the patients on antiplatelet agents, almost all were on aspirin and only four were on clopidogrel. The mean hemoglobin at transfusion was 68±8 g/L. Only 9 units were transfused for a hemoglobin ≥90 g/L and 21 for a hemoglobin level between 80 and 89 g/L. Therefore 90% of transfused units were for a hemoglobin level <80 g/L.
Transfusion decisions were not consistent, as 60 patients had nadir hemoglobin levels <80 g/L and were not transfused. Lower bleeding cohorts could be identified; however, the reduction in blood use was minimal. For example, those on no anticoagulants and male sex reduced the cohort to 83 patients (27%) with a transfusion rate of 10.8% (0.37 units/transplant). Those on no anticoagulants and a hemoglobin ≥115 g/L reduced the cohort to 31% of the overall population sample with a transfusion rate of 12.5% (0.36 units/transplant). Restricting to those on no anticoagulants and recipient age <55 years reduced the cohort to 36% of the sample with a transfusion rate of 13.6%. Men, no anticoagulants and hemoglobin >114 g/L represented only 51 patients (16.7% of the cohort) with a transfusion rate of 9.8% (0.26 units/transplant). The lowest transfusion cohort consisted of men <55 years of age, no anticoagulants and hemoglobin ≥115 g/L. These represented only 40 patients (13% of the entire cohort) and 7.5% were transfused. Discussion Based on this analysis, patients on anticoagulants, especially warfarin, are at particularly increased risk of requiring blood transfusion post kidney transplantation. This analysis shows that transfusion is also not uncommon (16.7%) in those not on anticoagulants. Smaller cohorts could be identified but this greatly restricted access without greatly reducing the need for blood. This analysis was restricted to deceased donors since live donation could be delayed during a blood shortage. Transfusion was also higher in male donors (OR 1.9, P=0.014); however, this was not included in the analysis since most would want to proceed with the transplantation regardless to avoid organ waste. Donor age was not significantly associated with transfusion. There may be other factors that correlate with transfusion, including the presence of heart disease. Unfortunately, this was not specifically examined. Those who were on antiplatelet agents were almost certain to have vascular disease or be at high risk for cardiovascular events. Older patients were more likely to be transfused and this might have been due to a perceived higher risk of vascular disease. Patients at high risk of cardiovascular events might also be avoided during a critical blood shortage but this was not specifically examined. The frequency of blood transfusion was greater than appreciated but in keeping with other studies. In a single center study from the US, Scornik et al. found rates of transfusion to be 51% in deceased donor kidney recipients and 30% in live donor recipients. In our study, 26% of deceased donor and 14% of 209 live donor kidney recipients over the same time period (data not shown) were transfused. In another single center US study, 25% of kidney recipients were transfused. A UK study reported 45% of patients on warfarin and 29% of non-warfarin-treated patients required blood. In aggregate, our transfusion rates appear to be the same as or lower than these reports. The impact of anticoagulation on transfusion rates was analyzed in two of the above studies. The UK study, as noted above, reported that those on warfarin were at no increased risk (45% versus 29%). However, it is not clear if the controls included subjects on antiplatelet agents, and given the small sample size a significant difference may have been missed. In the larger study by Eng et al., pre-transplant anticoagulation was not associated with the need for transfusion.
However, patients receiving postoperative heparin required more blood. Their study may have included live and deceased donor recipients. Analyzing both live and deceased donors may have reduced the likelihood of detecting an effect of anticoagulation on transfusion rates in deceased donors. In an analysis of our live donors, anticoagulation with antiplatelet agents or warfarin was also not associated with bleeding (data not shown). More of our patients were on antiplatelet agents (136 of 306) compared to the Eng study (69 of 327). In the Eng study, those on antiplatelet agents who were transfused tended to receive more units on average (3.5 units for clopidogrel, 3.3 units for ASA, and 2.5 units for no anticoagulation). Differences between centers may well be an important determinant of transfusion practice and rates. It is not clear how closely our transfusion practice approached the ideal. A recent meta-analysis of blood transfusion practice did not find any adverse effects of a restrictive policy of blood transfusion compared to a liberal policy. Unfortunately, none of the 19 studies were of kidney transplant recipients, but they did include a variety of subjects with acute bleeding in the context of surgery. Adverse effects, including 30-day mortality, stroke, myocardial infarction, and length of stay, showed a trend toward being lower in the restrictive arm. For the most part, our practice approached the restrictive end of the spectrum (mean hemoglobin at transfusion was 68 g/L); however, 10% were transfused at more liberal rates (hemoglobin >90 g/L). A more restrictive policy may have reduced the need for transfusion in our center without compromising outcomes. The effects of blood transfusion on new HLA antibody formation appear to be low in the short term. However, the cumulative effect of transfusions may compromise outcomes in those needing a repeat transplant. Conclusions Given the above, restricting access to deceased kidney transplantation to those not on anticoagulants might be prudent during a critical blood shortage. Patients undergoing transplantation during a blood shortage should be informed of their risk. At our center, the risk is small (2%) for a large bleed but modest (16.7%) for needing some blood for those not on anticoagulants. The rates are considerably higher for those on antiplatelet agents and warfarin. Each center should review its transfusion practice and know its transfusion rates to better inform its patients.
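For readers who want to reproduce the kind of multivariable analysis described in the Methods above, a minimal sketch in Python follows. It is only an illustration of the approach: the file name and column names are hypothetical placeholders, not the study's dataset, and the original analysis was performed in IBM SPSS Statistics 20.0.

import numpy as np
import pandas as pd
import statsmodels.api as sm

# Hypothetical extract of the chart-review data; column names are placeholders.
df = pd.read_csv("transfusion_cohort.csv")

# Dichotomize age and preoperative hemoglobin at the cut-offs identified by ROC analysis.
df["age_ge_55"] = (df["recipient_age"] >= 55).astype(int)
df["hgb_ge_115"] = (df["preop_hgb"] >= 115).astype(int)

# Binary logistic regression of transfusion (yes/no) on the reported predictors.
predictors = ["warfarin", "antiplatelet", "age_ge_55", "male_recipient", "hgb_ge_115"]
X = sm.add_constant(df[predictors])
y = df["transfused"]
model = sm.Logit(y, X).fit()

# Odds ratios with 95% confidence intervals, analogous to those in the Results.
or_table = np.exp(model.conf_int())
or_table["OR"] = np.exp(model.params)
print(model.summary())
print(or_table)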
#include <cstdio> #include <cstring> #include <algorithm> #include <cmath> #include <map> #include <vector> using namespace std; int main() { int n,l,v1,v2,k; scanf("%d%d%d%d%d",&n,&l,&v1,&v2,&k); int m = ceil(n / (double)k); double tot1 = (double)l * (v1 + v2); double tot2 = (2 * v1 * (double)(m - 1) + v1 + v2); double tot = tot1 / tot2; double ans = (tot / v2) + (l - tot) / (double)v1; printf("%.12lf\n",ans); return 0; }
def register_callback(self, callback): callback(self._severity) self.logger_callbacks.append(callback)
/** * TODO: in the future, make it independent from PredicateImpl */ public class AtomPredicateImpl extends PredicateImpl implements AtomPredicate { protected AtomPredicateImpl(String name, ImmutableList<TermType> expectedBaseTypes) { super(name, expectedBaseTypes); } }
/* * Algorithm for spanNot()==span(SpanCondition.NOT_CONTAINED) * * Theoretical algorithm: - Iterate through the string, and at each code point boundary: + If the code point there * is in the set, then return with the current position. + If a set string matches at the current position, then * return with the current position. * * Optimized implementation: * * (Same assumption as for span() above.) * * Create and cache a spanNotSet which contains all of the single code points of the original set but none of its * strings. For each set string add its initial code point to the spanNotSet. (Also add its final code point for * spanNotBack().) * * - Loop: * + Do spanLength=spanNotSet.span(SpanCondition.NOT_CONTAINED). * + If the current code point is in the original set, then return the current position. * + If any set string matches at the current position, then return the current position. * + If there is no match at the current position, neither for the code point * there nor for any set string, then skip this code point and continue the loop. This happens for * set-string-initial code points that were added to spanNotSet when there is not actually a match for such a set * string. * * @return the length of the span */ private int spanNot(CharSequence s, int start, int length) { int pos = start, rest = length; int i, stringsLength = strings.size(); do { i = spanNotSet.span(s.subSequence(pos, pos + rest), SpanCondition.NOT_CONTAINED); if (i == rest) { return length; } pos += i; rest -= i; int cpLength = spanOne(spanSet, s, pos, rest); if (cpLength > 0) { return pos - start; } for (i = 0; i < stringsLength; ++i) { if (spanLengths[i] == ALL_CP_CONTAINED) { continue; } String string = strings.get(i); int length16 = string.length(); if (length16 <= rest && matches16CPB(s, pos, length, string, length16)) { return pos - start; } } pos -= cpLength; rest += cpLength; } while (rest != 0); return length; }
// Error returns error in a string representation. func (err Error) Error() string { var buf strings.Builder defer buf.Reset() buf.WriteString(errorsPrefix + " ") switch err { case ErrTooManyRequests: buf.WriteString("too many requests (limit has been exceeded)") case ErrUnauthorized: buf.WriteString("unauthorized (invalid credentials)") case ErrBadRequest: buf.WriteString("bad request (empty file or wrong format)") default: buf.WriteString("unknown error") } return buf.String() }
import { Parser } from "json2csv"; import { writeFile } from "fs"; import { config } from "../config"; const outputFile = "./output"; const excludingIDs = config.excludingIDs; const squash = (list: any[]) => list.map((obj) => obj.tweet); const addCustomFields = (list: any[]) => list.map((it: any) => { const hasAtMark = it.full_text.includes("@"); const hasHTTP = it.full_text.includes("http"); const isMoreThan139 = Array.from(it.full_text).length >= 139; const isExcludedID = excludingIDs.includes(Number(it.id)); const created_at_unixtime = new Date(it.created_at).getTime() / 1000; // REF: https://stackoverflow.com/questions/11893083/convert-normal-date-to-unix-timestamp const SHOULD_VIEW = !hasAtMark && !hasHTTP && isMoreThan139 && !isExcludedID; return Object.assign(it, { created_at_unixtime, hasAtMark, hasHTTP, isMoreThan139, isExcludedID, SHOULD_VIEW, }); }); const getFields = (obj: any) => Object.keys(obj); const jsonrize = (list: any[]) => list.reduce((prev, curr) => ({...prev, [curr.id]: curr}), {}) export const main = (operate: "csv" | "json", tweets: any[]) => { try { // shape before CSVrize const squashed = squash(tweets); const data = addCustomFields(squashed); // build csv from json let buildObject; if (operate === "csv") { const fields = getFields(data[0]); const parser = new Parser({ fields }); buildObject = parser.parse(data); } else if (operate === "json") { const json = jsonrize(data) buildObject = JSON.stringify(json); } else { throw new Error("invalid operate param."); } // write writeFile(`${outputFile}.${operate}`, buildObject, (err: any) => { if (err) throw err; }); } catch (e) { console.error(e); } };
from tqdm import tqdm def create_kg(kg, triples): """Add each (subject, predicate, object) triple to the knowledge graph kg, inserting a predicate vertex between the subject and object vertices.""" for (s, p, o) in tqdm(triples): s = clean_url(s) o = clean_url(o) p = clean_url(p) s_v = Vertex(str(s)) o_v = Vertex(str(o)) p_v = Vertex(str(p), predicate=True, _from=s_v, _to=o_v) kg.add_vertex(s_v) kg.add_vertex(p_v) kg.add_vertex(o_v) kg.add_edge(s_v, p_v) kg.add_edge(p_v, o_v)
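A small usage sketch for the function above, assuming the same project-level KG and Vertex classes and clean_url helper that the snippet already relies on; the KG class name and the triples below are hypothetical examples.

# Hypothetical usage: build a tiny knowledge graph from (subject, predicate, object) triples.
triples = [
    ("http://example.org/Alice", "http://example.org/knows", "http://example.org/Bob"),
    ("http://example.org/Bob", "http://example.org/livesIn", "http://example.org/Paris"),
]
kg = KG()  # assumed graph container exposing add_vertex/add_edge as used above
create_kg(kg, triples)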
/** * Update provided {@link MetadataLogger} log level settings from PU property. * Logger is updated only when {@code PROCESSOR} logging category level * is set properly in corresponding PU property. * * @param logger logger to be updated * @param persistenceUnit persistence unit with properties */ static void updateMetadataLogger(final MetadataLogger logger, final PersistenceUnit persistenceUnit) { String levelStr = persistenceUnit.getPersistenceUnitProperty(LogCategory.PROCESSOR.getLogLevelProperty()); if (levelStr == null) { levelStr = persistenceUnit.getPersistenceUnitProperty(CANONICAL_MODEL_GLOBAL_LOG_LEVEL); } if (levelStr != null) { final LogLevel level = LogLevel.toValue(levelStr); if (level != null) { logger.getSession().getSessionLog().setLevel(level.getId(), LogCategory.PROCESSOR.getName()); } } }
import java.util.Scanner; import java.util.stream.Stream; public class Main { public static void main(String[] args) { Scanner sc = new Scanner(System.in); // int[] S = Stream.of(sc.nextLine().split("")).mapToInt(Integer::parseInt).toArray(); String[] S = sc.nextLine().split(""); int v_mae = Integer.parseInt(S[0]+S[1]); int v_usiro = Integer.parseInt(S[2]+S[3]); boolean yymm_flg = false; boolean mmyy_flg = false; if(0 <= v_mae && v_mae <= 99 && 1 <= v_usiro && v_usiro <= 12 ) { yymm_flg=true; } if(0 <= v_usiro && v_usiro <= 99 && 1 <= v_mae && v_mae <= 12 ) { mmyy_flg=true; } if(yymm_flg && !mmyy_flg) { System.out.print("YYMM"); } else if(!yymm_flg && mmyy_flg) { System.out.print("MMYY"); } else if(yymm_flg && mmyy_flg) { System.out.print("AMBIGUOUS"); } else { System.out.print("NA"); } } }
The effect of synthesized cartilage tissue from human adipose-derived mesenchymal stem cells in orthopedic spine surgery in patients with osteoarthritis. Osteoarthritis is a joint disease that causes degeneration of articular cartilage, involvement of the subchondral bone, and inflammation of surrounding tissues. It can affect any joint, but the most commonly affected are the joints of the hands, feet, knees, hips, and spine. Patients with osteoarthritis need surgery in severe cases. The use of methods that increase the efficiency of this surgery has always been considered by researchers and surgeons. For this purpose, in the current study, the effect of synthesized cartilage tissue from human adipose-derived mesenchymal stem cells was considered in orthopedic spine surgery in patients with osteoarthritis. Thirty patients over the age of 60 who had severe spinal osteoarthritis and required surgery were selected. A pellet culture system of human adipose-derived mesenchymal stem cells from each patient was used to construct cartilage tissue. For 15 of them, in addition to implants, cartilage grafts were transplanted during surgery. All patients were monitored with the Oswestry Disability Index questionnaire for one year. In general, the results showed that over time, patients with transplanted cartilage tissue and implants were in a better condition than patients who underwent only implant surgery.
/** * Whether or not the Operation is done and result can be retrieved with * get(). * * The most common way to wait for this OperationFuture is to use the get() * method which will block. This method allows one to check if it's complete * without blocking. * * @return true if the Operation is done */ public boolean isDone() { assert op != null : "No operation"; return latch.getCount() == 0 || op.isCancelled() || op.getState() == OperationState.COMPLETE; }
/** * @brief Display the TEST Main Menu choices on HyperTerminal * @param None. * @retval None. */ void FW_APP_PrintMainMenu(void) { printf("\r\n=================== Main Menu ============================\r\n\n"); printf(" Download a new Fw Image ------------------------------- 1\r\n\n"); printf(" Test Protections -------------------------------------- 2\r\n\n"); printf(" Test SE User Code ------------------------------------- 3\r\n\n"); printf(" Multiple download ------------------------------------- 4\r\n\n"); printf(" Validate a FW Image------------------------------------ 5\r\n\n"); printf(" Selection :\r\n\n"); }
// GetTinyRAMInstance returns a pointer to a tinyRAM instance initialized from the given ASM program. func GetTinyRAMInstance(asmPath string, numRegister uint64, primary, auxiliary []uint64) (*tinyRAM, error) { ps, err := parseRawAsm(asmPath) if err != nil { return nil, err } tr := tinyRAM{ NumRegister: numRegister, Prog: ps, PrimaryInput: primary, AuxiliaryInput: auxiliary, Register: make([]uint64, numRegister), Memory: make([]uint64, 64), } return &tr, nil }
# | # _` | __ \ _` | __| _ \ __ \ _` | _` | # ( | | | ( | ( ( | | | ( | ( | # \__,_| _| _| \__,_| \___| \___/ _| _| \__,_| \__,_| import sys import math def read_line(): return sys.stdin.readline()[:-1] def read_int(): return int(sys.stdin.readline()) def read_int_line(): return [int(v) for v in sys.stdin.readline().split()] def read_float_line(): return [float(v) for v in sys.stdin.readline().split()] def gcd(a,b): if a == 0: return b return gcd(b % a, a) def printDivisors(n): i = 1 a,b = 0,0 while i <= math.sqrt(n): if (n % i == 0) : if gcd(i,n//i)==1: a = i b = n//i i = i + 1 return (a,b) x =read_int() ans = printDivisors(x) print(*ans)
from sklearn.model_selection import train_test_split import numpy as np import torch from torch.utils.data import Dataset from torchvision import transforms import h5py from skimage import io, transform import os import matplotlib.pyplot as plt def dataset_factory(use_images=True, image_folder="raw_data\\mapbox_api", transform=True, data_augment_angle=10): #Longitude,Latitude,Speed,Distance,Distance_x,Distance_y,PCI_64,PCI_65,PCI_302 selected_features = [0, 1, 3, 4, 5, 6, 7, 8] # # ['SINR', 'RSRP', 'RSRQ', 'Power'] selected_targets = [1] dataset_path='dataset' features = np.load("{}\\training_features.npy".format(dataset_path)) targets = np.load("{}\\training_targets.npy".format(dataset_path)) test_features = np.load("{}\\test_features.npy".format(dataset_path)) test_targets = np.load("{}\\test_targets.npy".format(dataset_path)) target_mu = np.load("{}\\targets_mu.npy".format(dataset_path)) target_std = np.load("{}\\targets_std.npy".format(dataset_path)) features_mu = np.load("{}\\features_mu.npy".format(dataset_path)) features_std = np.load("{}\\features_std.npy".format(dataset_path)) images = np.load("{}\\train_image_idx.npy".format(dataset_path)) test_images = np.load("{}\\test_image_idx.npy".format(dataset_path)) features = features[:, selected_features] test_features = test_features[:, selected_features] features_mu = features_mu[selected_features] features_std = features_std[selected_features] targets = targets[:, selected_targets] test_targets = test_targets[:, selected_targets] target_mu = target_mu[selected_targets] target_std = target_std[selected_targets] # Data augmentation if transform: composed = transforms.Compose([transforms.ToPILImage(), transforms.Grayscale(), transforms.RandomAffine(data_augment_angle, shear=10), transforms.ToTensor()]) else: composed = None # Dataset train_dataset = DrivetestDataset(features, targets, images, target_mu, target_std, features_mu, features_std, use_images, image_folder, transform=composed) #valid_dataset = DrivetestDataset(images, features, targets, valid_idx, target_mu, target_std, features_mean, features_std, use_images, image_folder) test_dataset = DrivetestDataset(test_features, test_targets, test_images, target_mu, target_std, features_mu, features_std, use_images, image_folder, transform=transforms.Compose([transforms.ToPILImage(), transforms.Grayscale(), transforms.ToTensor()])) return train_dataset, test_dataset class DrivetestDataset(Dataset): def __init__(self, features, targets, images, target_mu, target_std, feature_mu, feature_std, use_images, image_folder, transform=None): self.features = features self.targets = targets self.image_idx = images self.target_mu = target_mu self.target_std = target_std self.feature_mu = feature_mu self.feature_std = feature_std self.distances = (self.features[:,2] * self.feature_std[2])+self.feature_mu[2] self.targets_unnorm = (self.targets * self.target_std)+self.target_mu self.use_images = use_images self.image_folder = image_folder self.transform = transform def get_811Mhz_idx(self): return np.argwhere(np.asarray(self.features[:,7] != 1)) def get_2630Mhz_idx(self): return np.argwhere(np.asarray(self.features[:,7] == 1)) def __getitem__(self, index): idx = self.image_idx[index] X = torch.from_numpy(self.features[index]).float() # Features (normalized) if self.use_images: if self.image_folder == None: #images are then pointer to hdf5 image = self.image_idx[index] else: img_name = os.path.join(self.image_folder, "{}.png".format(idx)) image = io.imread(img_name) image
= image / 255 A = torch.from_numpy(image).float().permute(2,0,1) else: A = torch.tensor(0) y = torch.from_numpy(self.targets[index]).float() # Target dist = torch.abs(torch.tensor(self.distances[index])).float().view(1) # Unormalized distance dist = dist * 1000 # to meters if self.use_images: if self.transform: A = self.transform(A) return X, A, y, dist def __len__(self): return len(self.features) if __name__ == '__main__': train, test = dataset_factory() data = train.__getitem__(1) fig = plt.figure(figsize=(5,5)) plt.imshow(data[1].permute(1,2,0).numpy()) plt.show()
/* Static Dynamic Programming */
class Solution {
public:
    int countNumbersWithUniqueDigits(int n) {
        if (n == 0) return 1;
        // dp[i] = count of numbers with all-unique digits having at most i+1 digits;
        // kept static so results are memoized across calls.
        static vector<int> dp = {10};
        if ((int)dp.size() <= n) {
            for (int i = dp.size(); i < n; i++) {
                int num = 9, j = i;
                dp.push_back(1);
                // multiply out 9 * 9 * 8 * ... to count the numbers with exactly i+1 unique digits
                while (j >= 0) {
                    dp[i] *= num;
                    if (j < i) num--;
                    j--;
                }
                dp[i] += dp[i - 1]; // add the counts for all shorter numbers
            }
        }
        return dp[n - 1];
    }
};
<reponame>goncaloperes/Babylon.js
import "./engine.alpha";
import "./engine.computeShader";
import "./engine.cubeTexture";
import "./engine.debugging";
import "./engine.dynamicBuffer";
import "./engine.dynamicTexture";
import "./engine.multiRender";
import "./engine.query";
import "./engine.rawTexture";
import "./engine.readTexture";
import "./engine.renderTarget";
import "./engine.renderTargetCube";
import "./engine.storageBuffer";
import "./engine.uniformBuffer";
import "./engine.videoTexture";
package de.tuberlin.cit.livescale.messaging.messages; import java.util.Map; import java.util.UUID; import de.tuberlin.cit.livescale.messaging.AbstractMessage; /** * Test-Message * * @author louis * */ public class TestMessage extends AbstractMessage { private static final String FIELD_SIX = "FIELD_SIX"; private static final String FIELD_FIVE = "FIELD_FIVE"; private String fieldFive, fieldSix; @Override public void fromMap(Map<String, Object> messageMap) { super.fromMap(messageMap); this.setFieldFive((String) messageMap.get(FIELD_FIVE)); this.setFieldSix((String) messageMap.get(FIELD_SIX)); } /* * (non-Javadoc) * * @see de.tuberlin.cit.livescale.messaging.Message#getUUID() */ @Override public UUID getUUID() { return UUID.randomUUID(); } /* * (non-Javadoc) * * @see de.tuberlin.cit.livescale.messaging.Message#toMap(java.util.Map) */ @Override public void toMap(Map<String, Object> messageMap) { super.toMap(messageMap); messageMap.put(FIELD_FIVE, this.getFieldFive()); messageMap.put(FIELD_SIX, this.getFieldSix()); } /** * Returns the fieldFive. * * @return the fieldFive */ public String getFieldFive() { return this.fieldFive; } /** * Sets the fieldFive to the specified value. * * @param fieldFive the fieldFive to set */ public void setFieldFive(String fieldFive) { this.fieldFive = fieldFive; } /** * Returns the fieldSix. * * @return the fieldSix */ public String getFieldSix() { return this.fieldSix; } /** * Sets the fieldSix to the specified value. * * @param fieldSix the fieldSix to set */ public void setFieldSix(String fieldSix) { this.fieldSix = fieldSix; } }
<reponame>carped99/mago3d-converter<filename>tiles3d/src/types/tileset.rs use serde::{Deserialize, Serialize}; use crate::types::asset::Asset; use crate::types::extension_schema_json::ExtensionSchemaJson; use crate::types::extras_schema_json::ExtrasSchemaJson; use crate::types::properties::Properties; use crate::types::tile::Tile; #[derive(Clone, PartialEq, Debug, Deserialize, Serialize)] pub struct Tileset { pub asset: Asset, #[serde(skip_serializing_if = "Option::is_none")] pub extensions: Option<ExtensionSchemaJson>, #[doc = " Names of 3D Tiles extensions required to properly load this tileset."] #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "extensionsRequired")] pub extensions_required: Option<Vec<String>>, #[doc = " Names of 3D Tiles extensions used somewhere in this tileset."] #[serde(skip_serializing_if = "Option::is_none")] #[serde(rename = "extensionsUsed")] pub extensions_used: Option<Vec<String>>, #[serde(skip_serializing_if = "Option::is_none")] pub extras: Option<ExtrasSchemaJson>, #[doc = " The error, in meters, introduced if this tileset is not rendered. At runtime, the geometric "] #[doc = " error is used to compute screen space error (SSE), i.e., the error measured in pixels."] #[serde(rename = "geometricError")] pub geometric_error: f64, #[doc = " A dictionary object of metadata about per-feature properties."] #[serde(skip_serializing_if = "Option::is_none")] pub properties: Option<::std::collections::BTreeMap<String, Properties>>, #[doc = " The root tile."] pub root: Tile, }
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * Class : RDBWorkitemLogger
 * Usage : thread-safe, in-memory buffer of work-item log lines.
 */
public class RDBWorkitemLogger implements WorkitemLogger<RDBWorkitemLogger> {

    private ConcurrentLinkedQueue<String> buffer = new ConcurrentLinkedQueue<>();
    private boolean isFlushed = false;

    /** Appends a log line and returns this logger so calls can be chained. */
    public RDBWorkitemLogger append(String logLine) {
        this.buffer.add(logLine);
        return this;
    }

    /** Removes and returns the oldest buffered line, or null if the buffer is empty. */
    public String consumeOne() {
        return this.buffer.poll();
    }

    /** Dumps the buffer as one string with each buffered line on its own row. */
    public String dumpMultilineString() {
        return this.dumpString("\n");
    }

    /** Dumps the buffer as one string joined by the given delimiter. */
    public String dumpString(String delimiter) {
        return String.join(delimiter, this.buffer);
    }

    public int size() {
        return this.buffer.size();
    }

    public void clear() {
        this.buffer.clear();
    }
}
package services; import java.util.Collection; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.*; import org.springframework.security.authentication.TestingAuthenticationToken; import org.springframework.security.core.context.SecurityContext; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.transaction.TransactionConfiguration; import org.springframework.transaction.TransactionSystemException; import org.springframework.transaction.annotation.Transactional; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.util.Assert; import com.mchange.util.AssertException; import domain.Assessment; import domain.CreditCard; import domain.Innkeeper; import domain.Lodge; import domain.Pilgrim; import domain.Request; import domain.Route; import domain.Stage; import security.LoginService; import utilities.AbstractTest; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:spring/datasource.xml", "classpath:spring/config/packages.xml" }) @Transactional @TransactionConfiguration(defaultRollback = false) public class AssessmentServicePositiveTest extends AbstractTest { // Service under test ------------------------------------------------ @Autowired private AssessmentService assessmentService; // Tests -------------------------------------------------------------- @Test public void testPositiveCreateAssessment(){ authenticate("pilgrim2"); Assessment assessment; Integer lengthRating; Integer difficultyRating; assessment=assessmentService.create(36); lengthRating=5; difficultyRating=5; assessment.setLengthRating(lengthRating); assessment.setDifficultyRating(difficultyRating); assessmentService.save(assessment); unauthenticate(); } }
Karl Kautsky
The Social Revolution
Volume II: On the Day after the Social Revolution (Part 2)

The Organization of the Productive Process

The application of the two above mentioned methods of the trust to production has not exhausted the resources of the proletarian regime in relation to the increase of production. The productive process, considered as a continuous transaction, as a reproductive process, demands an undisturbed continuation not simply of production, but also of circulation. If production is to go on without interruption it is necessary not simply that there be laborers for the creation of products, but it is also necessary that there be no break in the securing of raw materials and essentials of production, the necessary tools and machines, and the means of sustenance for the laborers, and that no interruption occur and that the finished product find a sale.

A stoppage in circulation signifies an economic crisis. It stops in some cases because too much is produced of some wares. In this case the industrial plants from which these products came cannot further function in their full capacity because of the lack of sale for their products. They receive no money for their products, and the result of this is that they lack the means to buy raw materials, to pay wages and so forth. But crises can also occur because too little of certain wares has been produced, as for instance was the case in the crisis of the English cotton industry at the time of the war of the Rebellion in the United States, which for some time greatly disturbed the production of cotton. The crises are the worst scourges of the modern productive system. To abolish them is one of the most important tasks of a proletarian regime. This can be done only through the systematic regulation of production and circulation as well as of re-production.

It has already been admitted that the object of socialism is the organization of production. But a portion of this problem is already solved by capital in that it substitutes for a number of little independent industries the organization of production into one great industry in which thousands of laborers are employed. The trusts have already accomplished the organization of whole branches of industry. What, however, only a proletarian regime can accomplish is the systematic regulation of the circulation of products, the exchange between industry and industry, between producers and consumers, in which the idea of consumption is taken in its widest sense, so as to include not simply personal but productive consumption. The weaver, for example, consumes yarn in productive consumption, while the piece of bread that he eats is included in personal consumption.

The proletariat can only accomplish this regulation of the circulation of products by the abolition of private property in industry, and it not only can do this but it must do it if the process of production is to proceed under its direction and its regime is to be permanent. It must fix the amount of production of each individual social productive plant on a basis calculated upon the existing productive power (laborers and means of production) and the existing needs, and see to it that each productive plant has not only the necessary laborers but also the necessary means of production, and that the necessary products are delivered to the consumers. Is not this task, however, insoluble in the great modern States?
It would presuppose that in Germany the State is to become the director of production of two million productive plants and to act as medium for the circulation of this product, which will come to it partially in the form of means of production and partially as means of consumption to be distributed to sixty million consumers, of which each one has a special and changing need. The task appears overwhelming if one does not proceed from the point of view of regulating the necessities of humanity from above according to a very simple pattern and assigning to each one, barrack fashion, his portion, which would mean the lowering of modern civilization to a much lower stage. Are we destined then to come to a barrack or prison-like State? Certainly the problem is not simple. It is the most difficult which will come to the proletarian regime and will furnish it with many hard nuts to crack. But its difficulties must not be exaggerated. In the first place it must be remembered that we are not compelled to create out of nothing over night a complete organization of production and circulation. There is one existing at present of a certain character, or otherwise the existence of the present society would be impossible. The question is simply to transform this organization, which has hitherto been an unconscious one going on behind the shoulders of those engaged in it with friction, sorrow and woe, bankruptcies and crises, under the operation of the law of value, ever being readjusted, into a conscious system in which a previous calculation of all modifying factors will take the place of the retroactive corrections through the play of supply and demand. There is a proportionality between the different branches of labor to-day even though it is wholly incomplete and incompetent; it is necessary, not to introduce, but rather to make complete and permanent. As with money and with prices it is necessary to connect with that which is historically descended and not to build everything from the ground anew; but only to broaden out at some points or to restrict others and to formulate more clearly the loose relations. This problem is considerably restricted by the fact already discussed that the concentration of production in the most perfect productive plants has already perceptibly decreased the number of industries. Of the 2,146,972 businesses which constituted the industry of the German Empire in 1895 there were only 17,941 great businesses having more than 50 laborers (and these contained three million laborers out of the total number of eight million industrial workers). To be sure I do not assert that only these great industries will be retained in activity. To attempt to give absolutely exact figures of a future condition would be absurd. All the numbers herewith printed have simply the purpose of illustrating the problems which arise and not of narrowly setting forth how things will be formulated in reality. The relation of two million industrial plants to 18,000 great industries shows that the number of industrial plants would be perceptibly decreased under a proletarian regime. But the difficulties of the organization of production and circulation can be diminished in other directions as well as by a decrease in the number of plants. Production can be divided into two great fields; those in which the production is for consumption and those in which production is for production. 
The production of means of production, thanks to the extensive division of labor, has become to-day the most important portion of production and it continues to increase steadily. Scarcely a single article of consumption comes from the hand of a single producer, but all run through a number of productive processes so that those who finally fit it for our use are only the last in a long row of producers. The production of articles for consumption and for the means of production have a wholly different character. The production for further production belongs to the domain of gigantic industries such as the iron industry, mining, etc. These are all highly organized in owner’s agreements, cartels, etc. But even among the users of these means of production, operator’s agreements are already very extensive. In most cases in this held to-day the individual operator does not deal with individual operators, but union of operators with union of operators, industrial branch deals with industrial branch, and those places where the union of operators is least developed are just the regions in which there are relatively few producers and few consumers dealing with each other. For consumption is here not by an individual but by a whole industry. In the manufacture of spinning and weaving machines, for example, there were in 1895 1,152 businesses with 17,047 laborers. Of these, however, there were 774 industries which had only 1,474 laborers and were scarcely to be considered. Among the great industries there were only 73 with 10,755 laborers. Opposed to these were 200,000 textile industries (not simply spinners and weavers) whose numbers, as we have seen, may be reduced to a thousand or perhaps to a hundred. On the one side there remained after the completion of the concentration of production in the most perfect industries perhaps 50 manufacturers of machinery and on the other side 2,000 spinning and weaving establishments. Is it then so impossible that the former should agree with the latter in regard to the demand for machines, and that their production should be systematically regulated? With this relatively small number of purchasers and consumers it is easily conceivable that in the sphere of the production of the means of production to-day, production for the open market has already disappeared and production for orders, that is to say, regulated, thoughtful production and circulation has taken its place. The production of articles for consumption has another character. To be sure we have here the gigantic industries (sugar factories and breweries), but as a general thing the little industry is still generally dominant. Here it is necessary to satisfy the individual needs of the market, and the small industry can do this better than the large. The number of productive plants is here large and would not ordinarily be capable of reduction as in the production of means of production. Here also production for the open market still rules. But because of the greater number of consumers this is much more difficult to supervise than is production for production. The number of operators’ agreements is fewer here. The organization of the production and circulation of all articles of consumption accordingly offers much greater difficulties than that of the means of production. Here also we must again distinguish the two forms, namely: the production of necessary articles of consumption, and of luxuries. The demand for necessary articles of consumption ordinarily shows rather small fluctuations. 
It is quite definite. Day in and day out one needs the same amount of flour, bread, meat and vegetables. Year in and year out there is little change in the demand for boots and linen. On the other hand, the demand for means of consumption changes the more readily the more these take on the character of unnecessary luxuries, whose possession or use is agreeable but not indispensable. Here consumption is much more whimsical, but when we look closer we see that this really proceeds much less from the purchasing individual than from the industry itself. Changes in fashion, for example, springs not so much from the changes in taste of the public as from the necessity of the producer to render impossible of further use the old wares which have already been sold, in order to thereby appeal to consumers to purchase new wares. The new and modern goods must accordingly be very strikingly distinguished from the old. Next to the restlessness which lies in the very nature of the modern manner of production, this strife of the producer is the main cause of the rapid changes of fashion. It is this which first produces the new fashions and then makes them necessary to the public. The variations in demand for articles of consumption, especially of luxuries, are influenced much more by the variations in the income of the consumers than by variations in taste. These last variations again, so far as they do not remain isolated but really have a wide extension through society, so as to perceptibly influence consumption, arise from the contrast between prosperity and crises, from the contrast between the strong demand for labor and the increase of enforced idleness. When, however, we investigate the source of these variations we find that they spring from the held of the production of the means of production. It is universally known and recognized that to-day it is the iron industry especially which gives rise to crises. The alternation between prosperity and crises and therewith the great variations in the demand for articles of consumption also arises out of the sphere of the production of the means of production. In the other sphere, as we have already seen, the concentration of industry and the organization of production is already so far developed that it has made possible a really complete organization of production and circulation. Stability in the production of means of production carries with it stability in demand for means of consumption, and this can be easily established by the State without direct regulation of consumption. Only one phase of the disturbances in circulation which spring from production is of importance to the proletarian regime, – only under-production, never over-production. To-day the latter is the principal cause of crises, for the greatest difficulty at present is the sale, or getting rid of the product. The purchase of goods, the procuring of the products that one needs, ordinarily causes very little complaint from those lucky ones who have the necessary small change in their pockets. Under proletarian regime this relation mould be reversed. There will be no need of anxiety regarding the disposal of the products when completed. Private individuals will not be purchasing for sale to other private individuals, but society will be purchasing for its own necessities. A crisis can then only arise when a sufficient amount of a number of products has not been produced to supply the need either for production or personal consumption. 
If accordingly there are here and there, or even anywhere, too much produced this will signify only a wasting of labor power and a loss for society, but will not hinder the progress of production and consumption. It will be the principal anxiety of the new regime to see to it that there is not insufficient production in any sphere. Accordingly it will, to be sure, also take care that no labor power is wasted in superfluous production, for every such waste signifies an abstraction from all the others and an unnecessary extension of the labor time. Top of the page The Remnants of Private Property in the Means of Production We have seen that the proletarian regime would make short work of the smaller businesses where they represent the little, undeveloped plants, not only in industry but also in exchange. The efforts referred to above for the organization of circulation would also lead to the greatest possible abolition of the little middlemen by crushing them out, partially through co-operatives for consumption, partly through extension of municipal activity. Superintendence and organization of the productive processes will be much easier when it is not necessary to deal with countless operators, but rather with only a few organizations. Besides the work of the middle-men the direct producers of articles of consumption for local necessity would fall to the cooperatives and municipalities – for example, bakeries, milk and vegetable production and erection of buildings. But it is not to be expected that all small private industries will disappear in this manner. This will be specially true in agriculture. To be sure those agricultural plants which have already become capitalist industries would fall with the wage system and be transformed into national, municipal or co-operative businesses. Therewith a large number of the little competing farmers of to-day would cease to exist and go as laborers into the industrial or agricultural great industry, because they could there secure a respectable existence. But we may be sure that some farmers would always remain with their own family, or at the most with one assistant, or maid that will be reckoned as part of the family, and would continue their little industry. With the present conservative nature of our farmers it is highly probable that a number of them would continue to work in the present manner. The proletarian governmental power would have absolutely no inclination to take over such little businesses. As yet no socialist who is to be taken seriously has ever demanded that the farmers should be exappropriated, or that their goods should be confiscated. It is much more probable that each little farmer would be permitted to work on as he has previously done. The farmer has nothing to fear from a socialist regime. Indeed it is highly probable that these agricultural industries would receive considerable strengthening through the new regime. It would bring an abolition of militarism, of burdens of taxation, bring self-government and nationalism of schools and road taxes, an abolition of poor relief and perhaps also a lowering of mortgage burdens, and many other advantages. We have also seen that the victorious proletariat has every reason to increase the amount of products, and among those products for which the demand would be increased, the most important are agricultural products. 
In spite of all the refutation of the theory of increasing misery there is still much hunger to satisfy, and this fact alone justifies us in the opinion that the raising of wages mould show itself above all in an increase of the demand for agricultural products. The proletarian regime would also have the greatest interest in increasing the production of the farmers and it would have powerful forces at its disposal for this purpose. Its own interests demand that the agricultural industry should be brought to a higher stage through the care of animals, machines and fertilizers, through improvement of the soil, etc. It mould in this manner assist in Increasing agricultural products, including those in the industries not yet socialized. But here, as well as in every sphere, conditions would make it necessary to simplify the circulation process by substituting for a large number of private individuals trading their products with one another a few organizations united for economic purposes. The State would much prefer instead of selling breeding animals, machines and fertilizers to the individual farmers to deal with the farmers’ societies and co-operatives. These societies and co-operatives would find as the purchasers of their products no longer private middle-men, but either co-operatives, unions for consumption, municipalities or national industries (mills, sugar factories, breweries and such like). So here also the private industry would continually recede before the social, and the latter would finally transform the agricultural industry itself and permit the development of many such industries through the co-operative or municipal co-operative into one great social industry. The farmers will combine their possessions and operate them in common, especially when they see how the social operation of the expropriated great industry proves that with the same expenditure of labor perceptibly more can be produced, or that with the same number of products the laborers can be granted considerably more leisure than is possible in the small industry. If the small industry is still able to assert itself in agriculture this is due not a little to the fact that it can pump more labor out of its laborers than the great industry. It is undeniable that farmers work harder than the wage workers of the great land owners. The farmer has scarcely any free time, and even during the little free time that he has he must be continually studying how he can improve his business. There is nothing else in his life but his business, and that is also one of the reasons why he is so hard for us to gain. But this holds true only for the older generation; the younger generation is conscious of other things. They feel a strong impulse towards enjoyments and pleasures, towards joy, and also towards a higher culture, and because they cannot satisfy these impulses in the country they stream into the cities and populate the level plains. When once the farmer sees, however, that he can remain in agriculture without being compelled to renounce leisure and culture he will no longer flee from agriculture, but will simply move from the little industry to the great and therewith the last fortress of private property will disappear. But the victorious proletariat will not consider a violent hastening of this development, and this for the very good reason that it does not feel itself called upon to get its head cracked without any necessity. 
And this has been the result of every attempt to force the farmers to a new stage of production. However high may be my estimate of the belligerency and fearlessness of the proletariat, its struggle is not directed against the little people that are themselves exploited, but against the great exploiters.

Along with agriculture the small industry in business comes into consideration. This also need not completely disappear at once. To be sure the new regime, as we have already seen, would, whenever poorly organized industry came in competition with the more perfect, strive to concentrate production in the well directed great industries. This could be easily attained, however, without the application of force by the simple raising of wages.

But there will always be branches of industry in which the machine cannot compete successfully with hand labor, or cannot accomplish what the latter can accomplish. It is highly significant that an investigation of the factory statistics of the German empire did not yield a single form of production in which the small industry still exclusively rules, with one insignificant exception (four plants each with one laborer). A few figures that, so far as I know, have never yet been published are here given. In the following branches of industry the small business rules almost exclusively, more than 97 per cent of all industries, while the great business with more than fifty laborers does not exist at all:

                                        Number of factories with:
                                        1 to 5 workers   6 to 50 workers   Number of motors
Makers of whetstones                            77               2                52
Makers of violins                            1,037              24                 5
Preparation of anatomical material             126               3
Scavengers                                     971               2                11
Spinners (materials not given)                 275               3                 2
Weavers (materials not given)                  608               6                 5
Rubber toys                                      4
Barbers, hairdressers, wigmakers            60,035             470                 6
Cleaners of clothes and bootblacks             744               4                 7
Chimneysweeps                                3,860              26
Sculptors and painters                       5,630              84                 2

If we exclude painters, barbers, chimneysweeps, violin makers and, according to my opinion, also scavengers and bootblacks, this reduces the field of existing small businesses, in industries which are outside the field of competition of great industries, to practically nil.

Nevertheless it may be granted that the small industry will have a definite position in the future in many branches of industry that produce directly for human consumption, for the machines manufacture essentially only products in bulk, while many purchasers desire that their personal taste shall be considered. It is easily possible that even under a proletarian regime the number of small businesses may increase as the well-being of the masses increases. The demand for products of hand labor as a result of this may become active. Artistic hand work may accordingly receive a new impulse.

However, we need not expect the realization of the picture of the future that William Morris has painted for us in his beautiful Utopia, in which the machine plays no role whatever. The machine will remain the ruler of the productive process. It will never give up this position again to hand labor. This, however, does not exclude the possibility that hand work in many artistic branches will again flourish and that it will even conquer many new fields. Meanwhile it to-day too often maintains its existence only as the product of extreme misery. As a house industry, hand work in a socialist society can only exist as an expensive luxury which may, with universal well-being, find an extensive distribution.
The foundation of the productive process will still remain the machine-driven great industry. The problematical small industries will at the most be maintained as islands in the ocean of the great social businesses. These little industries, again, can take on the most various forms in regard to the ownership of their means of production and the disposal of their products. They may be dependent upon a great national or municipal industry, from which they receive their raw material and tools and to which they dispose of their products. They can produce for private customers, or for the open market, etc. as to-day, so then, a laborer can occupy himself in the most diverse occupations one after another. A seamstress, for example, can occupy herself for a time in a national factory and at another time make dresses for private customers at home, then again can sew for another customer in her own house, and finally she may, with a few comrades, unite in a co-operative for the manufacture of clothing for sale. In this, as in every other relation, the greatest diversity and possibility of change will rule. Nothing is more false than to represent the socialist society as a simple, rigid mechanism whose wheels when once set in motion run on continuously in the same manner. The most manifold forms of property in the means of production – national, municipal, cooperatives of consumption and production, and private can exist beside each other in a socialist society – the most diverse forms of industrial organization, bureaucratic, trades union, cooperative and individual; the most diverse forms of remuneration of labor, fixed wages, time wages, piece wages, participation in the economics in raw material, machinery, etc., participation in the results of intensive labor the most diverse forms of circulation of products, like contract by purchase from the warehouses of the State, from municipalities, from co-operatives of production, from producers themselves, etc., etc. The same manifold character of economic mechanism that exists to-day is possible in a socialistic society. Only the hunting and the hunted, the struggling and resisting, the annihilated and being annihilated of the present competitive struggle are excluded and therewith the contrast between exploiter and exploited. Top of the page Intellectual Production So much for the most important economic problems that arise from the political victory of the proletariat and the means to their solution. It would be very alluring in this connection to follow these conditions further, to investigate the problems which housing and international commerce, the relations of city and country, etc., carry with them, all of which will be deeply touched by the domination of the proletariat and cannot continue in their present manner. But I must turn from the discussion of these themes at this point because I have said elsewhere the most essential things that I have to say upon them (the position of a socialist community in relation to colonies and world’s commerce I have discussed in my preface to Atlanticus, A View of the Future State, p.XIX, and The Future of the Individual Home, in my Agrarfrage, p.447, etc.). I wish to discuss only one point in this connection about which much indefiniteness exists – the future of intellectual production. We have here hitherto only investigated the problem of material production which is most fundamental. 
But upon this basis there arises a production of artistic works, scientific investigation and literary activities of various forms. The continuation of this production is no less necessary for modern civilization than the undisturbed continuance of the production of bread and meat, coal and iron. A proletarian revolution, however renders its continuance in the former manner impossible. What has it to substitute therefor? That no reasonable man to-day fears that the victorious proletariat will cause a return to the old condition of barbarism or that it will fling art and science and superfluous rubbish into the lumber room but that on the contrary it is just among those broad popular sections of the proletariat that the most interest and the highest regard for art and science is to be found, I have already shown in my essay concerning Reform and Revolution. But my whole inquiry is not so much in the nature of an investigation into what the victorious proletariat might do as to what by virtue of the power of logic and facts it can and must do. There will be no lack of the necessary material objects for art and science. We have already seen that it is one of the strong points of the proletarian regime that through the abolition of private property in the means of production the possibility will be created of wiping out in the quickest possible manner the ruins of the outgrown means and methods of production which to-day prevent the unfolding of the modern productive powers and which beneath the present dominion of private property can only be slowly and incompletely swept out of the road by competition. The wealth of society must thereby at once attain a level far above that inherited from capitalist society. But material objects alone are not sufficient to secure this elevation. Wealth alone does not give rise to a great ideal life. The question is whether the conditions of production of material goods in socialist society are consistent with the necessary conditions of a highly developed intellectual production. This is strongly denied by our opponents. Let us nest examine some forms of existing intellectual production. It takes on three forms: production through organs of society for direct satisfaction of social needs; then, the production of goods in individual industries, and finally the production of goods under capitalist industry. To the first form of intellectual production belongs the whole system of education from kindergartens to universities. If we disregard the insignificant private schools, this is to-day almost wholly in the hands of society and is conducted by the State not for the purpose of making profits or on account of gain. This holds above all of the modern national and municipal schools, but also of those which are mainly ruins descended from the Middle Ages, but which still exist under clerical organization and community support, and which are especially prominent in the land of Anglo-Saxon culture. This social educational system is of the highest significance for the intellectual life, especially for the scientific, and this is not simply through its influence upon the growing youth. It controls ever more and more scientific investigation in that its teachers, especially in the high schools, have more and more a monopoly of scientific apparatus without which scientific investigation is to-day almost impossible. 
This is especially true in the field of the natural sciences whose technique has become so highly developed that, aside from a few million sires, the State alone is able to supply the means demanded for the establishment and maintenance of the necessary scientific apparatus. But in many branches of social science, ethnology and archaeology and others, the scientific apparatus of investigation has become ever more comprehensive and expensive. Because of this, science becomes ever more and more an unremunerative occupation, by which a man cannot live and to which only those people can devote themselves who are paid by the State unless they have been very fortunate in the choice of their parents – or of their wives. Attainment of the necessary preliminary knowledge for productive scientific activity demands again a great and ever increasing amount of money. So it is that science is more and more monopolized by the governmental powers and the possessing classes. At the very least a proletarian regime can abolish the conditions which hamper scientific activity at present. It must formulate its educational system, as was previously pointed out, so that each genius will have within his reach all the knowledge that the social educational system has at its disposal. It will increase enormously the demand for educated people, and therewith also for the power of scientific investigation. Finally it will operate through the abolition of class antagonisms to make the investigators in the sphere of social science, where employed by the State, internally and externally free. So long as there are class antagonisms there will be very different standpoints from which society will be observed. There is no greater hypocrisy or self-deception than the talk about an existing science which is above class antagonisms. Science exists only in the heads of investigators and these are the products of society and cannot get out of it or reach above it. Even in a socialist society science will be dependent upon social conditions but these will then at least be uniform and not antagonistic. Even worse than the internal dependence upon social conditions, from which no investigator can free himself, is the external dependence of many of those from governmental or other dominating institutions, for example, clerical. These compel the intellectual workers to direct their views according to those of the governing classes and will not permit them to investigate freely and independently, and it compels them to seek in a scientific manner for arguments that will justify the existing order and repel the aspiring classes. So the class dominion operates directly to demoralize science. The intellectual workers will have every reason to breathe freer when the proletarian regime sweeps away the direct and indirect dominion of the class of capitalists and land owners. The intellectual life so far as it is connected with education has nothing to fear and everything to hope from the victory of the proletariat. How is it, then, with the production of intellectual commodities? In this connection we will first study individual production. Here painting and sculpture come most prominently into consideration, together with a portion of literary writing. A proletarian regime will no more make this form of commodity production impossible, than it will abolish the little private industry in material production. 
Just as little as the needle and thimble, will brush and palette, or ink and pen belong to those means of production which must under all conditions be socialized. But one thing is well possible and that is that with the cessation of capitalist exploitation the number of purchasers that heretofore constituted the market for the commodities produced by the little artistic industry will be reduced. This will certainly not be without influence on the articles of artistic production. It will not abolish such production but only alter its character. The easel painting and statuettes which can most easily change their places and possessors, that can be placed wherever we wish, are the special form of commodity production in art. They include those forms of artistic work that can easiest take the form of commodities, which, like jewelry, can be accumulated and stored either for the purpose of re-selling at a profit or to hoard as treasures. It is possible that their production for the purposes of sale will find many obstacles in a socialist society. But in place of these, other forms of artistic production will appear. A proletarian regime will immensely increase the number of public buildings. It will endeavor to make attractive every place occupied by the people, whether for labor, for consultation, or for pleasure. Instead of accumulating statuettes and pictures that will be thrown into a great impersonal market from whence they finally and a place utterly unknown to the artist and are used for wholly unthought of purposes, the artist will work together with the architect as was the case in the Golden Age of art in Athens under Pericles and in the Italian Renaissance. One art will support and raise the other and artistic labor will have a definite social aim so that its products, its surroundings and its public will not be dependent on chance. On the other side the necessity to produce artistic works for sale as commodities will cease. Above all there will no longer be need to offer individual labor for profit or as wage labor, or for the production of commodities. I have already pointed out that a proletarian regime would endeavor, as is perfectly evident from the standpoint of the wage-worker, to shorten the labor time and raise the wages. I have also shown to how high a degree this can be done, particularly in the line of highly developed capitalist production, simply through the concentration of industry in the most perfect centers of production and through the most perfect utilization of these most perfect industries. It is by no means fantastic to conclude that a doubling of the wages and a reduction of labor time to half of the present one is possible at once, and technical science is already sufficiently advanced to expect rapid progress in this field. The further one goes in this direction the more the possibility increases for those who are engaged in material production to give themselves up also to intellectual activity and especially to those forms that bring no material gain, but rather find their reward in themselves and which are the highest forms of intellectual activity. The greater increased leisure may in part, indeed in overwhelming part, lead to pure intellectual enjoyment. With the talented the creative genius will be free and the union of material with artistic literary and scientific production will be made possible. This union, however, will not be simply possible. It will be an economic necessity. 
We have seen that a proletarian regime must aim to make culture a universal good. If we should seek to extend culture in the present sense of the word it would end in making the growing generation useless for material production and hence would undermine the foundations of society. To-day the social division of labor is developed in such a manner that material and intellectual labor are well-nigh mutually exclusive. Material production exists under such conditions that only the few who have been favored by nature or by special conditions are able to engage in the higher intellectual labor. On the other side intellectual labor as it is carried on to-day makes those who follow it incapable of and disinclined toward physical labor. To give culture to all mankind under such conditions would simply make all material production impossible because then no one would be found who could or would carry it on. If we are to make the higher intellectual culture a common good without endangering the existence of society, then not simply pedagogical but economic necessity demands that this be done in such a manner that the growing generation will be made familiar in schools not simply with intellectual but also with physical labor and the habit of uniting intellectual and material production will be firmly rooted. The proletarian regime must proceed from two directions to secure the union of material and intellectual production and to free the latter in the mass of the population from its present material fetters. On the one side this must be done through the continuous shortening of the labor time of the so-called hand laborers. This will come as a result of the increasing productivity of labor whereby more time will be continuously granted for intellectual labor to those engaged in material production. On the other side this will be accomplished by an increase of the physical labor of the cultured, an unavoidable result of the continual increase in numbers of the latter. It is, however, plain that with this union, physical labor for gain and for the necessary labor in the interest of society, and intellectual labor for the free exercise of personality would be freed from every social compulsion. For intellectual labor is much more incompatible with such compulsion than physical. This liberation of intellectual labor by the proletariat is not the pious wish of the Utopian but the economically necessary consequence of its victory. Finally we must observe the third form of intellectual production – that which is capitalistically exploited. Since the first of these three forms of intellectual production includes mainly science and the second the fine arts, so what we have to say now applies to the utilization of all spheres of intellectual activity, but particularly, however, to the heroes of the pen and the stage, to whom now stand opposed as capitalist directors of industry, the publishers, periodical owners and theater directors. Capitalist exploitation in such a form is impossible of continuance under a proletarian regime. It rests, however, upon the fact that to get even a questionable intellectual production to the public requires an expensive technical apparatus and extensive co-operative powers. The individual cannot here act for himself. Does that, however, not mean that here again the alternative to capitalist industry is national industry? 
If this is so, must not the centering of so great and important a part of the intellectual life in the State threaten in the highest degree that intellectual life with uniformity and stagnation? It is true that the governmental power will cease to be a class organ, but will it not still be the organ of a majority? Can the intellectual life be made dependent upon the decisions of the majority? Would not every new truth, every new conception and discovery be comprehended and thought out by the insignificant minority? Does not this new order threaten to bring at once the best and keenest of the intellectual thinkers in the various spheres into continuous conflict with the proletarian regime? And even if this creates increased freedom for the artistic and scientific development would not this be more than offset by the fetters that it will lay upon the intellectual activity when this can only be pursued by social means? Here is certainly an important but not an insoluble problem. We must first notice that as for all production so also for the social necessities of intellectual production the State will from the beginning not be the only leading and means granting organ which will come into consideration, but there will also be municipalities. Through these alone all uniformity and every domination of the intellectual life by central power is excluded. As another substitute for the capitalist industry in individual production, still other organizations must be considered; those of free unions which will serve art and science and the public life and advance production in these spheres in the most diverse ways, or undertake them directly as even today we have countless unions which bring out plays, publish newspapers, purchase artistic works, publish writings, fit out scientific expeditions, etc. The shorter the hours of labor in material production and the higher the wages the more will these free unions be favored. They must increase in numbers, in enthusiasm and in the intelligence of their members as well as in the resources which the intellectuals can contribute to support the common cause. I expect that these free unions will play an even more important role in the intellectual life. It is their destiny to enter into the place now occupied by capital and individual production and to organize and to lead the social nature. Here also the proletarian regime leads not to greater bondage but to greater freedom. Freedom of education and of scientific investigation from the fetters of capitalist dominion; freedom of the individual from the oppression of exclusive, exhaustive physical labor; displacement of the capitalist industry in the intellectual production of society by the free unions, – along this road proceeds the tendency of the proletarian regime in the sphere of intellectual production. We see that the problems in the field of production are of a contradictory nature. The capitalist system of production has created the task of formulating the social process of production in a simple and systematic manner. This task consists in placing the individual in a fixed order to whose rules he must conform. On the other side this same manner of production has more than ever brought the individual to a self-consciousness, placed him on his own feet and freed him from society. 
More than ever mankind demands to-day the possibility of developing a personality and its relation to other men in order to determine in the freest manner the more sensitive and individual of these relations, especially the marriage relation, but also their relation as artists and thinkers to the external world. Regulation of social chaos and liberation of the individual – these are the two historical tasks that capitalism has placed before society. They appear to be contradictory, but they are simultaneously soluble because each of them belongs to a different sphere of social life. Undoubtedly whoever should seek to rule both spheres in the same manner would find himself involved in insoluble contradictions. It is on this point that anarchism is wrecked. Anarchism arises out of the reaction of the little bourgeois against the repressive and oppressive capitalism. The little handworker who was accustomed to direct his labor according to his own pleasure rebels against the discipline and the monotony of the factory. His ideal remains the free labor of the individual and when this is no longer possible he seeks to replace it by common working together in free unions wholly independent of each other. The “new middle class,” the intellectuals, is, as we have already seen many times, in its social position only a refined and more sensitive expression of the earlier little bourgeois. Its manner of working develops in them the same need for free labor, the same repugnance to discipline and uniformity. So it is that their social ideal becomes the same as that of the small bourgeois, that is the anarchist. But that which is a progressive ideal in their sphere of production shows itself to be reactionary in the field of material production where it corresponds to the conditions of production of the now extinct hand work. In the present stage of production there are only two possible forms of material production so far as production in quantities is concerned, aside from a few remnants which are mainly curiosities: on the one side communistic with social property in the means of production, and the systematic direction of production from a central point, or the capitalistic. The anarchistic system of production can, under the best conditions, be only a transitory episode. Material production through free unions without central production leads to chaos unless the commodities produced exchange on the basis of the law of value determined by free competition. We have seen above what the consequence is for individual industry under free competition. It determines the correct proportionality of individual means of production to one another and prevents any one from swamping society with buttons or leaving it without bread. Production of commodities under the present conditions of social production must continuously take on some form of capitalist production, as countless productive co-operatives have shown. To strive for an anarchist ideal in material production is at best a Sisyphus task. It is wholly different with intellectual production. This is built upon material production, on the surplus of products and labor powers which proceed from material production. It is possible only when material life is secured. If the latter falls into confusion then our whole existence is threatened. Consequently it is absolutely unimportant for society in what relations the existing surplus of products and labor powers are applied to the individual fields of free intellectual creation. 
The exception to this is the educational system, which has its special laws, and has not yet been turned over to free competition in any society, but has been socially regulated.

Society would fall into bad condition if all the world should set to work at the manufacture of one kind of commodity such, for example, as buttons, and thereby direct too much labor power to this, so that not enough was left for the production of others, such, for example, as bread. On the other hand the relation between lyric poems and tragedies, works on Assyriology and botany which are to be produced is no essential one; it has neither maximum nor minimum point. If to-day there should be twice as many dramas as yesterday, and at the same time one-half as many lyrics, or if to-day twenty works on Assyriology should appear and only ten botanical, while yesterday the relations were reversed, still the existence of society would not be touched in the slightest thereby. These facts find their economic expression in the fact that the law of value, in spite of all psychological theories of value, holds good only for material production and not for intellectual. In this field a central direction of production is not only unnecessary, but absolutely foolish. Here free production can rule without the necessity of production of commodities of value or of capitalist production.

Communism in material production, anarchism in the intellectual. This is the type of the socialist productive system which will arise from the dominion of the proletariat or, in other words, out of the social revolution by the logic of economic facts, whatever may be the wishes, ideas and theories of the proletariat.

The Preliminary Psychical Conditions to the Dominion of the Proletariat

It will have occurred to very many readers that in this investigation I have spoken only of economic conditions. I have not investigated what are to be the ethical foundations of the new society, whether they shall rest upon Kantian or Spencerian ethics, upon the categorical imperative, or whether the greatest good to the greatest number shall be the principal motive. I have not investigated which of the above theories shall constitute the juridical foundation, whether the right to the complete product of labor, or the right to existence, or some other one of the fundamental economic rights which the judicial socialists have discovered. No doubt laws and ethics will play a part in the social revolution, but the determining factor will always be the demands of economics.

But beside law and ethics psychology also comes into consideration. Will not problems arise therefrom for the proletarian regime, and those of great significance? Does not the socialist society presuppose extraordinary people, actual angels in unselfishness, joy in labor, and intelligence? Will not the social revolution, with the present race full of egoism and brutality, be the signal for a raging battle for spoils or lead to a universal idleness? All transformations of economic foundations amount to nothing so long as mankind is not ennobled.

The treatment and the text are not new. They were sung a hundred years ago as the song of the crushed oppressed classes arose. The gentle landlords of the Holy Alliance would gladly have given their beloved children all possible freedom, but these children must first attain the necessary ripeness.
I do not intend to deny that every system of production demands certain definite technical and also psychological preliminary conditions in order to enable it to be realized. What shall be the necessary forms of these psychological conditions of a given manner of production depends upon the character of the economic tasks which it sets forth. No one will claim that in my investigation I have presupposed mankind of an angelic character. The problem that we have to solve presupposes intelligence, discipline and talent for organization. These are the psychological foundations of a socialist society. Those are just the ones that the capitalist society has created. It is the historical task of capital to discipline and organize the laborers, and to widen their intellectual horizon beyond the boundaries of the workshop and the church door. For socialism to rise on the basis of hand work or agricultural industry is impossible, not simply on economic grounds because of the low productivity of industry, but also for psychological reasons. I have already shown how small bourgeois psychology inclines towards anarchy and opposes the discipline of the social industry. It is one of the greatest difficulties that capital meets in the beginnings of capitalist production, in that it must take its first laborers directly from hand work or from agriculture. It had to fight with this in the eighteenth century in England, and to-day in the Southern States of America, which renders very difficult the rapid advance of the great industry, notwithstanding that the nearness to raw materials greatly favors such industry. Not discipline alone but also the talent for organization is difficult of development in little bourgeois and agricultural positions. There are no great bodies of men to be united in systematic co-operation. On this economic stage it is only the soldiers who offer the opportunity to organize in great bodies. The great generals are also great organizers. Capitalist production transplants the task of organization of great masses of the community to industry. The capitalists constitute naturally the head people, the field generals of those who are under them, and become prominent factors in organization. Correspondingly the organizing talent in its appointees is very highly valued and rewarded by capital. Under these conditions the organizing talent grows rapidly. It can be applied equally well to the uses of a proletarian regime that will also need numerous directors of factories and organizers of trusts. Capital also demands intelligent labor power, so we see that the competitive struggle above all enforces the betterment of the industrial school. On the other side the development of industry and the existence of newspapers contribute to extend the intellectual horizon of the laborer. But not alone the pressure of capital in the exploitation of great bodies of labor, but the struggle of the proletarian against this exploitation develops the psychological conditions for socialist production; it develops discipline in every way, as we have already seen, of a wholly different character from that given by capital, and this struggle develops also a talent for organization, for it is only through the unanimous co-operation of the great body of mankind that the proletariat can assert itself against capital and the capitalist state. Organization is the most important weapon of the proletariat and nearly all its great leaders are also great organizers.
To the money of capital, and the weapon of the military States, the proletariat has nothing to oppose save its economic indispensability and its organization. That its intelligence grows with these and through these needs no proof. The social revolution requires high intelligence, strict discipline and complete organization of this great mass and these must exist simultaneously with and be indispensable to economic life if it is to attain strength to overcome so extremely powerful an opponent. We may expect that it will only succeed when these peculiarities are developed in the highest degree and also that the victory of the proletariat and therewith the social revolution will not come before not only the economic but also the psychological conditions to a socialist society are present in a high degree. This does not mean that mankind should be angels nor that we shall need to wait so very long for its psychological ripeness. While the modern proletariat has need of no great change in order to make it ripe for socialist society, nevertheless we may expect that this society will greatly alter the character of mankind. That which is demanded as a preliminary condition to a socialist society, and which the capitalist society makes impossible, and which would be therefore the most impossible preliminary condition, that is, the creation of a higher type of mankind than the modern one, that will be the natural result of socialism. It will bring security, rest and leisure to mankind; it will raise their minds above the commonplace because they will no longer need to continuously think of where the bread for the morrow is to come from. It will make personalities independent of other personalities, so that the feeling of slavery as well as of human adoration will disappear. It will at the same time create a balance between country and city, make the treasures of the cultured rich attainable to all mankind and give back to them the nature which arises from the strength and joy of living. Simultaneously with the abolition of the physiological roots of pessimism it will do away with the social ones also, together with the misery and degradation of the one who makes a virtue of necessity, and the satiety of the other who in idle luxury has drained the cup of enjoyment even to the dregs. Socialism will abolish poverty and satiety and unnaturalness, make mankind joyful, appreciative of beauty, capable of happiness, and thereby it will bring freedom in scientific and artistic creation for all. May we not expect that under such conditions a new type of mankind will arise which will be far superior to the highest type which culture has hitherto created? An over-man (Uebermensch), if you will, not as an exception but as a rule, an over-man compared with his predecessors, but not as opposed to his comrades, a noble man who seeks his satisfaction not by being great among crippled dwarfs, but great among the great, happy among the happy – who does not draw his feeling of strength from the fact that he raises himself upon the bodies of the down-trodden, but because a union with his fellow-workers gives him courage to dare the attainment of the highest tasks. So we may expect that a realm of strength and of beauty will arise that will be worthy the ideal of our best and noblest thinkers.
// Code generated by go-swagger; DO NOT EDIT. package descriptors // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "io" "github.com/go-openapi/runtime" strfmt "github.com/go-openapi/strfmt" "github.com/aeckard87/WornOut/models" ) // GetDescriptorsByDetailReader is a Reader for the GetDescriptorsByDetail structure. type GetDescriptorsByDetailReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the received o. func (o *GetDescriptorsByDetailReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 200: result := NewGetDescriptorsByDetailOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil case 404: result := NewGetDescriptorsByDetailNotFound() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return nil, result default: return nil, runtime.NewAPIError("unknown error", response, response.Code()) } } // NewGetDescriptorsByDetailOK creates a GetDescriptorsByDetailOK with default headers values func NewGetDescriptorsByDetailOK() *GetDescriptorsByDetailOK { return &GetDescriptorsByDetailOK{} } /*GetDescriptorsByDetailOK handles this case with default header values. Status Ok */ type GetDescriptorsByDetailOK struct { Payload *models.Description } func (o *GetDescriptorsByDetailOK) Error() string { return fmt.Sprintf("[GET /details/{id}/descriptors][%d] getDescriptorsByDetailOK %+v", 200, o.Payload) } func (o *GetDescriptorsByDetailOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.Description) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil } // NewGetDescriptorsByDetailNotFound creates a GetDescriptorsByDetailNotFound with default headers values func NewGetDescriptorsByDetailNotFound() *GetDescriptorsByDetailNotFound { return &GetDescriptorsByDetailNotFound{} } /*GetDescriptorsByDetailNotFound handles this case with default header values. No items found */ type GetDescriptorsByDetailNotFound struct { } func (o *GetDescriptorsByDetailNotFound) Error() string { return fmt.Sprintf("[GET /details/{id}/descriptors][%d] getDescriptorsByDetailNotFound ", 404) } func (o *GetDescriptorsByDetailNotFound) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { return nil }
/** Minimizes a cost function. * <p>The initial simplex is built from two vertices that are * considered to represent two opposite vertices of a box parallel * to the canonical axes of the space. The simplex is the subset of * vertices encountered while going from vertexA to vertexB * travelling along the box edges only. This can be seen as a scaled * regular simplex using the projected separation between the given * points as the scaling factor along each coordinate axis.</p> * <p>The optimization is performed in multi-start mode.</p> * @param f cost function * @param maxEvaluations maximal number of function calls for each * start (note that the number will be checked <em>after</em> * complete simplices have been evaluated, this means that in some * cases this number will be exceeded by a few units, depending on * the dimension of the problem) * @param checker object to use to check for convergence * @param vertexA first vertex * @param vertexB last vertex * @param starts number of starts to perform (including the * first one), multi-start is disabled if value is less than or * equal to 1 * @param seed seed for the random vector generator (32 bits * integers array). If null, the current time will be used for the * generator initialization. * @return the point/cost pairs giving the minimal cost * @exception CostException if the cost function throws one during * the search * @exception NoConvergenceException if none of the starts did * converge (it is not thrown if at least one start did converge) */ public PointCostPair minimizes(CostFunction f, int maxEvaluations, ConvergenceChecker checker, double[] vertexA, double[] vertexB, int starts, int[] seed) throws CostException, NoConvergenceException { buildSimplex(vertexA, vertexB); double[] mean = new double[vertexA.length]; double[] standardDeviation = new double[vertexA.length]; for (int i = 0; i < vertexA.length; ++i) { mean[i] = 0.5 * (vertexA[i] + vertexB[i]); standardDeviation[i] = 0.5 * Math.abs(vertexA[i] - vertexB[i]); } RandomVectorGenerator rvg = new UncorrelatedRandomVectorGenerator(mean, standardDeviation, new UniformRandomGenerator(seed)); setMultiStart(starts, rvg); return minimizes(f, maxEvaluations, checker); }
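For illustration only, here is a small Go sketch (Go is used for the added examples in this collection; the helper name boxStats is mine, not part of the library) of the per-coordinate statistics the Java method above derives from the two box vertices: the midpoint of each coordinate becomes the mean, and half the separation becomes the standard deviation used to seed the multi-start random vector generator.

package main

import (
    "fmt"
    "math"
)

// boxStats returns, for each coordinate i, the midpoint of vertexA[i] and
// vertexB[i] and half their separation, mirroring the loop in the Java
// minimizes() method above.
func boxStats(vertexA, vertexB []float64) (mean, stdDev []float64) {
    mean = make([]float64, len(vertexA))
    stdDev = make([]float64, len(vertexA))
    for i := range vertexA {
        mean[i] = 0.5 * (vertexA[i] + vertexB[i])
        stdDev[i] = 0.5 * math.Abs(vertexA[i]-vertexB[i])
    }
    return mean, stdDev
}

func main() {
    m, s := boxStats([]float64{0, 0}, []float64{2, 4})
    fmt.Println(m, s) // prints [1 2] [1 2]
}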
Today, Ubisoft Entertainment has released a range of stunning photos of a hand-crafted Sony PS4 and DUAL SHOCK 4 controllers, to mark the upcoming launch of Far Cry Primal on February 23rd, 2016. Working free-hand, accredited master carver and stone mason, Nick Roberson, was commissioned to intricately replicate these items from stone, to showcase the Stone Age era that the franchise moves to in the latest installment. In addition to the PS4 console and controllers, Nick Roberson was also commissioned to craft an entire gaming den from hand-carved finest Fletcher Band sandstone. The den was also kitted out with a stone television, pizza box, beer can and an impressive carved coffee table. The result is a stunning image of a prehistoric take on the contemporary items of today, as regularly seen in modern gaming pads across the country.
def loadNumericalFeatures(featuresFile, delimiter=","):
    """Loads a list of numerical features from a text file.

    The file may contain either a Python-style list literal (e.g. "[1.0, 2.0]")
    or delimiter-separated numbers (e.g. "1.0,2.0").
    """
    try:
        if not os.path.exists(featuresFile):
            prettyPrint("Unable to find the features file \"%s\"" % featuresFile, "warning")
            return []
        with open(featuresFile) as featuresHandle:
            content = featuresHandle.read()
        if content.find("[") != -1 and content.find("]") != -1:
            # The file stores a list literal; evaluate it.
            # Note: eval() trusts the file contents; ast.literal_eval would be safer.
            features = eval(content)
        else:
            # Delimiter-separated values: strip spaces and parse each entry as a float.
            features = [float(value) for value in content.replace(' ', '').split(delimiter)]
    except Exception as e:
        prettyPrintError(e)
        return []
    return features
Sequence-independent DNA binding activity of DnaA protein, the initiator of chromosomal DNA replication in Escherichia coli. The DnaA protein specifically binds to the origin of chromosomal DNA replication and initiates DNA synthesis. In addition to this sequence-specific DNA binding, DnaA protein binds to DNA in a sequence-independent manner. We here compared the two DNA binding activities. Binding of ATP and ADP to DnaA inhibited the sequence-independent DNA binding, but not sequence-specific binding. Sequence-independent DNA binding, but not sequence-specific binding, required incubation at high temperatures. Mutations in the C-terminal domain affected the sequence-independent DNA binding activity less drastically than they did the sequence-specific binding. On the other hand, the mutant DnaA433, which has mutations in a membrane-binding domain (K327 to I344) was inert for sequence-independent binding, but could bind specifically to DNA. These results suggest that the two DNA binding activities involve different domains and perform different functions from each other in Escherichia coli cells.
package base import ( "encoding/json" "github.com/couchbase/gocb" "gopkg.in/couchbase/gocbcore.v7" ) // BinaryDocument is type alias that allows SGTranscoder to differentiate between documents that are // intended to be written as binary docs, versus json documents that are being sent as raw bytes // Some additional context here: https://play.golang.org/p/p4fkKiZD59 type BinaryDocument []byte type SGTranscoder struct { } // The default transcoding.go code in gocb makes assumptions about the document // type (binary, json) based on the incoming value (e.g. []byte as binary, interface{} as json). // Sync Gateway needs the ability to write json as raw bytes, so defines separate transcoders for storing // json and binary documents. // Encode applies the default Couchbase transcoding behaviour to encode a Go type. // Figures out how to convert the given struct into bytes and then sets the json flag. func (t SGTranscoder) Encode(value interface{}) ([]byte, uint32, error) { var bytes []byte var err error flags := gocbcore.EncodeCommonFlags(gocbcore.JsonType, gocbcore.NoCompression) switch typedValue := value.(type) { case BinaryDocument: flags = gocbcore.EncodeCommonFlags(gocbcore.BinaryType, gocbcore.NoCompression) bytes = []byte(typedValue) case []byte: bytes = value.([]byte) case *[]byte: bytes = *value.(*[]byte) case string: bytes = []byte(value.(string)) case *string: bytes = []byte(*value.(*string)) case *interface{}: // calls back into this return t.Encode(*value.(*interface{})) default: bytes, err = json.Marshal(value) if err != nil { return nil, 0, err } } // No compression supported currently return bytes, flags, nil } // Decode applies the default Couchbase transcoding behaviour to decode into a Go type. func (t SGTranscoder) Decode(bytes []byte, flags uint32, out interface{}) error { switch typedOut := out.(type) { case *[]byte: *typedOut = bytes return nil default: defaultTranscoder := gocb.DefaultTranscoder{} return defaultTranscoder.Decode(bytes, flags, out) } }
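A hedged usage sketch of the transcoder above, in Go. The import path below is an assumption (the file only declares `package base`), but `SGTranscoder.Encode` and `BinaryDocument` are taken from the code itself: wrapping bytes in `BinaryDocument` yields the binary common flags, while a plain `[]byte` is passed through unchanged and flagged as JSON.

package main

import (
    "fmt"

    "github.com/couchbase/sync_gateway/base" // hypothetical import path
)

func main() {
    t := base.SGTranscoder{}

    // Raw JSON already serialized by the caller: passed through unchanged,
    // flagged as a JSON document.
    jsonBytes, jsonFlags, _ := t.Encode([]byte(`{"k":"v"}`))
    fmt.Println(string(jsonBytes), jsonFlags)

    // Binary payload: wrapped in BinaryDocument so it receives the binary flag.
    binBytes, binFlags, _ := t.Encode(base.BinaryDocument([]byte{0x01, 0x02}))
    fmt.Println(binBytes, binFlags)
}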
<filename>src/devices/tools/banjo/src/backends/rust.rs // Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { crate::ast::{self, BanjoAst, Constant}, crate::backends::util::to_c_name, crate::backends::Backend, failure::{bail, format_err, Error}, std::collections::HashSet, std::io, }; type DeclIter<'a> = std::slice::Iter<'a, ast::Decl>; pub struct RustBackend<'a, W: io::Write> { w: &'a mut W, } impl<'a, W: io::Write> RustBackend<'a, W> { pub fn new(w: &'a mut W) -> Self { RustBackend { w } } } fn can_derive_partialeq( ast: &ast::BanjoAst, ty: &ast::Ty, parents: &mut HashSet<ast::Ident>, ) -> bool { match ty { ast::Ty::Bool | ast::Ty::Int8 | ast::Ty::Int16 | ast::Ty::Int32 | ast::Ty::Int64 | ast::Ty::UInt8 | ast::Ty::UInt16 | ast::Ty::UInt32 | ast::Ty::UInt64 | ast::Ty::Float32 | ast::Ty::Float64 | ast::Ty::USize | ast::Ty::Protocol | ast::Ty::Voidptr | ast::Ty::Enum { .. } => { return true; } ast::Ty::Vector { ref ty, size: _, nullable: _ } => can_derive_partialeq(ast, ty, parents), ast::Ty::Str { size: _, .. } => { return true; } ast::Ty::Union { .. } => { return false; } ast::Ty::Struct => { unreachable!(); } ast::Ty::Array { ty, size } => match resolve_constant_uint(ast, size) { Ok(size) if size <= 32 => can_derive_partialeq(ast, ty, parents), _ => false, }, ast::Ty::Identifier { id: type_id, .. } => { if type_id.is_base_type() { return true; } match ast .id_to_decl(type_id) .expect(&format!("can't find declaration for ident {:?}", type_id)) { ast::Decl::Struct { fields, .. } => { for field in fields { if let ast::Ty::Identifier { id, .. } = &field.ty { // Circular reference. Skip the check on this field to prevent stack // overflow. It's still possible to derive PartialEq as long as other // fields do not prevent the derive. if id == type_id || parents.contains(id) { continue; } } parents.insert(type_id.clone()); if !can_derive_partialeq(ast, &field.ty, parents) { return false; } parents.remove(type_id); } true } // enum.rs template always derive PartialEq ast::Decl::Enum { .. } => true, ast::Decl::Constant { ty, .. } => can_derive_partialeq(ast, ty, parents), ast::Decl::Alias(_, id) => { let alias_ty = ast.id_to_type(&id); can_derive_partialeq(ast, &alias_ty, parents) } // Union is never PartialEq. ast::Decl::Union { .. } => false, // Resource is not generated right now. Just return `false` for now to be // conservative, but consider revisiting this case when they are generated. ast::Decl::Resource { .. } => false, // Protocol will never be generated. ast::Decl::Protocol { .. } => true, } } ast::Ty::Handle { .. } => true, } } // This is not the same as partialeq because we derive opaque Debugs for unions fn can_derive_debug(ast: &ast::BanjoAst, ty: &ast::Ty, parents: &mut HashSet<ast::Ident>) -> bool { match ty { ast::Ty::Bool | ast::Ty::Int8 | ast::Ty::Int16 | ast::Ty::Int32 | ast::Ty::Int64 | ast::Ty::UInt8 | ast::Ty::UInt16 | ast::Ty::UInt32 | ast::Ty::UInt64 | ast::Ty::Float32 | ast::Ty::Float64 | ast::Ty::USize | ast::Ty::Protocol | ast::Ty::Voidptr | ast::Ty::Enum { .. } => { return true; } ast::Ty::Vector { ref ty, size: _, nullable: _ } => can_derive_debug(ast, ty, parents), ast::Ty::Str { size: _, .. } => { return true; } ast::Ty::Union { .. 
} => { return false; /* technically yes, but done in a custom derive */ } ast::Ty::Struct => { unreachable!(); } ast::Ty::Array { ty, size } => match resolve_constant_uint(ast, size) { Ok(size) if size <= 32 => can_derive_debug(ast, ty, parents), _ => false, }, ast::Ty::Identifier { id: type_id, .. } => { if type_id.is_base_type() { return true; } match ast .id_to_decl(type_id) .expect(&format!("can't find declaration for ident {:?}", type_id)) { ast::Decl::Struct { fields, .. } => { for field in fields { if let ast::Ty::Identifier { id, .. } = &field.ty { // Circular reference. Skip the check on this field to prevent stack // overflow. It's still possible to derive Debug as long as other // fields do not prevent the derive. if id == type_id || parents.contains(id) { continue; } } parents.insert(type_id.clone()); if !can_derive_debug(ast, &field.ty, parents) { return false; } parents.remove(type_id); } true } // union.rs template manually implements Debug. // enum.rs template always derive Debug ast::Decl::Union { .. } | ast::Decl::Enum { .. } => true, ast::Decl::Constant { ty, .. } => can_derive_debug(ast, ty, parents), ast::Decl::Alias(_, id) => { let alias_type = ast.id_to_type(&id); can_derive_debug(ast, &alias_type, parents) } // Resource is not generated right now. Just return `false` for now to be // conservative, but consider revisiting this case when they are generated. ast::Decl::Resource { .. } => false, // Protocol will never be generated. ast::Decl::Protocol { .. } => true, } } ast::Ty::Handle { .. } => true, } } fn resolve_constant_uint(ast: &ast::BanjoAst, constant: &ast::Constant) -> Result<u64, Error> { match constant.0.parse::<u64>() { Ok(uint) => Ok(uint), Err(_) => match ast.id_to_decl(&ast::Ident::new_raw(&constant.0)) { Ok(ast::Decl::Constant { value, .. }) => resolve_constant_uint(ast, &value), _ => bail!("Cannot resolve name {:?} to a uint", constant.0), }, } } fn to_rust_type(ast: &ast::BanjoAst, ty: &ast::Ty) -> Result<String, Error> { match ty { ast::Ty::Bool => Ok(String::from("bool")), ast::Ty::Int8 => Ok(String::from("i8")), ast::Ty::Int16 => Ok(String::from("i16")), ast::Ty::Int32 => Ok(String::from("i32")), ast::Ty::Int64 => Ok(String::from("i64")), ast::Ty::UInt8 => Ok(String::from("u8")), ast::Ty::UInt16 => Ok(String::from("u16")), ast::Ty::UInt32 => Ok(String::from("u32")), ast::Ty::UInt64 => Ok(String::from("u64")), ast::Ty::Float32 => Ok(String::from("f32")), ast::Ty::Float64 => Ok(String::from("f64")), ast::Ty::USize => Ok(String::from("usize")), ast::Ty::Array { ty, size } => { let Constant(ref size) = size; Ok(format!( "[{ty}; {size} as usize]", ty = to_rust_type(&ast, ty)?, size = size.as_str().to_uppercase() )) } ast::Ty::Voidptr => Ok(String::from("*mut std::ffi::c_void /* Voidptr */ ")), ast::Ty::Enum { .. } => Ok(String::from("*mut std::ffi::c_void /* Enum not right*/")), ast::Ty::Str { size, .. 
} => match size { Some(Constant(c)) => Ok(format!("[u8; {size} as usize]", size = c.to_uppercase())), None => Ok(String::from("*mut std::ffi::c_void /* String */")), }, ast::Ty::Vector { ref ty, size: _, nullable: _ } => to_rust_type(ast, ty), ast::Ty::Identifier { id, reference } => { if id.is_base_type() { Ok(format!("zircon::sys::zx_{}_t", id.name())) } else { match ast.id_to_type(id) { ast::Ty::Enum => return Ok(format!("{}", name = id.name())), ast::Ty::Protocol => return Ok(to_c_name(id.name())), ast::Ty::Struct => { let name = id.name(); if *reference { Ok(format!("*mut {name}", name = name)) } else { Ok(format!("{name}", name = name)) } } ast::Ty::Union => { let name = id.name(); if *reference { Ok(format!("*mut {name}", name = name)) } else { Ok(format!("{name}", name = name)) } } t => to_rust_type(ast, &t), } } } ast::Ty::Handle { .. } => Ok(String::from("zircon::sys::zx_handle_t")), t => Err(format_err!("unknown type in to_rust_type {:?}", t)), } } impl<'a, W: io::Write> RustBackend<'a, W> { // These aren't enums, although conceptually similiar, they get generated as pub const // since banjo might have same value output fn codegen_enum_decl(&self, namespace: DeclIter<'_>, ast: &BanjoAst) -> Result<String, Error> { let mut accum = String::new(); for decl in namespace { if let ast::Decl::Enum { ref name, ref ty, attributes: _, ref variants } = *decl { let mut enum_defines = Vec::new(); let ty = to_rust_type(ast, ty)?; for v in variants { let c_name = v.name.as_str().to_uppercase(); let name = if c_name.chars().next().unwrap().is_numeric() { "_".to_string() + c_name.as_str() } else { c_name }; enum_defines.push(format!( " pub const {name}: Self = Self({val});", name = name, val = v.value, )); } accum.push_str( format!( include_str!("templates/rust/enum.rs"), ty = ty, name = name.name(), enum_decls = enum_defines.join("\n") ) .as_str(), ); } } Ok(accum) } fn codegen_const_decl(&self, namespace: DeclIter<'_>, ast: &BanjoAst) -> Result<String, Error> { let mut accum = Vec::new(); for decl in namespace { if let ast::Decl::Constant { ref name, ref ty, ref value, attributes: _ } = *decl { let Constant(ref size) = value; accum.push(format!( "pub const {name}: {ty} = {val};", name = name.name().to_uppercase(), ty = to_rust_type(ast, ty)?, val = size, )); } } Ok(accum.join("\n")) } fn codegen_struct_decl( &self, namespace: DeclIter<'_>, ast: &BanjoAst, ) -> Result<String, Error> { let mut accum = Vec::new(); for decl in namespace { if let ast::Decl::Struct { ref name, ref fields, ref attributes } = *decl { let mut field_str = Vec::new(); let alignment = if attributes.0.contains(&ast::Attr { key: "Packed".to_string(), val: None }) { "packed" } else { "C" }; let mut partial_eq = true; let mut debug = true; let mut parents = HashSet::new(); for field in fields { parents.clear(); parents.insert(name.clone()); if !can_derive_debug(ast, &field.ty, &mut parents) { debug = false; } parents.clear(); parents.insert(name.clone()); if !can_derive_partialeq(ast, &field.ty, &mut parents) { partial_eq = false; } field_str.push(format!( " pub {c_name}: {ty},", c_name = field.ident.name(), ty = to_rust_type(ast, &field.ty)? 
)); } accum.push(format!( include_str!("templates/rust/struct.rs"), debug = if debug { ", Debug" } else { "" }, partial_eq = if partial_eq { ", PartialEq" } else { "" }, name = name.name(), struct_fields = field_str.join("\n"), alignment = alignment, )); } } Ok(accum.join("\n")) } fn codegen_union_decl(&self, namespace: DeclIter<'_>, ast: &BanjoAst) -> Result<String, Error> { let mut accum = Vec::new(); for decl in namespace { if let ast::Decl::Union { ref name, ref fields, ref attributes } = *decl { let mut field_str = Vec::new(); let alignment = if attributes.0.contains(&ast::Attr { key: "Packed".to_string(), val: None }) { "packed" } else { "C" }; for field in fields { field_str.push(format!( " pub {c_name}: {ty},", c_name = to_c_name(field.ident.name()).as_str(), ty = to_rust_type(ast, &field.ty)? )); } accum.push(format!( include_str!("templates/rust/union.rs"), name = name.name(), union_fields = field_str.join("\n"), alignment = alignment, )); } } Ok(accum.join("\n")) } fn codegen_includes(&self, ast: &BanjoAst) -> Result<String, Error> { let mut accum = String::new(); for n in ast.namespaces.iter().filter(|n| *n.0 != "zx").filter(|n| *n.0 != ast.primary_namespace) { accum.push_str( format!( "use banjo_{name} as {name};\nuse {name}::*;\n", name = n.0.replace(".", "_") ) .as_str(), ); } Ok(accum) } } impl<'a, W: io::Write> Backend<'a, W> for RustBackend<'a, W> { fn codegen(&mut self, ast: BanjoAst) -> Result<(), Error> { self.w.write_fmt(format_args!( include_str!("templates/rust/header.rs"), includes = self.codegen_includes(&ast)?, primary_namespace = ast.primary_namespace ))?; let namespace = &ast.namespaces[&ast.primary_namespace]; self.w.write_fmt(format_args!( include_str!("templates/rust/body.rs"), enum_decls = self.codegen_enum_decl(namespace.iter(), &ast)?, constant_decls = self.codegen_const_decl(namespace.iter(), &ast)?, struct_decls = self.codegen_struct_decl(namespace.iter(), &ast)?, union_decls = self.codegen_union_decl(namespace.iter(), &ast)?, ))?; Ok(()) } }
<reponame>pedjaristic/love-in-the-time-covid<filename>src/components/FeelGoodPostList.tsx<gh_stars>0 import React from "react"; import { VerticalSnapContainer, SnapItem, SpecialSnapItem, FlexCenter } from "./layout/indexPage"; import { FeelGoodPostItem } from "./PostItem"; import { graphql, useStaticQuery } from "gatsby"; type BasePost = { title: string; summary: string; type: string; name: string; url?: string; public: boolean; source: string; }; export type TextPost = BasePost & { type: "text"; }; export type ArticlePost = BasePost & { type: "article"; url: string; }; export type TweetPost = BasePost & { type: "tweet"; url: string; }; export type ImagePost = BasePost & { type: "image"; url: string; }; export type FeelGoodPost = TextPost | ArticlePost | TweetPost | ImagePost; const FeelGoodPostList: React.FC<{ posts: FeelGoodPost[]; scrollPostHandler: (scrollNumber: number) => void; }> = props => { // Just used to get the title of the page const metadata = useStaticQuery<{ site: { siteMetadata: { title: string }; }; }>(graphql` query { site { siteMetadata { title } } } `).site.siteMetadata; console.log(metadata); // Keep track of the previous visible post const [currentPost, setCurrentPost] = React.useState(0); /** * When provided the height of element and current location, it determines the currently visible post and notifies the parent * *if* the scrolled position is different */ const currentVisiblePost = React.useCallback( (scrollHeight: number, scrollLocation: number) => { const newPostScrollPosition = Math.round( scrollLocation / (scrollHeight / (props.posts.length + 2)) ); if (newPostScrollPosition !== currentPost) { props.scrollPostHandler(newPostScrollPosition); setCurrentPost(newPostScrollPosition); } }, [props.posts.length, currentPost] ); return ( <VerticalSnapContainer onScroll={e => currentVisiblePost( e.currentTarget.scrollHeight, e.currentTarget.scrollTop ) } > <SpecialSnapItem> <FlexCenter> <h1>Hi, we're glad you're here. 💛</h1> <h2> Times are tough right now and it's easy to be overwhelmed. However, amidst the bad, there is still good. <br /><br /> Scroll through the below stack of stories for a quick and uplifting mindfulness break. </h2> </FlexCenter> </SpecialSnapItem> {props.posts.map(post => ( <SnapItem key={post.name}> <FeelGoodPostItem post={post} /> </SnapItem> ))} <SpecialSnapItem> <FlexCenter> <h1>Come again soon! 💛</h1> <h2> That's all for now, though we'll be working to continuously update these stories. Check back soon for fresh content! 🍆 <br /><br /> Have a great day, and don't forget, we will get through this together. </h2> </FlexCenter> </SpecialSnapItem> </VerticalSnapContainer> ); }; export default FeelGoodPostList;
Diesel vehicles will disappear from roads much faster than expected, according to the European Union’s industry commissioner. Elżbieta Bieńkowska spoke after the European parliament backed tougher rules to prevent manufacturers selling cars that produce far more toxic pollution when driven than in official tests. The “dieselgate” scandal began when Volkswagen was caught cheating emissions tests in the US but it is now clear that almost all cars emit far more nitrogen dioxide than regulators intended. The scandal highlighted the EU’s lax vehicle regulations and on Tuesday the European parliament voted strongly in favour of a bill that would bolster EU oversight and allow Brussels to fine carmakers up to €30,000 (£26,000) per vehicle. The new measures will eventually take diesel cars off the roads, Bienkowska said: “Diesel will not disappear from one day to another. But I am quite sure they will disappear much faster than we can imagine.” The mayors of Paris, Madrid and Athens have already signalled a future ban on diesel vehicles. Also on Tuesday, the mayor of London announced tough new proposals to charge polluting vehicles for entering the UK’s capital and the national government may announce similar measures across the country in the coming weeks. “Frankly, we don’t trust the manufacturers,” Sadiq Khan said. Under the EU measures backed on Tuesday, carmakers would no longer directly pay testing agencies for pollution measurements in a bid to break their cosy relationships. EU nations now have to fund car exhaust testing centres, although they may levy fees from car makers to do so. Brussels would also get powers to carry out vehicle spot-checks and levy fines, while national authorities would be able to peer-review each other’s decisions. The law still needs to be finalised in negotiations between EU lawmakers, the European commission and member states. The Volkswagen emissions scandal explained Read more However, the new measures stopped short of creating an independent EU-wide surveillance agency to monitor vehicle emissions, which was one of the key recommendations of a parliamentary report into the dieselgate scandal. National agencies were seen as too vulnerable to lobbying by powerful motor companies in the same country and the report accused EU legislators and governments of caving in to lobbying from the motor industry. Keith Taylor, Green party MEP for the south-east of England and a member of the European parliament’s environment and transport committees, said: “UK Conservative MEPs were the biggest opponents of the dieselgate report and were responsible for watering down its findings, which excoriate their friends in the car industry.” Julia Poliscanova, at the NGO Transport & Environment, said: “It is disappointing that MEPs have rejected the opportunity to make dieselgate history by establishing an independent agency. Nevertheless the parliament has strengthened the new powers for the European commission to spot check cars on the road and properly scrutinise national regulators. Overall this is a good package in response to the dieselgate scandal that has poisoned the air we all breathe.” Corporate Europe Observatory’s campaigner Fabian Hübner said: “The car industry should never have had such a big leverage over emissions regulation. It’s a scandal for how long it has been the puppet master of commission and member states.”
// write callback // Write data to browsers and close the socket. void write_cb(struct ev_loop *loop, struct ev_io *watcher, int revents) { Client *cli; int n; if(EV_ERROR & revents){ perror("invalid event"); return; } cli = (Client*) watcher->data; n = send(watcher->fd, cli->data, cli->len, 0); d_printf("send <%i> [%i] bytes\n", watcher->fd, n); ev_io_stop(loop, watcher); free(cli->data); close(cli->sd); free(cli); d_printf("close <%i>\n", watcher->fd); }
// this worker deals specifically with listening for processes
// which are using the process_OK_message feedback for maestro.
//
func (this *epollListener) listener() {
    var event syscall.EpollEvent
    ok_message_len := len(process_OK_message)
    ok_first_char := []rune(process_OK_message)[0]
    var local_buf = make([]byte, ok_message_len*4, ok_message_len*4)

    wakeup_fd, err := NewWakeupFd()
    if err != nil {
        debugging.DEBUG_OUT("CRITICAL - could not create a FD for wakingup processMessageListener() thread. FAILING\n")
        return
    }
    this.wakeup_fd_messageListener = wakeup_fd

    // add in our wakeup FD
    event.Events = syscall.EPOLLIN
    event.Fd = int32(wakeup_fd.fd_read)
    debugging.DEBUG_OUT("add FD from event_fd %d to epoll FD %d\n", wakeup_fd.fd_read, this.epfd)
    if e := syscall.EpollCtl(this.epfd, syscall.EPOLL_CTL_ADD, this.wakeup_fd_messageListener.fd_read, &event); e != nil {
        debugging.DEBUG_OUT("CRITICAL - could not add FD for waking up epoll - epoll_ctl: %+v\n", e)
        return
    }

    // loop forever waiting for stuff
    debugging.DEBUG_OUT("epollListener FD %d - listener() loop starting\n", this.epfd)
    for {
        nevents, e := syscall.EpollWait(this.epfd, this.events[:], -1)
        if e != nil {
            debugging.DEBUG_OUT("ERROR - epoll_wait: %+v\n", e)
        }
        if this.shutdown_messageListener > 0 {
            break
        }
        for ev := 0; ev < nevents; ev++ {
            if int(this.events[ev].Fd) == this.wakeup_fd_messageListener.fd_read {
                val, err2 := this.wakeup_fd_messageListener.ReadWakeup() // clear the wakeup
                if err2 == nil {
                    if val == listener_shutdown_magic_num {
                        debugging.DEBUG_OUT("epollListener listener() got shutdown magic num\n")
                        this.shutdown_messageListener = 1
                    } else {
                        debugging.DEBUG_OUT("epollListener listener() got wakeup\n")
                    }
                } else {
                    debugging.DEBUG_OUT("ERROR on WakeupFd ReadWakeup() %s\n", err2.Error())
                }
            } else {
                // it must be a process we are waiting for - look for its OK string
                val, ok := this.fdToJob.Load(this.events[ev].Fd)
                if ok {
                    process_ready := false
                    job := val.(*processStatus)
                    var _p0 unsafe.Pointer
                    currlen := len(job.startMessageBuf)
                    remain := ok_message_len - currlen
                    _p0 = unsafe.Pointer(&local_buf[0])
                    r0, _, e1 := syscall.RawSyscall(syscall.SYS_READ, uintptr(this.events[ev].Fd), uintptr(_p0), uintptr(remain))
                    n := int(r0)
                    if e1 != 0 {
                        errno = e1 // (errno is assumed to be declared elsewhere in this package)
                    } else {
                        if n > 0 {
                            offset := -1
                            stringified := string(local_buf[:n])
                            // fast path:
                            if currlen == 0 && len(stringified) >= remain {
                                if stringified[:ok_message_len] == process_OK_message {
                                    process_ready = true
                                }
                            } else {
                                // longer path
                                a := []rune(stringified) // convert to array of codepoints
                                if currlen == 0 {
                                    for i, c := range a {
                                        if c == ok_first_char {
                                            offset = i
                                            break
                                        }
                                    }
                                    if offset == -1 {
                                        // nothing, the output had no 'process_OK_message'
                                        continue
                                    }
                                } else {
                                    offset = 0
                                }
                                take := len(a)
                                if take > remain {
                                    take = remain
                                }
                                job.startMessageBuf = append(job.startMessageBuf, a[offset:take]...)
                                if (len(job.startMessageBuf) == ok_message_len) && (string(job.startMessageBuf) == process_OK_message) {
                                    process_ready = true
                                }
                            }
                            if process_ready {
                                this.removeProcessFDListen(int(this.events[ev].Fd))
                                // switch it over to stdout log capture and report ready:
                                var originid uint32
                                if job.originLabelId > 0 {
                                    originid = job.originLabelId
                                } else {
                                    originid = greasego.GetUnusedOriginId()
                                }
                                greasego.AddFDForStdout(int(this.events[ev].Fd), originid)
                                greasego.AddOriginLabel(job.originLabelId, job.job.GetJobName())
                                job.status = RUNNING
                                // submit an event that process is RUNNING
                                debugging.DEBUG_OUT("Got process_ready (OK string) for %s\n", job.job.GetJobName())
                                controlChan <- newProcessEvent(job.job.GetJobName(), internalEvent_job_running)
                            }
                        }
                    }
                } // if ok
            }
        }
    } // big for{}
}
// Package arg implements command-line argument parsing.
//
// It is very similar to the flag handling provided by the stdlib 'flag' package (without
// the global handling), and intended to be used as a complement to it.
//
// Usage
//
// Define an ArgSet using arg.NewArgSet(), then add arguments to it:
//  var s string
//  var i int
//  var set = arg.NewArgSet()
//  set.String(&s, "yep", "Usage for the 'yep' arg")
//  set.Int(&i, "inty", "Usage for the 'inty' arg")
//
// Or you can create custom args that satisfy the flag.Value (yes, 'flag'
// is intentional here) interface (with pointer receivers) and couple them to
// arg parsing:
//  var thingo myArgType
//  arg.Var(&thingo, "thingo", "Usage for 'thingo'")
//
// For such args, the default value is just the initial value of the variable.
//
// After all flags are defined, call ArgSet.Parse() with the list of arguments,
// which will usually be the output of FlagSet.Args():
//  err = set.Parse(myFlagSet.Args())
//  err = set.Parse(os.Args[1:]) // if there are no flags
//
package arg
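A minimal sketch of the workflow the doc comment above describes, combining a stdlib flag.FlagSet with an ArgSet. The import path is hypothetical and the method signatures are taken only from the doc comment, so treat this as an illustration rather than verified API usage.

package main

import (
    "flag"
    "fmt"
    "os"

    "example.com/arg" // hypothetical import path for the arg package
)

func main() {
    // Regular flags are handled by the stdlib flag package.
    fs := flag.NewFlagSet("example", flag.ExitOnError)
    verbose := fs.Bool("v", false, "verbose output")

    // Positional arguments are handled by an ArgSet, as the doc describes.
    var name string
    var count int
    set := arg.NewArgSet()
    set.String(&name, "name", "name of the thing to build")
    set.Int(&count, "count", "how many to build")

    // Parse flags first, then hand the remaining arguments to the ArgSet.
    fs.Parse(os.Args[1:])
    if err := set.Parse(fs.Args()); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(2)
    }
    fmt.Println(*verbose, name, count)
}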
package br.lassal.dbvcs.tatubola.integration.versioncontrol; import br.lassal.dbvcs.tatubola.builder.DatabaseSerializerFactory; import br.lassal.dbvcs.tatubola.builder.RelationalDBVersionFactory; import br.lassal.dbvcs.tatubola.integration.IntegrationTestInfo; import br.lassal.dbvcs.tatubola.integration.util.FileSystemUtil; import br.lassal.dbvcs.tatubola.versioncontrol.GitController; import br.lassal.dbvcs.tatubola.versioncontrol.VersionControlSystemException; import org.junit.FixMethodOrder; import org.junit.Test; import org.junit.runners.MethodSorters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import static org.junit.Assert.*; @FixMethodOrder(MethodSorters.NAME_ASCENDING) public class GitControllerTest { private static Logger logger = LoggerFactory.getLogger(GitControllerTest.class); private final File rootWorkspace = new File("gitControllerTests"); public GitControllerTest(){ this.cleanUpWorkspace(); } private void cleanUpWorkspace(){ FileSystemUtil.deleteDir(this.rootWorkspace); } private GitController vcs; /** * Creates a VCS Controller using the following ENV variables * - GITREPO_USER : username in GitHub for the repository * - GITREPO_PWD : password for the GitHub account * @return */ private GitController getVCSController(){ if(this.vcs == null){ try { URL githubRepo = new URL(IntegrationTestInfo.REMOTE_REPO); String username = IntegrationTestInfo.getVCSRepositoryUsername(); String password = IntegrationTestInfo.getVCSRepositoryPassword(); String baseBranch = IntegrationTestInfo.REPO_BASE_BRANCH; this.vcs = (GitController) this.getFactory() .createVCSController(IntegrationTestInfo.REMOTE_REPO, username, password, baseBranch); this.vcs.setWorkspacePath(this.rootWorkspace); } catch (MalformedURLException e) { logger.error("Error trying to create GitController in test", e); } } return this.vcs; } private String getGitHead() throws IOException { String currentGitHead = new String(Files.readAllBytes(Paths.get(this.rootWorkspace.getAbsolutePath(),".git/HEAD"))); return currentGitHead.trim(); } private DatabaseSerializerFactory getFactory(){ return RelationalDBVersionFactory.getInstance(); } /** * Tests the setup of the local repository using the test credentials * - Creates a new local Git repository in the rootWorkspace * - Checks if git creates the base infrastructure * * @throws VersionControlSystemException * @throws IOException */ @Test public void test1_setupLocalRepository() throws VersionControlSystemException, IOException { GitController vcs = this.getVCSController(); //first test verify workspace does not exists assertFalse(this.rootWorkspace.exists()); vcs.setupRepositoryInitialState(); assertTrue(this.rootWorkspace.exists()); String currentGitHead = this.getGitHead(); String expectedHead = "ref: refs/heads/master"; assertEquals(expectedHead, currentGitHead); } /** * Test the checkout of a new branch and commit in the local git repository in * the rootWorkspace * @throws VersionControlSystemException * @throws IOException */ @Test public void test2_changeBranch() throws VersionControlSystemException, IOException { GitController vcs = this.getVCSController(); vcs.setupRepositoryInitialState(); String branch = "DummyBranch"; vcs.checkout(branch); String currentGitHead = this.getGitHead(); String expectedHead = "ref: refs/heads/" + branch; assertEquals(expectedHead, 
currentGitHead); String sampleContent = "Line 001 \n Line 002 \n"; Path outputSampleFile = Paths.get(this.rootWorkspace.getAbsolutePath(), "samplefile.txt"); Files.write(outputSampleFile, sampleContent.getBytes()); vcs.commitAllChanges("Sample commit"); } /** * Try to sync the local changes (after test1 and test2) to the remote server using * invalid credentials; that way an exception is expected * @throws VersionControlSystemException */ @Test(expected = VersionControlSystemException.class) public void test3_trySyncServerInvalidCredentials() throws VersionControlSystemException { GitController vcs = null; try{ URL githubRepo = new URL(IntegrationTestInfo.REMOTE_REPO); String username = "nonono"; String password = "<PASSWORD>"; String baseBranch = "DummyBranch"; vcs = (GitController) this.getFactory() .createVCSController(IntegrationTestInfo.REMOTE_REPO, username, password, baseBranch); vcs.setWorkspacePath(this.rootWorkspace); vcs.setupRepositoryInitialState(); }catch (Exception ex){ logger.error("Error during test of Sync server with wrong credentials", ex); } vcs.syncChangesToServer(); } }
<reponame>imgag/ProgrammierProjekt2019 #include "impactfilter.h" #include "ui_impactfilter.h" #include "filterdialog.h" #include "annotationservice.h" impactFilter::impactFilter(QWidget *parent) : QDialog(parent), ui(new Ui::impactFilter) { ui->setupUi(this); } impactFilter::~impactFilter() { delete ui; } /** * @brief impactFilter::open opens the impact filter dialog, showing the specified region * @param region */ void impactFilter::openWindow(QString region) { ui->label_region->setText(region); this->filterByImpact = true; } double impactFilter::getHigh(){ if (!ui->ShowHIGH->isChecked()) { return -1; } return ui->HIGH_impact->value(); } double impactFilter::getModerate(){ if (!ui->ShowMODERATE->isChecked()) { return -1; } return ui->MODERATE_impact->value(); } double impactFilter::getLow(){ if (!ui->ShowLOW->isChecked()) { return -1; } return ui->LOW_impact->value(); } double impactFilter::getModifier(){ qDebug() << "boxChecked=" << ui->ShowMODIFIER; if (!ui->ShowMODIFIER->isChecked()) { return -1; } return ui->MODIFIER_impact->value(); } double impactFilter::getUnknown(){ if (!ui->ShowUNKNOWN->isChecked()) { return -1; } return ui->unknown_impact->value(); } bool impactFilter::isFilterByImpact(){ return this->filterByImpact; } void impactFilter::on_resetButton_clicked(){ filterByImpact = false; }
/** * Deserializes XML-based output from cTAKES to jCas object. * @param stream of XML file. * @param jcas {@see jCas} object used to keep the jcas from XML file. */ public static void deserialize(InputStream stream, JCas jcas) { try { XmiCasDeserializer.deserialize(new BufferedInputStream(stream), jcas.getCas()); } catch (FileNotFoundException fnfe) { fnfe.printStackTrace(); } catch (SAXException ioe) { ioe.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally { try { stream.close(); } catch (IOException ioe) { ioe.printStackTrace(); } } }
# -*- coding: utf-8 -*- """ Created on Sun Apr 12 07:49:36 2020 @author: naoki """ N = int(input()) xl = [list(map(int, input().split())) for _ in range(N)] left_right = [[row[0]-row[1], row[0]+row[1]] for row in xl] left_right = sorted(left_right, key = lambda x:int(x[1])) count = 0 now = -100000000000 for row in (left_right): if row[0] < now: continue else: count += 1 now = row[1] print(count)
<reponame>fritzminor/schnellsuchfeldbhh import { FC } from "react"; import { AppState } from "../store/AppState"; import { Store } from "../store/Store"; import { ModalAnalysis } from "./ModalAnalysis"; import { ModalMessage } from "./ModalMessage"; export type ModalInfoProps = { modalInfo: AppState["modalInfo"]; hideUserMessage: Store["hideUserMessage"]; }; export const ModalInfo: FC<ModalInfoProps> = ( { modalInfo, hideUserMessage }) => { if (modalInfo) return (typeof modalInfo === "string") ? <ModalMessage active={!!modalInfo} hideModal={ () => { hideUserMessage(); }} > <div className="notification">{modalInfo}</div> </ModalMessage> : <ModalAnalysis modalInfo={modalInfo} hideModal={ () => { hideUserMessage(); }} > </ModalAnalysis>; else return <></>; };
<reponame>leozz37/makani<filename>avionics/firmware/params/param_util.c /* * Copyright 2020 Makani Technologies LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "avionics/firmware/params/param_util.h" #include <ctype.h> #include <stdlib.h> #include <string.h> #define MAC_BYTES 6 #define MAC_HEX_STRING_LEN (MAC_BYTES * 2) #define MAC_MAX_VALUE ((1ULL << (MAC_BYTES * 8)) - 1) bool ParamStringToEthernetAddress(const char *str, EthernetAddress *addr) { const uint32_t len = (uint32_t)strlen(str); if (len != MAC_HEX_STRING_LEN) { return false; } for (uint32_t i = 0; i < len; ++i) { if (!isxdigit((unsigned char)str[i])) { return false; } } uint64_t intval = strtoull(str, NULL, 16); if (intval > MAC_MAX_VALUE) { return false; } if (addr != NULL) { *addr = (EthernetAddress) { (uint8_t)((intval >> 40) & 0xff), (uint8_t)((intval >> 32) & 0xff), (uint8_t)((intval >> 24) & 0xff), (uint8_t)((intval >> 16) & 0xff), (uint8_t)((intval >> 8) & 0xff), (uint8_t)((intval >> 0) & 0xff) }; } return true; }
. In the last years, the development of immunophenotypic and molecular analyses allowed to recognize several cases of hybrid acute leukemia (AL), whose blast cell display both lymphoid and myeloid features. Hybrid, or mixed-lineage, AL seems to have distinct clinical manifestations and hematological findings, and is mainly characterized by resistance to chemotherapy and poor prognosis. We report on a patient with AL, which showed a very rapid switch from the lymphoblastic phenotype exhibited at presentation to a myelomonoblastic one, appeared at first relapse, and lastly progressed to an undifferentiated leukemia in the terminal phase. Together with this morphologic and cytochemical evolution, leukemic cells expressed, besides the primary early-B antigens, new immunological markers related to T-lymphocytic and myeloid lineages. Based on this observation and current understanding of the ontogenesis of hematologic malignancies, we discuss biological mechanisms which are likely to underlie hybrid leukemia.
<filename>NativeRenderingPlugin/PluginSource/source/ShaderProp.h #pragma once #include "PlatformBase.h" #include <assert.h> enum PropType { Float, Vector2, Vector3, Vector4, Matrix, FloatBlock }; const std::string propTypeStrings[] = { "Float", "Vector2", "Vector3", "Vector4", "Matrix", }; struct ShaderProp { ShaderProp(PropType type_, std::string name_) : type(type_) , name(name_) { #if SUPPORT_OPENGL_UNIFIED || SUPPORT_OPENGL_LEGACY uniformIndex = UNIFORM_UNSET; #endif offset = 0; size = 0; arraySize = 0; } PropType type; const std::string typeString() { return propTypeStrings[(size_t)type]; } std::string name; /* float value(int n) { if (!constantBuffer || size == 0) return 0.0f; return *((float*)(constantBuffer + offset + n * sizeof(float))); } */ #if SUPPORT_OPENGL_UNIFIED || SUPPORT_OPENGL_LEGACY static const int UNIFORM_UNSET = -2; static const int UNIFORM_INVALID = -1; int uniformIndex; #endif uint16_t offset; uint16_t size; uint16_t arraySize; static PropType typeForSize(uint16_t size) { switch (size) { case sizeof(float) : return Float; case 2 * sizeof(float) : return Vector2; case 3 * sizeof(float) : return Vector3; case 4 * sizeof(float) : return Vector4; case 16 * sizeof(float) : return Matrix; default: return FloatBlock; } } static uint16_t sizeForType(PropType type) { switch (type) { case Float: return sizeof(float); case Vector2: return 2 * sizeof(float); case Vector3: return 3 * sizeof(float); case Vector4: return 4 * sizeof(float); case Matrix: return 16 * sizeof(float); default: assert(false); return 0; } } };
/** * As the default WS-Addressing binding since JAXB 2.1 uses the * {@link W3CEndpointReference} class, we must also use this class, otherwise * JAXB would complain, that there are 2 contexts for the same namespace+element * combination.<br> * The issue with {@link W3CEndpointReference} is that it can easily be created * using the {@link W3CEndpointReferenceBuilder} class, but it's not possible to * extract information from it (get....). This class offers a <b>*HACK*</b> * workaround by using reflection (incl. setAccessible) to access private fields * of {@link W3CEndpointReference}. This was only tested on Sun JDKs, so use at * your own risk!!! * * @author PEPPOL.AT, BRZ, Philip Helger */ public final class W3CEndpointReferenceUtils { private static Field s_aFieldAddress; private static Field s_aFieldReferenceParameters; private static Field s_aFieldAddressURI; private static Field s_aFieldElementsElements; static { // Resolve inner classes final String sAddress = W3CEndpointReference.class.getName() + "$Address"; final String sElements = W3CEndpointReference.class.getName() + "$Elements"; Class<?> aClassAddress = null; Class<?> aClassElements = null; // for all inner classes for (final Class<?> aClass : W3CEndpointReference.class .getDeclaredClasses()) { final String sClassName = aClass.getName(); if (sClassName.equals(sAddress)) aClassAddress = aClass; else if (sClassName.equals(sElements)) aClassElements = aClass; } if (aClassAddress == null) throw new RuntimeException("Failed to resolve class " + sAddress); if (aClassElements == null) throw new RuntimeException("Failed to resolve class " + sElements); try { // Resolve required fields and make them accessible s_aFieldAddress = W3CEndpointReference.class .getDeclaredField("address"); // TODO use PrivilegedAction in phloc-commons >= 3.0.1 s_aFieldAddress.setAccessible(true); s_aFieldReferenceParameters = W3CEndpointReference.class .getDeclaredField("referenceParameters"); s_aFieldReferenceParameters.setAccessible(true); s_aFieldAddressURI = aClassAddress.getDeclaredField("uri"); s_aFieldAddressURI.setAccessible(true); s_aFieldElementsElements = aClassElements .getDeclaredField("elements"); s_aFieldElementsElements.setAccessible(true); } catch (final Throwable t) { throw new RuntimeException( "Failed to init W3CEndpointReference Fields for reflection"); } } private W3CEndpointReferenceUtils() { } /** * Create a new endpoint reference for the given address without reference * parameters. * * @param sAddress * The address to use. May not be <code>null</code>. * @return The non-<code>null</code> endpoint reference for the given * address */ public static W3CEndpointReference createEndpointReference( final String sAddress) { return new W3CEndpointReferenceBuilder().address(sAddress).build(); } /** * Create a new endpoint reference for the given address, using the * specified reference parameters. * * @param sAddress * The address to use. May not be <code>null</code>. * @param aReferenceParameters * The non-<code>null</code> list of reference parameters. May * not be <code>null</code>. 
* @return The non-<code>null</code> endpoint reference for the given * address */ public static W3CEndpointReference createEndpointReference( final String sAddress, final List<Element> aReferenceParameters) { W3CEndpointReferenceBuilder aBuilder = new W3CEndpointReferenceBuilder() .address(sAddress); for (final Element aReferenceParameter : aReferenceParameters) { aBuilder = aBuilder.referenceParameter(aReferenceParameter); } return aBuilder.build(); } /** * Get the address contained in the passed endpoint reference. * * @param aEndpointReference * The endpoint reference to retrieve the address from. May not * be <code>null</code>. * @return The contained address. */ public static String getAddress( final W3CEndpointReference aEndpointReference) { try { // Get the "address" value of the endpoint reference final Object aAddress = s_aFieldAddress.get(aEndpointReference); if (aAddress == null) return null; // Get the "uri" out of the "address" field return (String) s_aFieldAddressURI.get(aAddress); } catch (final Throwable t) { throw new IllegalStateException(t); } } /** * Get a list of all reference parameters contained in the passed endpoint * reference. * * @param aEndpointReference * The endpoint reference to retrieve the reference parameters. * May not be <code>null</code>. * @return A mutable element list */ @SuppressWarnings("unchecked") public static List<Element> getReferenceParameters( final W3CEndpointReference aEndpointReference) { try { // Get the "referenceParameters" value of the endpoint reference final Object aReferenceParameters = s_aFieldReferenceParameters .get(aEndpointReference); if (aReferenceParameters == null) return null; // Get the "elements" out of the "referenceParameters" field return (List<Element>) s_aFieldElementsElements .get(aReferenceParameters); } catch (final Throwable t) { throw new IllegalStateException(t); } } /** * Get the reference parameter at the given index * * @param aEndpointReference * The object to retrieve the reference parameter from. May not * be <code>null</code>. * @param nIndex * The index to retrieve. Should not be negative. * @return <code>null</code> if the index is invalid */ public static Element getReferenceParameter( final W3CEndpointReference aEndpointReference, final int nIndex) { // Get all reference parameters final List<Element> aReferenceParameters = getReferenceParameters(aEndpointReference); // And extract the one at the desired index. return aReferenceParameters.get(nIndex); } }
<reponame>ulgom/usergrid-stack package org.usergrid.system; import me.prettyprint.hector.api.Cluster; import me.prettyprint.hector.api.exceptions.HectorException; import org.codehaus.jackson.JsonNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.usergrid.utils.JsonUtils; import org.usergrid.utils.MapUtils; import org.usergrid.utils.TimeUtils; import java.util.Properties; /** * Provide a single spot for monitoring usergrid system health * * @author zznate */ public class UsergridSystemMonitor { private static final String TIMER_THRESHOLD_TRIGGERED_MSG = "TimerThreshold triggered on duration: %d \n%s\n----------------"; private static Logger logger = LoggerFactory.getLogger(UsergridSystemMonitor.class); private final String buildNumber; private final Cluster cluster; /** The trigger point for printing debugging information. {@see #maybeLogPayload}*/ private long timerLogThreshold = 15*1000; public static final String LOG_THRESHOLD_PROPERTY = "metering.request.timer.log.threshold"; /** * Must be instantiated with a build number and a cluster to be of any use. Properties can be null. * Threshold property must be a form compatible with {@link TimeUtils#millisFromDuration(String)} */ public UsergridSystemMonitor(String buildNumber, Cluster cluster, Properties properties) { this.buildNumber = buildNumber; this.cluster = cluster; if ( properties != null ) { timerLogThreshold = TimeUtils.millisFromDuration( properties.getProperty(LOG_THRESHOLD_PROPERTY,"15s")); } } /** * Wraps "describe_thrift_version API call as this hits a static string in Cassandra. * This is the most lightweight way to assure that Hector is alive and talking to the * cluster. * @return true if we have a lit connection to the cluster. */ public boolean getIsCassandraAlive() { boolean isAlive = false; try { isAlive = cluster.describeThriftVersion() != null; } catch (HectorException he) { logger.error("Could not communicate with Cassandra cluster",he); } return isAlive; } /** * * @return a string representing the build number */ public String getBuildNumber() { return buildNumber; } /** * Uses {@link JsonUtils#mapToFormattedJsonString(Object)} against the object if the duration is * greater than {@link #timerLogThreshold}. When using the varargs form, the number of elements * must be even such that key,value,key,value mapping via {@link MapUtils#map(Object...)} can * collect all the elements. * * Conversion to a map this way let's us lazy create the map if and only if the triggering threshold is true * or we are in debug mode. * * @param duration * @param objects */ public void maybeLogPayload(long duration, Object... objects) { if ( duration > timerLogThreshold || logger.isDebugEnabled() ) { String message; if ( objects.length > 1) { message = formatMessage(duration, MapUtils.map(objects)); }else { message = formatMessage(duration, objects); } logger.info(message); } } static String formatMessage(long duration, Object object) { return String.format(TIMER_THRESHOLD_TRIGGERED_MSG, duration, JsonUtils.mapToFormattedJsonString(object)); } }
<filename>voicebuilder/scripts/python/voice_build.py #!/usr/bin/python3 # -*- coding:utf-8 -*- import os import re import sys import subprocess import shlex import getopt import logging import traceback from pathlib import Path from shutil import copyfile marytts_home = os.environ['MARYTTS_HOME'] marytts_version = os.environ['MARYTTS_VERSION'] marytts_builder_base = os.path.join(marytts_home, 'target', 'marytts-builder-' + marytts_version) voices_builder_base = os.path.join(marytts_home,'voicebuilder') voices_home = os.environ['MARYTTS_VOICES_HOME'] logging.getLogger().setLevel(logging.INFO) def get_silence_file(samplerate): silence_file = os.path.join(voices_builder_base, "silence_%skHz.wav" % samplerate) if not os.path.isfile(silence_file): cmd = "sox -n -r %s -b 16 -c 1 %s trim 0.0 2.0" % (samplerate, silence_file,) subprocess.Popen(shlex.split(cmd)).wait() return silence_file def is_valid_wav(wavfile): if not wavfile.endswith(".wav"): return False try: sox_output = subprocess.check_output(["sox", wavfile, "-n", "stat"], stderr=subprocess.STDOUT).decode('utf-8') if 'WARN' in sox_output: return False except: return False return True def valid_pitch_pointers(wavfile): if not wavfile.endswith(".wav"): return False try: praat_script = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'pitch.praat') cmd = "praat --run %s %s 75 300" % (praat_script, wavfile) praat_output = subprocess.check_output(shlex.split(cmd), stderr=subprocess.STDOUT) if len(praat_output) >0 and b'Error' in praat_output: return False except Exception: traceback.print_exc() return False return True def is_silent(wavfile): # Typical sox output.... (not necessarily of a silent file) # Samples read: 57920 # Length (seconds): 3.620000 # Scaled by: 2147483647.0 # Maximum amplitude: 0.555573 # Minimum amplitude: -0.720520 # Midline amplitude: -0.082474 # Mean norm: 0.029584 # Mean amplitude: 0.000014 # RMS amplitude: 0.074104 # Maximum delta: 0.677338 # Minimum delta: 0.000000 # Mean delta: 0.005242 # RMS delta: 0.013970 # Rough frequency: 480 # Volume adjustment: 1.388 try: sox_output = subprocess.check_output(["sox", wavfile, "-n", "stat"], stderr=subprocess.STDOUT).decode('utf-8') for attribute in sox_output.split('\n'): attribute = re.sub(' +', ' ', attribute) if attribute.startswith("Mean amplitude:"): mean_amplitude = float(attribute.split(':')[1].strip()) if mean_amplitude < 0.000002 and mean_amplitude > 0.0: return True if attribute.startswith("Maximum amplitude:"): max_volume = float(attribute.split(':')[1].strip()) if max_volume < 0.0001: return True return False except: return True def pad_with_silence(wavfile): file_details = str(subprocess.check_output(["file", wavfile])) if '48000 Hz' in file_details: silence_file = get_silence_file('48000') tmpwavfile = wavfile.replace(".wav","_tmp.wav") os.rename(wavfile, tmpwavfile) cmd = "sox %s %s %s %s" % (silence_file, tmpwavfile, silence_file, wavfile) subprocess.Popen(shlex.split(cmd)).wait() os.remove(tmpwavfile) return True def execute_java_cmd(cmd): try: logging.info(cmd) cmd_output = subprocess.check_output(shlex.split(cmd)).decode('utf-8') except Exception as ex: logging.info("voice_build.py::execute_java_cmd exception " + ex) raise if 'Exception' in cmd_output: logging.info("voice_build.py::execute_java_cmd, 'Exception' in output: " + cmd_output) return False logging.info("voice_build.py::execute_java_cmd completed successfully") return True def rename_file_extension(filepath, newextension): base = os.path.splitext(filepath)[0] os.rename(filepath, base + 
newextension) def init_voice_build(voice_build_dir, voice_name, locale): logging.info("init_voice_build: voice_build_dir %s, voice_name %s - started" % (voice_build_dir, voice_name)) txt_done_data = {} voice_wavs_dir = os.path.join(voice_build_dir, 'wav') voice_prompts_dir = os.path.join(voice_build_dir, 'data') # renaming will exclude wav files from MaryTTS basenamelist for file in os.listdir(voice_wavs_dir): if file.endswith(".wav"): wavfile = os.path.join(voice_wavs_dir, file) txtfile = os.path.join(voice_prompts_dir, file.replace(".wav",".txt")) if not os.path.isfile(txtfile): logging.info("voice_build.py couldn't find txtfile %s " % txtfile) rename_file_extension(wavfile, ".notextfile") continue if not is_valid_wav(wavfile): logging.info("voice_build.py found that %s not a valid wavfile " % wavfile) rename_file_extension(wavfile, ".notvalidwavfile") continue #if is_silent(wavfile): #logging.info("voice_build.py found that %s is silent" % wavfile) #rename_file_extension(wavfile, ".silentwavfile") #continue if not valid_pitch_pointers(wavfile): logging.info("voice_build.py found that %s has invalid pitch pointers" % wavfile) rename_file_extension(wavfile, ".invalidpitchpointers") continue pad_with_silence(wavfile) key=file.replace('.wav','') with open(txtfile, 'r', encoding='utf-8') as f: value=f.read() txt_done_data[key]=value.strip() with open(os.path.join(voice_build_dir, 'txt.done.data'), 'w', encoding='utf-8') as txtdone: for key,value in txt_done_data.items(): txtdone.write("( " + key + " \"" + value + "\" )\n") # logging.info("voice_build.py::init_voice_build %s copying templates.." % voice_name) # importMain.config copyfile(os.path.join(voices_builder_base ,'templates', 'importMain.config.template'), os.path.join(voice_build_dir,'importMain.config')) # database.config with open(os.path.join(voices_builder_base, 'templates', 'database.config.template'), 'r', encoding='utf-8') as src: lines = src.readlines() with open(os.path.join(voice_build_dir, 'database.config'), 'w', encoding='utf-8') as trgt: for line in lines: line = line.replace('VOICE_BUILD_DIR', voice_build_dir) line = line.replace('VOICENAME', voice_name) line = line.replace('VOICE_LOCALE', locale) trgt.write(line) logging.info("voice_build.py::init_voice_build %s completed" % voice_name) def audio_converter(voice_build_recordings_dir, voice_build_dir, voice_name): logging.info("audio_converter %s " % voice_build_dir) voice_build_wavs_dir = os.path.join(voice_build_dir, "wav") Path(voice_build_wavs_dir).mkdir(parents=True, exist_ok=True) #cmd = 'java -showversion -Xmx1024m -cp "%s/lib/*" -Dmary.base="%s" marytts.util.data.audio.AudioConverterHeadless %s %s' % (marytts_builder_base, marytts_builder_base, voice_build_recordings_dir, voice_build_wavs_dir) cmd = 'java -cp "%s/lib/*" -Dmary.base="%s" marytts.util.data.audio.AudioConverterHeadless %s %s' % (marytts_builder_base, marytts_builder_base, voice_build_recordings_dir, voice_build_wavs_dir) return execute_java_cmd(cmd) def voice_import(voice_name): logging.info("voice_build.py::voice import starting %s" % voice_name) voice_build_dir = os.path.join(voices_home, voice_name) cmd = 'java -cp "%s/lib/*" -Dmary.base="%s" marytts.tools.voiceimport.DatabaseImportMainHeadless %s' % (marytts_builder_base, marytts_builder_base, voice_build_dir) return execute_java_cmd(cmd) def generate_voice(audio_source_dir, voice_name, locale, peform_speech_analysis=False): logging.info("generate_voice: source_dir %s, voice_name %s, locale %s" % (audio_source_dir, voice_name, locale)) success 
= False try: voice_build_dir = os.path.join(voices_home, voice_name) logging.info("Creating voice in dir %s" % voice_build_dir) if audio_converter(audio_source_dir, voice_build_dir, voice_name): init_voice_build(voice_build_dir, voice_name, locale) if voice_import(voice_name): logging.info("voice built successfully") success = True except: logging.error("Unexpected exception: %s", sys.exc_info()[0]) return success def display_help(): print ("") print ("Build a marytts voice from the command line") print ("") print ("Usage:") print ("") print ("$ voice-build.py -v <voice name> -l <locale>") def main(argv): try: opts, args = getopt.getopt(argv,"hv:s:l:", ["voice=","source=","locale="]) except getopt.GetoptError: display_help() return if len(opts) < 2: display_help() return source_audio_dir, voice_name, locale = '','','' for opt, arg in opts: if opt == '-h': display_help() elif opt in ("-v","--voice"): voice_name = arg elif opt in ("-s","--source"): source_audio_dir = arg elif opt in ("-l","--locale"): locale = arg if len(source_audio_dir) > 0 and len(voice_name) > 0: generate_voice(source_audio_dir, voice_name, locale, False) else: display_help() if __name__ == "__main__": main(sys.argv[1:])
#include <bits/stdc++.h>
using namespace std;
#define IOS ios::sync_with_stdio(0); cin.tie(0); cout.tie(0);
//#define endl "\n"
#define int long long
#define f(i,p) for(int i=0;i<p;i++)
#define el

const int N = 1e9 + 7;
const int P = 998244353;

int cnt[100005];

typedef struct dot {
    int ind;
    int elem;
    double div;
    dot() {};
} dot;

bool sortbyf(pair<int,int> &a, pair<int,int> &b) {
    if (a.first < b.first) {
        return true;
    } else if (a.first == b.first) {
        return (a.second < b.second);
    }
    return false;
}

#define MAXB 200007
#define MOD 1000000007

int32_t main() {
    int n; cin >> n;
    int x, a = 0, b = 0, ansp = 0;
    // a counts subarrays ending at the current index with a positive product,
    // b counts those with a negative product (elements assumed nonzero);
    // a negative element flips the sign of every such subarray, hence the swap.
    for (int i = 0; i < n; i++) {
        cin >> x;
        a++;
        if (x < 0) swap(a, b);
        ansp += a;
    }
    // n*(n+1)/2 is the total number of subarrays:
    // print the negative-product count, then the positive-product count.
    cout << (n*(n+1))/2 - ansp << " " << ansp << endl;
}
eps = 1e-7

def f(d):
    l, r = 2.0, d
    while r - l > eps:
        mid = (l + r) * 0.5
        if mid ** 2 / (mid - 1) < d:
            l = mid
        else:
            r = mid
    return (l + r) * 0.5

t = int(input())
for _ in range(t):
    a = int(input())
    if a == 0:
        print('Y', 0.0, 0.0)
    elif a in [1, 2, 3]:
        print('N')
    else:
        x = f(a)
        print('Y', x, x / (x - 1.0))
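A short note on what the bisection above computes (my reading of the code; the problem statement is not included here): the program looks for x and y with x + y = x·y = a and prints x together with y = x/(x-1). With that substitution,

\[
x + y \;=\; x + \frac{x}{x-1} \;=\; \frac{x^{2}}{x-1},
\qquad
x \cdot y \;=\; x\cdot\frac{x}{x-1} \;=\; \frac{x^{2}}{x-1},
\]

so both the sum and the product equal f(x) = x^2/(x-1). On [2, ∞) this f is increasing with minimum f(2) = 4, which is why a = 0 is answered directly, a in {1, 2, 3} prints 'N', and for a ≥ 4 the loop bisects f(x) = a on [2, a] to within eps.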
/**
 * Lists the content of the current directory and its subdirectories.
 * @param dir given directory
 * @param path path of the parent directory
 * @return content of the current directory and its subdirectories
 */
public String recursiveListing(Directory dir, String path) {
    String content = dir.getContent();
    StringBuilder output = new StringBuilder(path.equals("") ? content : path + ": \n" + content);
    output.append(content.equals("") ? "\n" : "\n\n");
    for (FileSystemObject child : dir) {
        if (child instanceof Directory) {
            String childPath = path + "/" + child.getName();
            output.append(recursiveListing((Directory) child, childPath));
        }
    }
    return output.toString();
}
Care for Vulnerable Elderly in Cardiology: A Program for Daily Practice Background: The percentage of cardiac patients aged ≥75 has increased considerably over the past decades. To optimize multidisciplinary care for these frail elderly patients, a program of intensive medical and nursing care was started at the Noord West Ziekenhuisgroep department of cardiology. Methods: Patients over 70 years of age admitted to the department of cardiology were included and treated according to a redesigned care process that focused on expedited mobilization and on care delivered by a team headed by an advanced practice nurse, up to and including the first outpatient visit. Results: A total of 951 patients over 70 years were included in the frail elderly project. The average length of stay of the frail elderly was 6 days (SD 5). In the first 30 days after discharge, 12% of these patients were readmitted with heart failure and 2% with dehydration. Mortality during admission was 3%, and 11% died within 3 months after discharge. Conclusions: This observational study shows, over a 4-year period, the vulnerability of aged cardiac patients. They were mainly admitted for (diastolic) heart failure, usually in combination with atrial fibrillation and hypertension. Their length of stay was on average 6 days, with 11% mortality at 90-day follow-up.
If you made a list of the factors that landed Donald Trump in the White House, Trump campaign digital director Brad Parscale would put Facebook near the top. “Facebook now lets you get to places—and places possibly that you would never go with TV ads,” Parscale told CBS earlier this month. “Now, I can find, you know, 15 people in the Florida Panhandle that I would never buy a TV commercial for. And, we took opportunities that I think the other side didn't.” The relationship with Silicon Valley wasn’t one sided: as major tech companies face mounting criticism for allowing political disinformation to proliferate on their platforms, a new study suggests that employees at Facebook, Google, and Twitter also took on crucial roles within the Trump campaign, acting more like political strategists than on-site salespeople. The collaboration allowed Team Trump to shore up its digital operations in a way that would have been difficult to accomplish on its own, according to Politico, which got an early look at the study. Embedded tech employees took on responsibilities such as targeting hard-to-reach voters and coming up with responses to probable lines of attack during debates. “Facebook, Twitter, and Google [went] beyond promoting their services and facilitating digital advertising buys,” the peer-reviewed paper concludes. The companies “actively [shaped] campaign communications through their close collaboration with political staffers.” The Clinton campaign turned down the assistance, which Facebook, Google, and Twitter all offered to 2016 candidates free of charge. (One tech company employee in the study said her campaign “viewed us as vendors rather than consultants.”) The Trump campaign, on the other hand, used the “embeds” extensively during the general election. Ultimately, the work each company did for Trump—Google recommending geographically targeted ads, Twitter analyzing the success of tweet-based fundraising efforts, and Facebook identifying which pictures performed best on Instagram, for instance—helped close the gap between him and Clinton, experts cited in the study conclude. The collaboration likely proved lucrative for all three companies. Online political-ad spending during the 2016 election totaled $1.4 billion—the Trump campaign spent $70 million on Facebook alone, making client services a valuable extension of Facebook’s ad product. The collaboration also conferred additional benefits, as Politico points out: national exposure, a testing ground for new features and products, and the chance to build a relationship with a candidate who might end up holding the regulatory reins once in office. In the wake of Trump’s victory, Silicon Valley is facing difficult questions about that symbiotic relationship, and a potential regulatory reckoning. It also underscores a nearly universal truth about how the tech and media industries treated the 2016 presidential race: employees at Facebook and Twitter, among other companies in the overwhelmingly liberal Bay Area, never really expected Trump to win.
On August 15, 2005, Israel began closing its small settlements in Gaza and withdrawing the troops that protected them as well as the forces that tried to stop smuggling along the Gaza-Egypt border. The last Israeli soldier left a month later, ending an Israeli presence that had begun with the 1967 war. Ten years later, Gaza is a quasi-state, with Hamas at its helm: seemingly Israel’s worst nightmare. Yet the Gaza withdrawal was in many ways a success, and Israelis and their American allies would do well to reflect on the benefits and the costs as they contemplate future policies. When Ariel Sharon became prime minister in 2001, withdrawal did not seem in the cards. As a government minister in the past, Sharon had been a prominent supporter of the construction of Israeli settlements in the West Bank, and he won election in part because of his reputation for being tough on Palestinians. So when Sharon announced plans for the unilateral withdrawal, it seemed the Israeli presence in Gaza came at a high cost with little reward: Some 3,000 soldiers protected 8,500 settlers who lived among a population of 1.4 million Palestinians. Since the beginning of the second intifada in 2000, Israel had lost 230 soldiers in Gaza and paid tens of millions a year. The occupation as a whole was a blemish on Israel’s international reputation. But the withdrawal was about more than merely shedding part of an albatross. Sharon heralded the withdrawal as a “bold move to end the stalemate in the peace process.” U.S. President George W. Bush declared the withdrawal a “courageous initiative” and noted, “this is the opportunity for the world to help the Palestinians stand up a peaceful society and a hopeful society.” Many inside and outside of the country hoped that a successful Gaza withdrawal would pave the way for further unilateral withdrawals in the West Bank — part of Sharon’s broader policy of aligning Israel’s Jewish demographics and its borders. These hopes received a boost when Israel also evacuated several small settlements in the West Bank shortly thereafter. The withdrawal was unilateral and there were no attendant negotiations with a Palestinian partner. Indeed, in a letter to Bush, Sharon declared “there exists no Palestinian partner with whom to advance peacefully toward a settlement.” This enabled Sharon to both withdraw from the costly occupation in Gaza while continuing to claim he didn’t trust the Palestinians and wouldn’t support a deal. This insulated Sharon from claims he was a freier — a “sucker” —the ultimate political insult for Israelis, many of whom believe Yasser Arafat played them for fools during the peace negotiations of the 1990s. One former Sharon advisor even portrayed the withdrawal as “formaldehyde” for the peace process. By ending Israel’s control over more than a million Gazans, the advisor claimed Israel would also be able to fend off pressure to withdraw from the West Bank and delay or perhaps even defuse the “demographic time bomb,” which refers to the point at which Arabs under Israeli control would outnumber the country’s Jews. The Gaza disengagement was bumpy, both politically and operationally, but ultimately successful. Politically, the Israeli right was particularly critical. 
Benjamin Netanyahu in 2005 resigned from Sharon’s cabinet, refusing “to be a partner to a move which ignores reality, and proceeds blindly toward turning the Gaza Strip into a base for Islamic terrorism which will threaten the state.” (As a member of the cabinet, Netanyahu had previously went along with the disengagement — he was for the withdrawal before he was against it.) On the ground, some settlers in Gaza left as ordered, but many others refused to go. Israeli soldiers forced these settlers to leave, often dragging them kicking and screaming; other settlers protested. There were calls for soldiers to disobey orders. But despite some blood-curdling rhetoric from extremists within the settler community, there was little violence, and the army proved loyal. The Gaza withdrawal fueled hopes for additional, unilateral withdrawals from the West Bank, but instead stands as the last major shift in territorial ownership in a conflict that began over 60 years ago. Gaza and its problems did not go away. Rather, Israel’s withdrawal from Gaza left a vacuum that was filled by Hamas and a new set of security threats. The withdrawal also created confusion about what Israel should do in response. As rockets from Gaza rained down, many Israelis questioned the decision to leave, believing they made a major concession only to see violence grow. After Netanyahu became prime minister in 2009, there were calls for reversing the withdrawal. The right-wing politician and head of the Israel Beiteinu party (which represents many Jews from the former Soviet Union), Avigdor Lieberman, when he was Foreign Minister, declared that the only way to stop more attacks was “a full occupation of the Gaza Strip.” Rise of Hamas as Quasi-State The second intifada claimed approximately 1,000 Israeli and 3,000 Palestinian lives — huge numbers for small communities — and left a dark legacy. Each side became convinced the other was not, and had never been, serious about peace. Israel, however, seemed the victor as Palestinian groups sued for peace. By 2005, Hamas was decimated, with its leaders dead and thousands of cadre in Israeli prisons. Yet even as its military strength was depleted, Hamas was growing stronger politically. Yasser Arafat’s death in 2004 removed the dominant political presence of the Palestinian national movement, and left Hamas’s more secular rival Fatah without its historic leader. Mahmoud Abbas, Fatah’s new leader, did not have Arafat’s charisma or stature. Although Fatah’s militant offshoot, the Al Aqsa Martyrs’ Brigade, often led the charge against Israel in the second intifada, the organization appeared bankrupt as a resistance movement. In the West Bank, many of its leaders were more warlords than warriors, extorting and abusing local Palestinians. Some Fatah leaders like Abbas embraced peace talks, but skepticism about the peace process made this seem idealistic at best and foolish at worst. Because Sharon had refused to negotiate with more moderate Palestinians when Israel left Gaza, Hamas was able to claim that violence, not negotiations, led to the Israeli withdrawal. Hamas also portrayed itself as an honest and competent administrator and steadfast in its resistance, in contrast to Fatah which had a deserved reputation for corruption. Finally, Hamas proved to be a better organized political party than Fatah. When elections were held in Gaza in 2006, Hamas won — surprising Western observers, Israelis, and Hamas itself. 
Hamas rose to the challenge, supplanting Fatah cadre, establishing a more effective police force, and otherwise expanding its hold. Hamas completed its takeover of Gaza from the Ramallah-based Palestinian Authority in 2007, seizing power and taking brutal revenge on Fatah security officials for their past abuses. Abbas meanwhile consolidated his position in the West Bank and harshly suppressed Hamas in collaboration with Israel. Palestinians were divided against themselves. From Israel’s point of view, its worst nightmare had come true: A terrorist group with ties to Iran was now a quasi-state right on Israel’s borders. Although many Hamas leaders made statements calling for prudence and playing down the conflict with Israel, the organization refused to abandon violence or openly embrace peace talks. Hamas leaders made noises about peace one day, and hurled anti-Semitic epithets the next. Debates persist over whether Hamas leaders are pragmatists trying to maintain credibility among the rank-and-file or patient men prepared to continue waging war until they destroy the Jewish state. In either case, as Hamas developed its political institutions, it also transformed guerrilla forces into an army and acquired a massive rocket arsenal with help from Iran. Israel’s Uneasy Gaza Policy Israel has tried to meet the threat emanating from Gaza through military strikes, economic pressure, and isolation, but it finds it difficult to calibrate this pressure and deal lasting setbacks to Hamas. For Israel, the biggest threat from Gaza was, and remains, rockets. From 2007 through June 2015, over 10,000 rockets and mortars have hit Israel — and over half of these strikes occurred outside the three wars. Many of the attacks between the wars were from groups like Palestine Islamic Jihad or other Hamas rivals. Rocket and mortar attacks killed over 40 Israelis between 2001 and the end of August 2014. In response to these and other provocations, Israel has gone to war with Hamas repeatedly since 2006. Israeli forces have hit Gaza hard, killing almost 1,400 Gazans in 2008-2009, 167 in 2012, and over 2,000 in 2014 — and losing roughly 80 soldiers of their own in these wars combined. Nor was there quiet for Gazans in between the conflicts. Since Hamas took power, Israeli forces regularly struck targets in Gaza and killed militants and their leaders, often in response to rocket fire from Gaza or acting on tactical intelligence of imminent attacks. Military strikes on those involved in launching rockets would disrupt attacks and send a message to the military wing that they would suffer directly if they threatened Israel. Israel uses economic pressure and control of the border to prevent Hamas from gaining access to more advanced rockets and to coerce Hamas into stopping attacks by threatening its ability to provide economically for Gazans. Israel tightly controls the crossing points between Gaza and Israel, inspecting cargo and limiting goods, including dual-use items as well as arms. Israel and Egypt also cooperated to restrict the flow of goods and people from Gaza to Egypt. Under Mubarak, cooperation ebbed and flowed based on the overall bilateral relationship and the Mubarak regime’s political needs of the moment; when the Muslim Brotherhood took over, they still cooperated though both Hamas and Israel expected the new government to be more supportive of its ideological offspring in Gaza. Israel also reportedly attacked arms supplies in Sudan en route to Gaza. 
Larger uses of force like the 2014 operation also remind Hamas that Israel can devastate Gaza’s economy if pushed. Politically, Israel has also tried to prevent Hamas from joining forces with Abbas, fearing the former might gain the upper hand politically and radicalize the broader Palestinian national movement. The general policy of isolation went on steroids after the coup against the Egyptian Muslim Brotherhood government in Egypt in 2013. Egypt went from Hamas’s potential friend under Mohammad Morsi to its most bitter enemy under Abdel Fattah el-Sisi, believing the group’s origins in the Muslim Brotherhood make it an automatic enemy. The Egyptian military has shut down the Egypt-Gaza border, destroying most of the tunnels that helped keep Gaza’s economy afloat and allowed for the resupply of Hamas’s arms after conflicts. Food security in the Strip is low, electricity sporadic, and unemployment high. With the exception of Qatar, supporters in the Gulf have also turned against Hamas, sharing Egypt’s fear of the Brotherhood. Israel, of course, did not orchestrate this diplomatic campaign, but it has moved closer to some of Hamas’s Arab adversaries. Hamas even lost some backing from Iran, an enemy of Israel and the Gulf states, which reduced support for Hamas when they picked opposing sides in the Syria conflict. They are again at odds after Hamas refused to condemn Saudi Arabia’s 2015 intervention in Yemen against Shia militants. At times, Israeli pressure worked. After these limited wars, Hamas often reduced or temporarily ended its own rocket attacks and tried to stop rival groups from striking Israel. Israeli pressure has also kept Hamas weak and often willing to compromise or limit its own military operations. But the glass is at best half full. In some instances, Hamas was unable to stop rival groups from attacking. More commonly, the problem was political: Hamas wanted to assure its own rank-and-file and the broader militant community that it remained committed to striking Israel, or, at the very least, that it was not going to act as Israel’s policeman like Fatah and stop other groups from fighting the good fight. When economic pressure grew intense and Hamas felt there was no chance of this easing through negotiations, rocket attacks were a way to remind Israel and the world that Hamas had cards of its own to play and that Gaza could not be left on the world’s back burner. Yet some Israelis recognize that too much pressure can backfire. Hamas is not the most extreme enemy Israel faces in Gaza. The Strip is home to far more radical factions, ranging from small numbers of Islamic State admirers to disaffected members of Hamas’s military wing who reject any accommodation with Israel. If Hamas falls, it may not be Abbas or other peace-inclined voices who will take his place. Nor is Hamas helpless in the face of Israeli pressure. If Hamas is denied the chance to advance its political and economic program, it can force a crisis. Rocket attacks also shore up Hamas’s resistance bona fides, enabling it to gain ground with an important constituency. Politically, as peace negotiations with Abbas stumbled and then collapsed, Hamas’s calls for “resistance” gained more credibility, particularly on the West Bank where Palestinians could admire Hamas attacks without having to suffer Hamas’s rule and associated isolation and Israeli military campaigns. Israel finds it hard to balance deterrence with its identity as a democracy. To keep Hamas off balance and fearful, Israel hits Gaza hard. 
And to keep domestic support strong, Israel does so in a way that minimizes Israel’s own casualties. This logic, however, goes against the perception that Israel’s response is proportionate to its own suffering, making Israel’s activities look illegal and cruel in the eyes of many in the world community — an EU Council declared itself “particularly appalled” by one operation. Israelis may claim to be inured to such criticism, but the constant Israeli concerns that Palestinians are undermining their legitimacy suggest that Israel cares about being seen as in the right. No Going Back For many Israelis, the lesson of the Gaza withdrawal is that Palestinians, if left to their own devices, are apt to elect terrorists to be their leaders. And if it happened in Gaza, it could happen in the West Bank, whose borders are near many of Israel’s major cities. Learning only this lesson would be a mistake, however, as on balance the withdrawal from Gaza was good for Israel. Had Israel not withdrawn, the soldiers and settlers in the Strip would have remained vulnerable. Israel would have continued to pay a heavy price to protect them. Although rockets remain a problem, they are far less dangerous than suicide bombers, roadside bombs, or other threats Israel would face if its army and citizens were in Gaza instead of behind the security barrier. And with the development of the Iron Dome anti-rocket system, Israel is able to further reduce, though not eliminate, the danger rockets pose. Withdrawal also put Hamas in a tough position. When Hamas controlled no territory, it focused on fighting Israel and excoriated those who proposed any compromise. Palestinians now judge it on how well it governs in addition to how well it fights Israel, and its failures and financial problems weigh heavily. Now Hamas is criticized both for being too violent and for being too restrained: It pays a higher political price when it uses violence. Hamas often observes lasting ceasefires with Israel, and some voices within Hamas, though by no means the entire organization, would favor a long-term ceasefire to be allowed greater freedom to govern. Not surprisingly, Hamas is less eager to govern as its track record is poor, and it is more than willing to share the disaster that is Gaza with the Palestinian Authority. It’s too soon to say that Hamas is transforming into an exclusively political movement or becoming less ambitious in its aims to supplant Abbas and Fatah, but it’s a mistake to ignore how the organization has shifted since 2005. The way out of the Gaza mess lies, in part, in the West Bank. To prevent Hamas from making further political gains, Israel needs to show Palestinians that a commitment to negotiations can reap rewards. As long as there is no peace deal with moderate Palestinians on the West Bank, Hamas’s political message of “resistance” will remain strong. At the same time, Israel needs to allow the more pragmatic voices in Hamas to pull the group toward governance over resistance — an approach that only works if Hamas has incentives for good behavior as well as punishments for violence. Hamas, after all, is not the Islamic State (which has a nascent presence in Gaza) — it is violent and anti-Israel, but also willing to negotiate. Hamas kidnapped Sergeant Gilad Shalit to cut a deal, not to behead him. 
Finally, pressure should be put on Abbas to provide forces to man border crossings and conduct credible inspections of trade, a compromise that allows for Palestinian sovereignty in Gaza, and lifts the burden on ordinary Gazans, but does not strengthen Hamas. Gaza lacks an optimal solution, so it is not surprising that short-term approaches dominate policy. Yet short-term logic has led Israel into war after war, with victory always elusive. Israel has often ignored the political implications of its actions. By weakening Abbas politically, Hamas has often emerged with more support. Recognizing that Hamas can gain political victory from military defeat is a necessary step for successfully confronting it. Policies that recognize this strange reality can move Israel and Hamas away from another war and validate Sharon’s decision to pull out of Gaza. The clock is ticking on the Obama administration and neither Israel nor the United States appears eager to kick the dead peace process horse. But such cynicism about reengaging in peace talks and other efforts to end or mitigate the dispute is dangerous in the long-term. Administration officials should try to lay the groundwork for the next administration to resume serious peace talks. More immediately, they should work with Israel and Palestinian Authority officials to resume the Authority’s presence in Gaza. In the absence of progress, the United States must prepare for another Gaza war with all the suffering and disruption that entails. Daniel Byman is a professor in the security studies program at Georgetown University and the research director of the Center for Middle East Policy at the Brookings Institution. He is the author of Al Qaeda, the Islamic State, and the Global Jihadist Movement: What Everyone Needs to Know. Follow him @dbyman. Photo credit: Israel Defense Forces
Anti-lymphocyte globulin stimulates normal human T cells to proliferate and to release lymphokines in vitro. A study at the clonal level. Human peripheral blood mononuclear cells (PBMC) were stimulated in vitro with anti-lymphocyte globulin (ALG), and the phenotypic and functional properties of the blasts obtained were investigated. When stained with monoclonal antibodies (MoAbs), all of the blasts were identified as T cells that expressed predominantly the CD4 phenotype (70% of the cells). The remaining blasts were CD8+. These findings demonstrate that ALG stimulates both helper-inducer and cytotoxic-suppressor cells at random since the CD4 to CD8 ratio in the stimulated blasts was the same as in resting PBMC. This ratio is different from that observed in short-term cultures of T cells stimulated with phytohemagglutinin (PHA) under the same conditions (CD4 to CD8 ratio less than 1). ALG-stimulated T cells were cloned by limiting dilution in the presence of recombinant Interleukin-2 (rIL-2). The clones obtained were expanded and maintained in long term cultures with rIL-2. Thirty-two clones were tested for their capacity of producing colony stimulating activity (CSA) or burst promoting activity (BPA). Twenty-eight of them produced CSA and 12 produced BPA. No correlation was found between the surface phenotype and the ability of the clones to produce CSA or BPA (ie, both the CD4+ and CD8+ clones released the cytokines). When 16 of the same clones were tested for II-2 and gamma interferon (gamma IFN) production, 12 were found to be gamma INF and IL-2 producers. All of the gamma IFN producers also released IL-2, whereas in the single clones no correlation was found with the capacity of releasing BPA and CSA. Supernatants from selected T-cell clones were also tested for hematopoietic growth factor activities in the presence of neutralizing antisera to human granulocyte-macrophage colony stimulating factor (GM-CSF) or to Interleukin-3 (IL-3). It was found that most CSA was attributable to GM-CSF, whereas BPA was mainly related to the presence of IL-3.
/**
 * Return the count of the number of times the user has the collection permission.
 * @see edu.ur.ir.institution.InstitutionalCollectionSecurityService#hasPermission(edu.ur.ir.institution.InstitutionalCollection, edu.ur.ir.user.IrUser, edu.ur.ir.institution.InstitutionalCollectionPermission)
 */
public long hasPermission(InstitutionalCollection collection, IrUser user,
        InstitutionalCollectionPermission collectionPermission) {
    return securityService.hasPermission(collection, user, collectionPermission.getPermission());
}
def combine_tables(self, filename, dir_path=os.getcwd(), name='combined'):
    # glob returns a (possibly empty) list, never None, so check for emptiness
    location = glob.glob(dir_path + "*" + filename + "*")
    if not location:
        raise NameError('Table not located in the given path')
    else:
        # materialise the map() iterator so the result can be indexed (Python 3)
        tables = list(map(self._initialise, location))
        if type(tables[0]) != at.Table:
            raise TypeError('Astropy table could not be created')
        # functools.reduce is assumed to be imported as `reduce` at module level
        combined_table = reduce(self._combine, tables)
    if len(combined_table):
        self._make_combined_table = True
        try:
            combined_table.writeto(name + '.fits')
        except:
            txt_file = open('download_rsync.txt', 'w')
            c = combined_table
            _list = [i for i in c[c.colnames[0]]]
            txt_file.write("%s\n" % _list)
    return glob.glob(dir_path + name + '.fits')
# tests/fitness_test2.py
import source.fitness.fitness as fit
import os

fit_app = fit.Fitness()

test1 = fit_app.open_session()
if not test1:
    print("ERROR: session should exist")
else:
    print("Test1 passed")

fit_app.print_fitness_plan()
fit_app.save_session()

test2 = os.path.exists('./pickles/exercises.pk')
if test2:
    print("test2 passed")
else:
    print("ERROR: session was not saved")
// Performance issue: naive recursion recomputes the same subproblems exponentially many times
public Integer calculate_fibonacci(int inputNumber) {
    if (inputNumber == 0 || inputNumber == 1) {
        return 1;
    } else {
        return calculate_fibonacci(inputNumber - 1) + calculate_fibonacci(inputNumber - 2);
    }
}
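A brief sketch of one common fix for the cost flagged in the comment above: memoize the recursion so each Fibonacci value is computed only once. The enclosing class and field names here are placeholders, not part of the original source.

import java.util.HashMap;
import java.util.Map;

public class FibonacciMemo {
    // Cache of already-computed values, keyed by input number
    private final Map<Integer, Integer> cache = new HashMap<>();

    public Integer calculate_fibonacci_memoized(int inputNumber) {
        if (inputNumber == 0 || inputNumber == 1) {
            return 1;
        }
        Integer cached = cache.get(inputNumber);
        if (cached != null) {
            return cached;
        }
        Integer result = calculate_fibonacci_memoized(inputNumber - 1)
                + calculate_fibonacci_memoized(inputNumber - 2);
        cache.put(inputNumber, result);
        return result;
    }
}

With the cache in place the running time drops from exponential to linear in inputNumber; an iterative two-variable loop would avoid the deep recursion as well.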
About the author (NewsTarget) Scientists are finding that those who choose to eat according to the principles of the Mediterranean diet have a lowered rate of death from all causes. Research has shown that the Mediterranean diet has a beneficial effect against cardiovascular disease and is a preventative against a second heart attack. Now we are seeing that this diet extends longevity by reducing deaths from all diseases including cancer.Researchers reported in the December 10, 2007a prospective study to investigate the Mediterranean dietary pattern in relation to mortality, confirming suggestions that the diet plays a beneficial role for health and longevity.The study participants included 214,284 men and 166,012 women in the National Institutes of Health-AARP Diet and Health Study. During follow up for all-cause mortality from 1995 to 2005, 27,799 deaths were documented. In the first 5 years of follow up 5,985 cancer deaths and 3,451 cardiovascular disease deaths were reported. The researchers used a nine point score to assess conformity with the Mediterranean diet pattern with components including vegetables, legumes, fruits, nuts, whole grains, fish, monounsaturated fat-saturated fat ratio, alcohol, and meat. They calculated hazard ratios and 95% confidence intervals using age and multivariate adjusted Cox models.Results indicated that the Mediterranean diet was associated with reduced all-cause and cause-specific mortality . In men, the multivariate hazard ratios comparing high to low conformity for all-causes, CVD, and cancer mortality were 0.79, 0.78, and 0.83. In women, an inverse association was seen with high conformity within this pattern: decreased risks that ranged from 12% for cancer mortality to 20% for all-cause mortality.Results from this study provide strong evidence of a beneficial effect from higher conformity with the Mediterranean dietary pattern on risk of death from all causes, including deaths due to cardiovascular disease and cancer in the a U.S. population.Americans tend to associate the word diet with restriction and deprivation. But forget this definition because the Mediterranean diet is based on the abundance of foods found in the countries of the Mediterranean Basin. The word diet in the title is used in the traditional sense, meaning a way or style of eating.The most commonly understood version of the diet was presented by Dr. Walter Willett of Harvard University's School of Public Health in the mid-1990s. It is a diet based on "food patterns typical of Crete, much of the rest of Greece, and southern Italy in the early 1960s", according to Willett.The traditional Mediterranean diet has been interpreted into a Pyramid with daily physical activity at its base. Regular physical activity is seen as essential for promoting healthy weight, fitness and well-being. Typical exercises of the Mediterranean's might include walking, house cleaning, running, soccer, tennis, golf, swimming, hiking, scuba diving, ball games, skiing, surfing, yard work, dancing, weight lifting, and love making.In ascending order, the Pyramid also includes:* An abundance of food from plant sources, including fruits and vegetables, potatoes, breads and grains, beans, nuts and seeds. Common foods on this step include pasta, rice, couscous, and polenta.* Emphasis on a variety of minimally processed and, wherever possible, seasonally fresh and locally grown foods. 
Common foods include olives, avocados, grapes, spinach, eggplant, tomatoes, broccoli, peppers, mushrooms, garlic, capers, almonds, walnuts, chick peas, white beans, lentils and other beans, and peanuts.* Olive oil as the principle fat . Total fat can range from less than 25 percent to over 35 percent of calories, with saturated fat no more than 7 to 8 percent of calories.* Daily consumption of low to moderate amounts of cheese and yogurt* Weekly consumption of low to moderate amounts of fish. Common fish are shellfish and sardines.* Weekly consumption of poultry, and from zero to four eggs per week including those used in cooking and baking.* Sweets. Common sweets are pastries, ice cream and cookies* Meat. Common meats are veal and lamb.It's quite interesting that the base of the U.S. diet is often meat, but meat is at the top of the Mediterranean diet, recommended to be eaten less frequently than even sweets.Alcohol, particularly red wine, may be consumed in moderation and with meals.One of the main explanations for the beneficial effects of the diet is thought to be the large amount of olive oil which is seen as lowering cholesterol levels in the blood. It is also known to lower blood sugar levels and blood pressure. Research indicates that olive oil prevents peptic ulcers and is effective in treatment of peptic ulcer disease, and may be a factor in preventing cancer. The consumption of red wine is considered a possible factor, as it contains flavonoids with powerful antioxidant properties. Others suspect that is it not any one particular nutrient that confers the benefits, but rather the combination of nutrients found in this diet comprised of unprocessed foods.The olive oil, nuts and fish of the diet contain beneficial omega-3 fatty acids. Studies have shown that omega-3 fatty acids lower triglycerides and may provide an anti-inflammatory effect helping to stabilize blood vessel lining.The Seven Countries Study found that Cretan men had exceptionally low death rates from heart disease, despite moderate to high intake of fat. The Cretan diet is similar to other traditional Mediterranean diets, consisting mostly of olive oil, bread, fish, moderate amounts of dairy food and wine, and an abundance of fruit and vegetables.The Lyon Diet Heart Study began as a copy of the Cretan diet, but resistance from the participants resulted in it taking a more pragmatic approach. Since the people were reluctant to move from butter to olive oil, they used a margarine based on rapeseed (canola) oil. The dietary change also included a 20% increase in vitamin C rich fruit and bread, and decreases in processed foods and red meat. This diet resulted in mortality from all causes being reduced by 70%. The study was so successful that an ethics committee decided to stop it prematurely so the results could be made immediately available to the public.Since olive oil was not part of the diet in the Lyon Diet Heart Study, it would appear that it is not the single most important ingredient in the Mediterranean diet that it is often reported to be.The principles of the diet can become part of your lifestyle based on the way you shop. Here are some things to remember.All types of olive oil provide monounsaturated fat, but "extra virgin" olive oil is the least processed form and contains the highest levels of the protective plant compounds that provide antioxidant effects.Walnuts contain high levels of omega-3 fatty acids. All nuts are very nutritionally dense foods, so they should not be eaten in large amounts. 
Two ounces of nuts a day is plenty. Choose soaked or sprouted nuts. They are available from several online dealers. Buy natural peanut better, preferably the kind you grind yourself at the store. This is also available online. Keep nuts on hand for a quick snack.Eat a variety of whole fruits and vegetables every day. Shop by color. Your selection of fruits and vegetables should reflect all the colors in the produce section. Don't try to stock up a week's worth of fruits and vegetables. Shop more frequently so your selection is as fresh as you can get it.Substitute wild caught fish and natural chicken for all other meats except your monthly dose of red meat.Choose yogurt and cheeses made according to tradition. If you want low or no fat cheese, choose mozzarella or any cheese that has been traditionally made from skim milk. Stay away from any yogurt or cheese that advertises itself as reduced fat, low fat, or fat free.And don't forget that the Mediterranean is a very sunny warm place where people feel at ease outside. Let the sun shine on you, and let a breeze kiss your skin when you can.Additional Sources: www.oldwayspt.org ) "Mediterranean Diet Pyramid" www.wikipedia.org ) "Mediterranean Diet" www.mayoclinic.com ) "Mediterranean Diet for Heart Health"Barbara is a school psychologist, a published author in the area of personal finance, a breast cancer survivor using "alternative" treatments, a born existentialist, and a student of nature and all things natural.
/** ----- R E A D   R A D I U S ----- */
public static double readRadius(String fileName) {
    In in = new In(fileName);
    in.readInt();   // discard the leading int; the next value is the universe radius
    double universeRadius = in.readDouble();
    return universeRadius;
}
/** * Reader to read log buffer files. */ public class LogBufferReader implements Closeable { private static final String FILE_SUFFIX = ".buf"; private final int batchSize; private final String baseDir; private final long maxFileId; private long currFileId; private LogBufferEventReader eventReader; private boolean skipFirstEvent; /** * Creates log buffer reader responsible for reading log buffer files. * * @param baseDir base directory for log buffer * @param batchSize max number of log events to read in one batch * @param maxFileId max file id to which recovery should happen * @param currFileId current log buffer file id * @param currPos position in current log buffer file * @throws IOException if there is any error while opening file to read */ public LogBufferReader(String baseDir, int batchSize, long maxFileId, long currFileId, long currPos) throws IOException { this.baseDir = baseDir; this.batchSize = batchSize; // if no checkpoints are written, currFileId and currPos will be -1. In that case the first event should not be // skipped. However, if currFileId and currPos are non negative, that means first event should be skipped as // atleast one log event has been persisted. this.currFileId = currFileId < 0 ? 0 : currFileId; this.skipFirstEvent = currFileId >= 0; this.maxFileId = maxFileId; this.eventReader = new LogBufferEventReader(baseDir, this.currFileId, currPos < 0 ? 0 : currPos); } /** * Reads next batch of events from log buffer. * * @param eventList events list to which events will be added * @return number of events added to eventList * @throws IOException error while reading events from buffer files */ public int readEvents(List<LogBufferEvent> eventList) throws IOException { // there are no files in the log buffer directory. So return 0. if (maxFileId < 0) { return 0; } // iterate over all the remaining events. while (eventList.size() < batchSize && currFileId <= maxFileId) { try { if (eventReader == null) { eventReader = new LogBufferEventReader(baseDir, currFileId); } // skip the first event if skipFirstEvent is true. This is needed because log buffer offset represents offset // till which log events have been processed. Meaning current event is already processed by log buffer pipeline. if (skipFirstEvent) { eventReader.read(); skipFirstEvent = false; } eventList.add(eventReader.read()); } catch (FileNotFoundException e) { // move to next file in case file pointed by currFileId was not found currFileId++; } catch (EOFException e) { // reached eof on this event reader. So close it, move to next file eventReader.close(); eventReader = null; currFileId++; } } return eventList.size(); } @Override public void close() throws IOException { if (eventReader != null) { eventReader.close(); } } /** * Log buffer event reader to read log events from a log buffer file. 
*/ private static final class LogBufferEventReader implements Closeable { private static final int BUFFER_SIZE = 32 * 1024; // 32k buffer private final DataInputStream inputStream; private final LoggingEventSerializer serializer; private long fileId; private long pos; LogBufferEventReader(String baseDir, long fileId) throws IOException { this(baseDir, fileId, 0); } LogBufferEventReader(String baseDir, long fileId, long pos) throws IOException { this.fileId = fileId; this.pos = pos; FileInputStream fis = new FileInputStream(new File(baseDir, fileId + FILE_SUFFIX)); // seek to the position if the position is not zero if (pos != 0) { fis.getChannel().position(pos); } this.inputStream = new DataInputStream(new BufferedInputStream(fis, BUFFER_SIZE)); this.serializer = new LoggingEventSerializer(); } /** * Reads next event from log buffer file pointed by this reader. * * @return log buffer event * @throws IOException error while reading log buffer file */ LogBufferEvent read() throws IOException { int length = inputStream.readInt(); byte[] eventBytes = new byte[length]; inputStream.read(eventBytes); LogBufferEvent event = new LogBufferEvent(serializer.fromBytes(ByteBuffer.wrap(eventBytes)), eventBytes.length, new LogBufferFileOffset(fileId, pos)); // update curr position to point to next event pos = pos + Bytes.SIZEOF_INT + length; return event; } /** * Closes this reader. */ public void close() { // close input stream wrapped by this reader Closeables.closeQuietly(inputStream); } } }
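A minimal consumption sketch for the reader above, draining the buffer in batches; the directory, batch size, file ids, and starting offset are placeholder values, and the processing step is purely illustrative.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class LogBufferDrainExample {
    public static void main(String[] args) throws IOException {
        // (-1, -1) mimics the "no checkpoint written yet" case handled in the constructor
        try (LogBufferReader reader = new LogBufferReader("/tmp/log-buffer", 100, 5, -1, -1)) {
            List<LogBufferEvent> batch = new ArrayList<>();
            while (reader.readEvents(batch) > 0) {
                for (LogBufferEvent event : batch) {
                    // Illustrative only; a real pipeline would forward events to the log appenders
                    System.out.println(event);
                }
                batch.clear();
            }
        }
    }
}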
export async function wait(milliseconds: number) {
  await new Promise<void>((res) => {
    setTimeout(() => res(), milliseconds);
  });
}
/* Problem Link: https://www.hackerearth.com/practice/basic-programming/input-output/basics-of-input-output/practice-problems/algorithm/find-product/description/ */
#include <iostream>
using namespace std;

using ulli = unsigned long long int;

int main() {
    ios::sync_with_stdio(false);
    ulli n;
    cin >> n;
    ulli res = 1, input;
    for (int i = 0; i < n; ++i) {
        cin >> input;
        res = (res * input) % 1000000007;
    }
    cout << res << endl;
    return 0;
}
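For reference, the reduction inside the loop relies on the standard identity of modular arithmetic (stated here for context, not taken from the source):

\[
(a \cdot b) \bmod m \;=\; \big((a \bmod m)\,(b \bmod m)\big) \bmod m,
\]

so taking the product modulo 1000000007 after every multiplication leaves the final answer unchanged while keeping intermediate values inside the unsigned 64-bit range.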
Histologic characteristics enhance predictive value of American Joint Committee on Cancer staging in resectable pancreas cancer American Joint Committee on Cancer (AJCC) anatomic stage group is considered relatively nondiscriminatory for predicting differences in survival after pancreatectomy for ductal adenocarcinoma, a perception confirmed in the authors' patients and by other reports. The authors' aim was to investigate the potential for improving the predictive value of AJCC staging by incorporating individually predictive histologic features into AJCC tumor‐node‐metastasis classification of anatomic extent, and determine the simplest combination of tumor characteristics predicting survival.
export default interface IHashProvider {
  generateHash(payload: string): Promise<string>;
  compareHash(payload: string, hashed: string): Promise<boolean>;
}
/* * MIT LICENSE * Copyright 2000-2019 Simplified Logic, Inc * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: The above copyright * notice and this permission notice shall be included in all copies or * substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", * WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED * TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.simplifiedlogic.nitro.jshell.json; import java.util.HashMap; import java.util.Hashtable; import java.util.Map; import com.fasterxml.jackson.databind.ObjectMapper; import com.simplifiedlogic.nitro.jlink.intf.JShellProvider; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonBomHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonCommandHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonConnectionHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonCreoHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonDimensionHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonDrawingHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonFamilyTableHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonFeatureHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonFileHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonGeometryHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonInterfaceHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonLayerHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonNoteHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonParameterHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonViewHandler; import com.simplifiedlogic.nitro.jshell.json.handler.JLJsonWindchillHandler; import com.simplifiedlogic.nitro.jshell.json.request.BaseRequest; import com.simplifiedlogic.nitro.jshell.json.response.BaseResponse; import com.simplifiedlogic.nitro.jshell.json.response.JLConnectResponseParams; import com.simplifiedlogic.nitro.jshell.json.response.ServiceStatus; import com.simplifiedlogic.nitro.rpc.JLIException; /** * This is the main entry point for the JSON wrapper for the JShell library. * * <p>JSON requests are received here and forwarded to the appropriate handler * for the given function family. * * @author <NAME> * */ public class JShellJsonHandler { /** * Whether to always include the ServerStatus object on all responses, even * if there is no special status to return. */ public static final boolean alwaysIncludeStatus = true; /** * The collection of function handlers for the various function families. Keyed by command name. 
*/ private Map<String, JLJsonCommandHandler> commands = new HashMap<String, JLJsonCommandHandler>(); /** * Object for converting JSON strings to java objects and back again */ private ObjectMapper mapper = new ObjectMapper(); public JShellJsonHandler() { JShellProvider jp = JShellProvider.getInstance(); jp.initializeStandalone(); if (jp!=null) { // assemble the collection of function handlers commands.put(JLJsonConnectionHandler.COMMAND, new JLJsonConnectionHandler(jp.getJLConnection())); commands.put(JLJsonFileHandler.COMMAND, new JLJsonFileHandler(jp.getJLFile())); commands.put(JLJsonParameterHandler.COMMAND, new JLJsonParameterHandler(jp.getJLParameter())); commands.put(JLJsonCreoHandler.COMMAND, new JLJsonCreoHandler(jp.getJLProe())); commands.put(JLJsonFamilyTableHandler.COMMAND, new JLJsonFamilyTableHandler(jp.getJLFamilyTable())); commands.put(JLJsonFeatureHandler.COMMAND, new JLJsonFeatureHandler(jp.getJLFeature())); commands.put(JLJsonGeometryHandler.COMMAND, new JLJsonGeometryHandler(jp.getJLGeometry())); commands.put(JLJsonNoteHandler.COMMAND, new JLJsonNoteHandler(jp.getJLNote())); commands.put(JLJsonInterfaceHandler.COMMAND, new JLJsonInterfaceHandler(jp.getJLTransfer())); commands.put(JLJsonViewHandler.COMMAND, new JLJsonViewHandler(jp.getJLView())); commands.put(JLJsonLayerHandler.COMMAND, new JLJsonLayerHandler(jp.getJLLayer())); commands.put(JLJsonDrawingHandler.COMMAND, new JLJsonDrawingHandler(jp.getJLDrawing())); commands.put(JLJsonWindchillHandler.COMMAND, new JLJsonWindchillHandler(jp.getJLWindchill())); commands.put(JLJsonDimensionHandler.COMMAND, new JLJsonDimensionHandler(jp.getJLDimension())); commands.put(JLJsonBomHandler.COMMAND, new JLJsonBomHandler(jp.getJLBom())); } } /** * Handle a JSON request and return its results * @param reqString The JSON request string * @return The JSON response string */ public String handleRequest(String reqString) { BaseRequest req = null; BaseResponse output = null; // turn the JSON string into a standard request object try { // parse json req = (BaseRequest)mapper.readValue(reqString, BaseRequest.class); } catch (Exception e) { output = new BaseResponse(); createError(output, "Invalid JSON input: " + reqString); try { return mapper.writeValueAsString(output); } catch (Exception ex) { ex.printStackTrace(); return null; } } // pass the request to the handler and receive a response output = handleRequest(req); if (output==null) output = new BaseResponse(); if (output.getStatus()==null) output.setStatus(new ServiceStatus()); if (req.isEcho()) output.setEchoString(reqString); // turn the response object into a JSON string and return try { // unparse json String resp = mapper.writeValueAsString(output); return resp; } catch (Exception e) { createError(output, "Invalid JSON output: " + reqString); try { return mapper.writeValueAsString(output); } catch (Exception ex) { ex.printStackTrace(); return null; } } } /** * Handle a standard request by passing it on to a handler class, and return its results * @param req The request in the form of a standard request object * @return The response in the form of a standard response object */ public BaseResponse handleRequest(BaseRequest req) { BaseResponse resp = new BaseResponse(); // check for empty or invalid request if (req==null) { createError(resp, "Empty request"); return resp; } if (req.getCommand()==null) { createError(resp, "Request is missing the 'command' property"); return resp; } JLJsonCommandHandler handler = commands.get(req.getCommand()); // check for invalid command if 
(handler==null) { createError(resp, "Invalid command: " + req.getCommand()); return resp; } if (req.getFunction()==null) { createError(resp, "Request is missing the 'function' property"); return resp; } try { // pass the request to an external handler Hashtable<String, Object> output = handler.handleFunction(req.getSessionId(), req.getFunction(), req.getData()); if (output!=null) { // special handling for when a new session ID is returned in the data; // set it into the response and remove it from the data if (output.get(JLConnectResponseParams.OUTPUT_SESSIONID)!=null) { resp.setSessionId(output.get(JLConnectResponseParams.OUTPUT_SESSIONID).toString()); output.remove(JLConnectResponseParams.OUTPUT_SESSIONID); if (output.size()==0) output = null; } } // insert the results into the data portion of the response object resp.setData(output); if (alwaysIncludeStatus) { // force a status object to always be returned, to make things easier on the user // when checking for errors ServiceStatus status = new ServiceStatus(); status.setError(false); resp.setStatus(status); } } catch (JLIException e) { createError(resp, e.getMessage()); } catch (Exception e) { e.printStackTrace(); String msg = e.getMessage(); if (e instanceof NullPointerException) msg = "Unexpected null value"; createError(resp, "Error handling request: " + msg); } return resp; } /** * Generate an error status return * @param resp The response object to receive the error status * @param msg The error message */ private void createError(BaseResponse resp, String msg) { ServiceStatus status = new ServiceStatus(); status.setMessage(msg); status.setError(true); resp.setStatus(status); } }
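A minimal sketch of driving JShellJsonHandler directly. The "connection"/"connect" command and function names below are placeholders (the real values are whatever the individual JLJson*Handler COMMAND constants and their supported functions define), but the request/response shape, a JSON object with "command", "function", optional "data" and "echo", and a "status" object on every response, follows the code above.

public class JShellJsonHandlerDemo {
    public static void main(String[] args) {
        JShellJsonHandler handler = new JShellJsonHandler();

        // "command" selects the function family, "function" the operation inside it;
        // "echo" asks for the request string to be copied back onto the response.
        String request = "{\"command\":\"connection\",\"function\":\"connect\",\"echo\":true}";

        String response = handler.handleRequest(request);

        // Because alwaysIncludeStatus is true, the response JSON always carries a
        // "status" object, so callers can check status.error before reading "data".
        System.out.println(response);
    }
}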
import time import sqlite3 import pandas as pd import numpy as np import scipy as sp from scipy import stats import matplotlib.mlab as mlab import matplotlib.pyplot as plt ''' from sklearn.datasets import make_classification from sklearn.linear_model import LogisticRegression from sklearn.ensemble import (RandomTreesEmbedding, RandomForestClassifier, GradientBoostingClassifier) from sklearn.preprocessing import OneHotEncoder from sklearn.model_selection import train_test_split from sklearn.metrics import roc_curve from sklearn.pipeline import make_pipeline ''' traindataframes = [] testDataFrame = [] #def predict_next_day(): #return 0 def gradient_descent(): global traindataframes global testDataFrame prediction_frame = testDataFrame[0] prediction_frame = prediction_frame['Current_price'] prediction_frame = prediction_frame[0:50] #prediction_frame = (prediction_frame - prediction_frame.mean())/prediction_frame.std() #prediction_frame = np.array(prediction_frame) frames = [] for i in traindataframes: temp = i['Current_price'] frames.append(temp[0:50]) ''' frames = [] for i in traindataframes: temp = i['Today_price'] for i in temp: if '+' in i: t = i.replace('+','') temp = temp.replace(str(i),t) for i in temp: temp = temp.replace(str(i),float(i)) frames.append(temp[0:50]) ''' #print(frames) data_df = pd.concat(frames,axis=1) data_df.columns = ['1','2','3','4','5','6','7','8','9','10','11','12','13','14','15','16','17','18','19','20','21'] #get features features = data_df features = (features - features.mean())/features.std() features_array = np.array(features) #values_array = np.random.random_sample(50) values = prediction_frame values = (values - values.mean())/values.std() values_array = np.array(values) m = len(values_array) alpha = 0.01 num_iterations = 100 theta_descent = np.zeros(len(features.columns)) cost_history = [] for i in range(num_iterations): predicted_value = np.dot(features_array, theta_descent) theta_descent = theta_descent + alpha/m * np.dot(values_array - predicted_value, features_array) sum_of_square_errors = np.square(np.dot(features_array, theta_descent) - values_array).sum() cost = sum_of_square_errors / (2 * m) cost_history.append(cost) #all output and debugging cost_history = pd.Series(cost_history) predictions = np.dot(features, theta_descent).transpose() print('============================================') print('Cost History: ', cost_history) print('Theta Descent: ',theta_descent) print('Alpha: ', alpha) print('Iterations: ',num_iterations) data_predictions = np.sum((values_array - predictions)**2) mean = np.mean(values_array) sq_mean = np.sum((values_array - mean)**2) r = 1 - data_predictions / sq_mean print('R: ', r) #denormalize data features = ((features * data_df.std()) + data_df.mean()) print(features) predictions = np.dot(features, theta_descent).transpose() print('Predictions: ',predictions) print('============================================') fig, ax = plt.subplots() ax.plot(prediction_frame,'o',markersize = 1, color = 'green', label = 'Actual Price') ax.plot(predictions,'o',markersize = 1, color = 'blue', label = 'Predicted Price') #ax.plot(features,'o',markersize = 1, color = 'red', label = 'Price Previously') fig2, ax2 = plt.subplots() ax2.plot(cost_history,'o',markersize = 1, color = 'blue') plt.show() def get_Data(): global traindataframes global testDataFrame tables = [] con = sqlite3.connect("GE_Data.db") cur = con.cursor() table = cur.execute("select name from sqlite_master where type = 'table'") for i in table.fetchall(): tables.append(i[0]) 
for i in tables[:-1]: # print('DFs: ' + str(i)) q = "select * from " + i + " ORDER BY Id" traindataframes.append(pd.read_sql(q,con)) for i in tables[-1:]: # print('TestDF: ' + str(i)) q = "select * from " + i + " ORDER BY Id" testDataFrame.append(pd.read_sql(q,con)) cur.close() con.close() get_Data() gradient_descent() #predict()
from django.conf import settings from django.core.cache import cache from channels import Channel, Group from channels.sessions import channel_session, enforce_ordering from channels.auth import http_session_user, channel_session_user, channel_session_user_from_http from channels.handler import AsgiHandler, AsgiRequest from isubscribe.tasks import alert_rules, sensu_entity_list, sensu_event_list, slack_user_detect, slack_user_nag, user_register, alert_handler, ack_handler, sensu_client_list, alert_history, sensu_check_list, notify_history, trends_build, user_rules from isubscribe.models import Subscribe from isubscribe.views import entities from isubscribe.notify import Notify import json, datetime, re import logging logger = logging.getLogger('isubscribe.consumers') # Listens on http.request (example - not in use) @channel_session def http_consumer(message): #channel_session_user = True #http_user = True # Decode the request from message format to a Request object django_request = AsgiRequest(message) # Run view django_response = entities(django_request) # Encode the response into message format for chunk in AsgiHandler.encode_response(django_response): message.reply_channel.send(chunk) def user_register_job(message): logger.info('slack_user_detect begin') user_register(message) logger.info('slack_user_detect completed') def slack_detect(message): logger.info('slack_user_detect begin') slack_user_detect() logger.info('slack_user_detect completed') def slack_nag(message): logger.info('slack_user_nagg begin') slack_user_nag(message) logger.info('slack_user_nagg completed') def build_rules(message): logger.info('alert_rules begin') alert_rules() logger.info('alert_rules completed') def update_entities(message): logger.info('sensu_entity_list begin') sensu_entity_list() logger.info('sensu_entity_list completed') def update_events(message): logger.info('sensu_event_list begin') sensu_event_list() logger.info('sensu_event_list completed') def update_clients(message): logger.info('update_clients begin') sensu_client_list() logger.info('update_clients completed') def update_checks(message): logger.info('update_checks begin') sensu_check_list() logger.info('update_checks completed') def update_trends(message): logger.info('update_trends begin') trends_build() logger.info('update_trends completed') #@enforce_ordering(slight=True) def escalator(message): logger.debug('escalator message = %s', message) #@enforce_ordering(slight=True) def build_entity_rules(message): logger.info('building_entity_rules begin: %s' % message['entity']) entity_status_friends = {} for obj in Subscribe.objects.filter(entity=message['entity']).all(): if obj.status not in entity_status_friends: entity_status_friends[obj.status] = [] for user in obj.friends.all(): entity_status_friends[obj.status].append(user.pk) cache.set('rule_' + obj.entity, entity_status_friends, timeout=None) logger.info('build_entity_rules completed: %s' % message['entity']) def build_user_rules(message): logger.info('building_user_rules begin user_id: %s' % message['user_id']) user_rules(message) logger.info('building_user_rules completed user_id: %s' % message['user_id']) #@enforce_ordering(slight=True) def notifier_hisotry(message): logger.info('notifier_hisotry consumer: %s' % message['entity']) notify_history(message) #@enforce_ordering(slight=True) def alert(message): try: logger.info('alert consumer - entity: %s status: %s output: %s occurrences: %s' % (message['entity'], message['status'], message['output'], message['occurrences'])) except: pass 
silenced = False silent_by = '' silent_comment = '' acked = False ack_by = '' ack_comment = '' if int(message['status']) != 0 and 'ack_' + message['entity'] in cache.keys("ack_*"): ack = cache.get('ack_' + message['entity']) ack_by = ack['user_name'] ack_comment = ack['ack_comment'] acked = True if 'silent_' + message['entity'] in cache.keys("silent_*"): silent = cache.get('silent_' + message['entity']) silent_by = silent['user_name'] silent_comment = silent['silent_comment'] silenced = True Group("notifications").send({ "text": json.dumps({ "timestamp": message['timestamp'], "entity": message['entity'], "status": message['status'], "output": message['output'], "ack": acked, "ack_by": ack_by, "ack_comment": ack_comment, "silent": silenced, "silent_by": silent_by, "silent_comment": silent_comment }) }) if int(message['status']) == 0: cache.delete("event_" + message['entity']) if 'ack_' + message['entity'] in cache.keys("ack_*"): cache.delete('ack_' + message['entity']) else: client_name, check_name = message['entity'].split(':') event_data = { 'timestamp': int(message['timestamp']), 'client': { 'name': client_name }, 'check': { 'name': check_name, 'status': int(message['status']), 'output': message['output'] }, } cache.set("event_" + message['entity'], event_data, timeout=None) alert_history(message) if silenced == False and acked == False: logger.debug('alert consumer - sending to alert_handler - entity: %s status: %s output: %s' % (message['entity'], message['status'], message['output'])) alert_handler(message) else: logger.info('alert consumer - skipping handler for acknowledged entity: %s status: %s output: %s occurrences: %s' % (message['entity'], message['status'], message['output'], message['occurrences'])) #@enforce_ordering(slight=True) def onduty_handler(message): notifier = Notify(message) if int(message['status']) >= settings.ON_DUTY_STATUS_LEVEL: notifier.notify_onduty() elif int(message['status']) == 0: if 'history' in message: message['history'].pop() for i in range(len(message['history']), 0, -1): if int(message['history'][i-1]) == 0: break if int(message['history'][i-1]) >= settings.ON_DUTY_STATUS_LEVEL: notifier.notify_onduty() #@enforce_ordering(slight=True) def ack(message): logger.info('ack begin') ack_handler(message) logger.info('ack completed') # Connected to websocket.connect and websocket.keepalive @channel_session_user_from_http def websocket_connect_events(message): if not message.user.is_authenticated(): logger.error('websocket_connect_events. user = %s is NOT authenticated', message.user) return message.channel_session['username'] = message.user.username #message.http_session.set_expiry(3600) logger.debug('websocket_connect_events. user: %s path: %s' % (message.user, message.content['path'])) Group("notifications").add(message.reply_channel) Group("announcement").add(message.reply_channel) message.reply_channel.send({"accept": True}) # Connected to websocket.keepalive @channel_session_user def websocket_keepalive_events(message): if not message.user.is_authenticated(): logger.error('websocket_connect_events. user = %s is NOT authenticated', message.user) return logger.debug('websocket_keepalive_events. message = %s', message) Group("notifications").add(message.reply_channel) Group("announcement").add(message.reply_channel) # Connected to websocket.disconnect @channel_session_user def websocket_disconnect_events(message): if not message.user.is_authenticated(): logger.error('websocket_connect_events. 
user = %s is NOT authenticated', message.user) return logger.debug('websocket_disconnect_events. message = %s', message.user) Group("notifications").discard(message.reply_channel) Group("announcement").discard(message.reply_channel) # Connected to websocket.connect and websocket.keepalive @channel_session_user_from_http def websocket_connect_entities(message): if not message.user.is_authenticated(): logger.error('websocket_connect_events. user = %s is NOT authenticated', message.user) return logger.debug('websocket_connect_entities. user = %s', message.user) Group("entities-private-%s" % message.user.id).add(message.reply_channel) Group("announcement").add(message.reply_channel) message.reply_channel.send({"accept": True}) # Connected to websocket.keepalive @channel_session_user def websocket_keepalive_entities(message): if not message.user.is_authenticated(): logger.error('websocket_connect_events. user = %s is NOT authenticated', message.user) return logger.debug('websocket_keepalive_entities. message = %s', message.user) Group("entities-private-%s" % message.user.id).add(message.reply_channel) Group("announcement").add(message.reply_channel) # Connected to websocket.disconnect @channel_session_user def websocket_disconnect_entities(message): if not message.user.is_authenticated(): logger.error('websocket_connect_events. user = %s is NOT authenticated', message.user) return logger.debug('websocket_disconnect_entities. message = %s', message.user) Group("entities-private-%s" % message.user.id).discard(message.reply_channel) Group("announcement").discard(message.reply_channel) # Connected to websocket.connect and websocket.keepalive @channel_session_user_from_http def websocket_connect_onduty(message): if not message.user.is_authenticated(): logger.error('websocket_connect_onduty. user = %s is NOT authenticated', message.user) return logger.debug('websocket_connect_events. user = %s', message.user) Group("on-duty").add(message.reply_channel) Group("announcement").add(message.reply_channel) message.reply_channel.send({"accept": True}) # Connected to websocket.keepalive @channel_session_user def websocket_keepalive_onduty(message): if not message.user.is_authenticated(): logger.error('websocket_keepalive_onduty. user = %s is NOT authenticated', message.user) return logger.debug('websocket_keepalive_events. message = %s', message) Group("on-duty").add(message.reply_channel) Group("announcement").add(message.reply_channel) # Connected to websocket.disconnect @channel_session_user def websocket_disconnect_onduty(message): if not message.user.is_authenticated(): logger.error('websocket_disconnect_onduty. user = %s is NOT authenticated', message.user) return logger.debug('websocket_disconnect_events. message = %s', message.user) Group("on-duty").discard(message.reply_channel) Group("announcement").discard(message.reply_channel)
/* * Copyright 2015 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.digitalpetri.opcua.stack.examples.client; import java.security.KeyPair; import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicLong; import com.digitalpetri.opcua.stack.client.UaTcpStackClient; import com.digitalpetri.opcua.stack.client.config.UaTcpStackClientConfig; import com.digitalpetri.opcua.stack.core.application.UaStackClient; import com.digitalpetri.opcua.stack.core.types.builtin.DateTime; import com.digitalpetri.opcua.stack.core.types.builtin.LocalizedText; import com.digitalpetri.opcua.stack.core.types.builtin.NodeId; import com.digitalpetri.opcua.stack.core.types.builtin.Variant; import com.digitalpetri.opcua.stack.core.types.structured.EndpointDescription; import com.digitalpetri.opcua.stack.core.types.structured.RequestHeader; import com.digitalpetri.opcua.stack.core.types.structured.TestStackRequest; import com.digitalpetri.opcua.stack.core.types.structured.TestStackResponse; import static com.digitalpetri.opcua.stack.core.types.builtin.unsigned.Unsigned.uint; public class ClientExample { private final AtomicLong requestHandle = new AtomicLong(1L); private final UaTcpStackClient client; public ClientExample(X509Certificate certificate, KeyPair keyPair) throws Exception { // Query endpoints and select highest security level. EndpointDescription[] endpoints = UaTcpStackClient.getEndpoints("opc.tcp://localhost:12685/example").get(); EndpointDescription endpoint = Arrays.stream(endpoints) .sorted((e1, e2) -> e2.getSecurityLevel().intValue() - e1.getSecurityLevel().intValue()) .findFirst() .orElseThrow(() -> new Exception("no endpoints returned")); UaTcpStackClientConfig config = UaTcpStackClientConfig.builder() .setApplicationName(LocalizedText.english("Stack Example Client")) .setApplicationUri(String.format("urn:example-client:%s", UUID.randomUUID())) .setCertificate(certificate) .setKeyPair(keyPair) .setEndpoint(endpoint) .build(); client = new UaTcpStackClient(config); } public CompletableFuture<TestStackResponse> testStack(int input) { RequestHeader header = new RequestHeader( NodeId.NULL_VALUE, DateTime.now(), uint(requestHandle.getAndIncrement()), uint(0), null, uint(60), null); TestStackRequest request = new TestStackRequest(header, uint(0), 1, new Variant(input)); return client.sendRequest(request); } public CompletableFuture<UaStackClient> disconnect() { return client.disconnect(); } }
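A hedged sketch of driving ClientExample. Loading the client certificate and key pair is application-specific; the keystore path, alias, and password below are placeholders, and the example server URL is the one hard-coded in the constructor above.

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyPair;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;

public class ClientExampleDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder keystore details; substitute whatever holds the client identity.
        char[] password = "changeit".toCharArray();
        KeyStore keyStore = KeyStore.getInstance("PKCS12");
        try (InputStream in = Files.newInputStream(Paths.get("client-keystore.p12"))) {
            keyStore.load(in, password);
        }
        X509Certificate certificate = (X509Certificate) keyStore.getCertificate("client");
        KeyPair keyPair = new KeyPair(certificate.getPublicKey(),
                (PrivateKey) keyStore.getKey("client", password));

        ClientExample client = new ClientExample(certificate, keyPair);

        // testStack() returns a CompletableFuture; block here for simplicity.
        System.out.println(client.testStack(42).get());

        client.disconnect().get();
    }
}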
/** * Default progress bar renderer (see {@link ProgressBarRenderer}). * @author Tongfei Chen * @author Muhammet Sakarya * @since 0.8.0 */ public class DefaultProgressBarRenderer implements ProgressBarRenderer { private ProgressBarStyle style; private String unitName; private long unitSize; private boolean isSpeedShown; private DecimalFormat speedFormat; private ChronoUnit speedUnit; protected DefaultProgressBarRenderer( ProgressBarStyle style, String unitName, long unitSize, boolean isSpeedShown, DecimalFormat speedFormat, ChronoUnit speedUnit ) { this.style = style; this.unitName = unitName; this.unitSize = unitSize; this.isSpeedShown = isSpeedShown; this.speedFormat = speedFormat; this.speedUnit = speedUnit; } // Number of full blocks protected int progressIntegralPart(ProgressState progress, int length) { return (int)(progress.getNormalizedProgress() * length); } protected int progressFractionalPart(ProgressState progress, int length) { double p = progress.getNormalizedProgress() * length; double fraction = (p - Math.floor(p)) * style.fractionSymbols.length(); return (int) Math.floor(fraction); } protected String eta(ProgressState progress, Duration elapsed) { if (progress.max <= 0 || progress.indefinite) return "?"; else if (progress.current - progress.start == 0) return "?"; else return Util.formatDuration( elapsed.dividedBy(progress.current - progress.start).multipliedBy(progress.max - progress.current) ); } protected String percentage(ProgressState progress) { String res; if (progress.max <= 0 || progress.indefinite) res = "? %"; else res = String.valueOf((int) Math.floor(100.0 * progress.current / progress.max)) + "%"; return Util.repeat(' ', 4 - res.length()) + res; } protected String ratio(ProgressState progress) { String m = progress.indefinite ? "?" : String.valueOf(progress.max / unitSize); String c = String.valueOf(progress.current / unitSize); return Util.repeat(' ', m.length() - c.length()) + c + "/" + m + unitName; } protected String speed(ProgressState progress, Duration elapsed) { String suffix = "/s"; double elapsedSeconds = elapsed.getSeconds(); double elapsedInUnit = elapsedSeconds; if (null != speedUnit) switch (speedUnit) { case MINUTES: suffix = "/min"; elapsedInUnit /= 60; break; case HOURS: suffix = "/h"; elapsedInUnit /= (60 * 60); break; case DAYS: suffix = "/d"; elapsedInUnit /= (60 * 60 * 24); break; } if (elapsedSeconds == 0) return "?" + unitName + suffix; double speed = (double) (progress.current - progress.start) / elapsedInUnit; double speedWithUnit = speed / unitSize; return speedFormat.format(speedWithUnit) + unitName + suffix; } public String render(ProgressState progress, int maxLength) { Instant currTime = Instant.now(); Duration elapsed = Duration.between(progress.startInstant, currTime); String prefix = progress.taskName + " " + percentage(progress) + " " + style.leftBracket; if (prefix.length() > maxLength) prefix = prefix.substring(0, maxLength - 1); // length of progress should be at least 1 int maxSuffixLength = Math.max(maxLength - prefix.length() - 1, 0); String speedString = isSpeedShown ? 
speed(progress, elapsed) : ""; String suffix = style.rightBracket + " " + ratio(progress) + " (" + Util.formatDuration(elapsed) + " / " + eta(progress, elapsed) + ") " + speedString + progress.extraMessage; // trim excessive suffix if (suffix.length() > maxSuffixLength) suffix = suffix.substring(0, maxSuffixLength); int length = maxLength - prefix.length() - suffix.length(); StringBuilder sb = new StringBuilder(); sb.append(prefix); // case of indefinite progress bars if (progress.indefinite) { int pos = (int)(progress.current % length); sb.append(Util.repeat(style.space, pos)); sb.append(style.block); sb.append(Util.repeat(style.space, length - pos - 1)); } // case of definite progress bars else { sb.append(Util.repeat(style.block, progressIntegralPart(progress, length))); if (progress.current < progress.max) { sb.append(style.fractionSymbols.charAt(progressFractionalPart(progress, length))); sb.append(Util.repeat(style.space, length - progressIntegralPart(progress, length) - 1)); } } sb.append(suffix); return sb.toString(); } }
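A self-contained illustration of the block arithmetic used by progressIntegralPart and progressFractionalPart above: the normalized progress scaled by the bar length gives the number of full blocks, and the remainder indexes into the style's fraction symbols. The sample values and symbol string are arbitrary; the real symbols come from the configured ProgressBarStyle.

public class BlockMathDemo {
    public static void main(String[] args) {
        long current = 37, max = 100;          // hypothetical progress state
        int length = 25;                       // characters available for the bar body
        String fractionSymbols = " \u258F\u258E\u258D\u258C\u258B\u258A\u2589";

        double scaled = ((double) current / max) * length;
        int fullBlocks = (int) scaled;                                       // progressIntegralPart
        int fractionIndex = (int) Math.floor((scaled - Math.floor(scaled))
                * fractionSymbols.length());                                 // progressFractionalPart

        System.out.printf("%d full blocks, then fraction symbol #%d of %d%n",
                fullBlocks, fractionIndex, fractionSymbols.length());
    }
}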
//******** OS_Launch *************** // start the scheduler, enable interrupts // Inputs: number of 12.5ns clock cycles for each time slice // you may select the units of this parameter // Outputs: none (does not return) // In Lab 2, you can ignore the theTimeSlice field // In Lab 3, you should implement the user-defined TimeSlice field // It is ok to limit the range of theTimeSlice to match the 24-bit SysTick void OS_Launch(unsigned long theTimeSlice){ systemPeriod = theTimeSlice; #ifdef SYSTICK_EN NVIC_ST_RELOAD_R = theTimeSlice - 1; NVIC_ST_CTRL_R = 0x00000007; #endif StartOS(); }
// We don't want ::CreateThread() calls scattered throughout the source. So gather // them all here. BOOL Thread::CreateNewThread(SIZE_T stackSize, LPTHREAD_START_ROUTINE start, void *args, LPCWSTR pName) { CONTRACTL { NOTHROW; GC_TRIGGERS; }
package service import ( "github.com/iodsp/user_center/context" "github.com/iodsp/user_center/models/iodsp" "github.com/iodsp/user_center/params" "github.com/jinzhu/gorm" "time" ) //given an originals path and a db instance. type Resource struct { db *gorm.DB } //returns a new Resource type with a given path and db instance func NewResource(conf *context.Config) *Resource { db := conf.Db() if conf.Debug() { db.LogMode(true) } instance := &Resource{ db: db, } return instance } // insert a new Resource record func (r *Resource) StoreResource(params params.ResourceParams) error { ResourceDb := r.db.NewScope(nil).DB() createTime := time.Now() updateTime := time.Now() insertErr := ResourceDb.Create(&iodsp.Resource{ DomainId: params.DomainId, DomainName: params.DomainName, Name: params.Name, Url: params.Url, Desc: params.Desc, CreatedAt: createTime, UpdatedAt: updateTime, }).Error return insertErr } //find a Resource by id func (r *Resource) Show(id int) (resource iodsp.Resource) { r.db.Where(&iodsp.Resource{Id: id}).First(&resource) return resource } //find a Resource by url func (r *Resource) ShowByUrl(url string)(resource iodsp.Resource) { r.db.Where(&iodsp.Resource{Url: url}).First(&resource) return resource } //find a Resource by name func (r *Resource) ShowByName(name string)(resource iodsp.Resource) { r.db.Where(&iodsp.Resource{Name: name}).First(&resource) return resource } //Resource list func (r *Resource) List() (resources []iodsp.Resource) { r.db.Model(&iodsp.Resource{}).Order("id desc").Find(&resources) return resources } //update Resource func (r *Resource) Update(resource iodsp.Resource) error { updateErr := r.db.Save(&resource).Error return updateErr } //delete Resource func (r *Resource) Delete(resource iodsp.Resource) error { deleteError := r.db.Delete(&resource).Error return deleteError } //find a Resource by url not id func (r *Resource) ShowResourceByUrlNotId(url string, id int) (resource iodsp.Resource){ r.db.Where(&iodsp.Resource{Url: url}).Not("id", id).First(&resource) return resource } //find a Resource by name not id func (r *Resource) ShowResourceByNameNotId(name string, id int) (resource iodsp.Resource){ r.db.Where(&iodsp.Resource{Name: name}).Not("id", id).First(&resource) return resource }
Been busy, but I managed to squeeze in some time to finish a scene for my own indulgent reasons. Oh, and Gwen went through the gritty redesign grinder a while back and now she's all shaggy! They're both karran desperi, or vagabonds if you will, adopted by the former seer Taega, who died under unfortunate circumstances, but they inherited her wagon, her horses, her belongings... Both have different talents and complete each other, I guess. Left is Gwen, right is Yvette. She is May's. They also meet Mordecai, but that's a different story. I've been mostly doodling a lot lately and I've built up quite a backlog of it! I'm not sure if this is a good place to spam with the Noodly McDoodly, but if you're interested you can view it on our blog or on my Twitter account!
from django.core.urlresolvers import reverse from django.db import models class Group(models.Model): """ Representing a Group that orders food. """ name = models.CharField(max_length=255) """ Name to represent the group. Will be shown at any place where the group is represented in the website. Is used for the default ordering of a list of groups. """ enclosure = models.BooleanField(default=False, verbose_name="Einlage bezahlt") """ Only groups that have given a enclosure can order food. This attribute saves the state of this payment. If it is False, the group can not order food. """ class Meta: ordering = ['name'] def __str__(self): return self.name def get_absolute_url(self): """ Returns the default url for the object. A Group does not have a DetailView, so the url UpdateView is returned instead. """ return reverse('order_group_update', args=[self.pk]) class Unit(models.Model): """ A model representing a Unit in which the food is ordered. E.G. KG, Liter etc. The model differs between the unit for the price and the unit for the order. For example, the price could be in KG but you should order in Gram. In this case the divisor attribute has to be an integer, which can be used to calculate the price for the order. In the example above, it would be 1000. To show the name of a unit, you should not use the db-values, but the attributes self.price and self.order. """ name = models.CharField(max_length=255, unique=True) """ Name for the Unit. If order- and price-unit differce. This is the attribute for the price. """ order_name = models.CharField(max_length=255, blank=True) """ Name of the unit shound behinde the a order. """ divisor = models.PositiveIntegerField(default=1) """ Integer used to calculate the price for a order """ def __str__(self): return self.name @property def price(self): """ Shows the name of the unit for a price. This is always the db-value self.name """ return self.name @property def order(self): """ Shows the name of the unit for an order. This is self.name if the name for price and order are the same, else it is the db-field order_name. """ return self.order_name or self.name class Product(models.Model): """ Model to representing a product (food). """ name = models.CharField(max_length=255, unique=True) """ The Name of the product. Lists of products are soted by this field. """ unit = models.ForeignKey(Unit, verbose_name="Einheit") """ The unit in which the product is ordered. """ price = models.DecimalField(max_digits=10, decimal_places=2, null=True, blank=True, verbose_name="Preis") # TODO: use a custom Integerfield """ Price of one unit of the product. """ available = models.BooleanField(default=True, verbose_name="Verfügbar") """ Flag to save, if the product can be ordered. If False, it will not be shound in the order-table. """ class Meta: ordering = ['name'] def __str__(self): return self.name def get_absolute_url(self): """ Returns the UpdateView for a product, because there is no DetailView for a product. """ return reverse('order_product_update', args=[self.pk]) @property def multiplier(self): """ Returns a value to calculate the price for the order. For example by a price of 1 EUR for a KG, it returns 0.001 (EUR for Gram) """ if self.price is None: return 0 return self.price / self.unit.divisor class Bundle(models.Model): """ Model to represent all orders from each group for a specific time. """ start = models.DateTimeField(auto_now_add=True) """ Time of the order/bundle. Sort-attribute for a list of bundles. 
""" open = models.BooleanField(default=True) """ Flag to show, if there can still be orders, or if the time for orders is finished. If open == False, no more orders can be added. """ class Meta: get_latest_by = 'start' def __str__(self): return "Bestellung vom {}".format(self.start.strftime('%d.%m.%Y')) def get_absolute_url(self): return reverse('order_bundle_detail', args=[self.pk]) def has_unknown_price(self, group=None, delivered=False): """ Returns True or False, if there is a relevant product in the bundle, which has no price. if group is set to a group, relevant products are only those, ordered from the group. if delivered is True, products where nothing was delivered are ignored """ kwargs = {'delivered': 0} if delivered else {'amount': 0} query = self.orders.exclude(**kwargs).filter(product__price=None) if group is None: return query.exists() else: return query.filter(group=group).exists() def has_unknown_price_delivered(self): """ Method for the template where the attribute delivered can not be set """ return self.has_unknown_price(delivered=True) def price_for_group(self, group, delivered=False): """ Returns the full price for all products for a specific group. This software differes between the order of a group, and the amount of products that are actual delivered. If the attribute delivered is False, the order-price is returned. If delivered is True, the price is shouwn, that the group has to pay. """ # TODO: Maybe this has to be done in JS, so the method can be deleted. query = self.orders.filter(group=group).select_related('product__unit') if delivered: return sum(order.product.multiplier * order.get_delivered() for order in query) else: return sum(order.product.multiplier * order.amount for order in query) def price_for_all(self, delivered=False): """ Returns the price for all groups. For the attribute delivered, see the method price_for_group. """ # TODO: Maybe this has to be done in JS query = self.orders.select_related('product__unit') if delivered: return sum(order.product.multiplier * order.get_delivered() for order in query) else: return sum(order.product.multiplier * order.amount for order in query) class Order(models.Model): """ Model representing the order of one group for one product for one bundle. """ group = models.ForeignKey(Group) product = models.ForeignKey(Product) bundle = models.ForeignKey(Bundle, related_name='orders') amount = models.PositiveIntegerField(default=0, blank=True) delivered = models.PositiveIntegerField(null=True, blank=True) """ The model differentiate between the amount ordered and the amount that was actual delivered. The attribute delivered should not be used directly, but with the method get_delivered. """ class Meta: unique_together = ('group', 'product', 'bundle') def __str__(self): # TODO: nicht auf foreignkeys verweisen return "{:<10} {:5} x {}".format("%s:" % self.group, self.amount, self.product) def get_delivered(self): """ Returns the db-value delivered if it is not None, else the db-value amount. """ return self.delivered if self.delivered is not None else self.amount
/** * Created by songlihuang on 2017/7/13. */ public class FileProgressManager implements ProgressManager { private static final String DATE_FMT = "yyyy-MM-dd HH:mm:ss"; private File baseFile; public void init() { if (baseFile == null) { baseFile = new File(SystemUtil.USER_HOME + "/transfer/client/progress"); } if (!baseFile.exists()) { baseFile.mkdirs(); } } @Override public void save(String taskName, TableProgress tableProgress) throws IOException { File file = new File(baseFile, taskName); if (!file.getParentFile().exists()) { file.getParentFile().mkdirs(); } byte[] data = encode(tableProgress); FileUtil.writeContent(file, data); } @Override public TableProgress load(String taskName) throws IOException { File file = new File(baseFile, taskName); if (!file.exists()) { return null; } byte[] data = FileUtil.readContent(file); if (data == null) { return null; } return decode(data); } protected byte[] encode(TableProgress tableProgress) { Properties properties = new Properties(); properties.put("taskName", tableProgress.getTaskName()); properties.put("remoteDbIndex", String.valueOf(tableProgress.getRemoteDbIndex())); properties.put("remoteTable", String.valueOf(tableProgress.getRemoteTable())); properties.put("nextLastModifiedValue", DateUtil.formatDate(tableProgress.getNextLastModifiedValue(), DATE_FMT)); Pagination pagination = tableProgress.getPagination(); properties.put("pagination-pageSize", String.valueOf(pagination.getPageSize())); properties.put("pagination-value", String.valueOf(pagination.getValue())); if (pagination.getValue() instanceof String) { properties.put("pagination-valueType", "string"); } else if (pagination.getValue() instanceof Long) { properties.put("pagination-valueType", "long"); } else if (pagination.getValue() instanceof Integer) { properties.put("pagination-valueType", "int"); } else if (pagination.getValue() == null) { properties.put("pagination-valueType", "finish"); } else { throw new RuntimeException("not support value type, value:" + pagination.getValue()); } properties.put("pagination-lastModified", DateUtil.formatDate(pagination.getLastModified(), DATE_FMT)); FastByteArrayOutputStream bos = new FastByteArrayOutputStream(1024); try { properties.store(bos, "progress"); } catch (IOException e) { throw new RuntimeException(e); } return bos.toByteArray(); } protected TableProgress decode(byte[] b) { Properties properties = new Properties(); try { properties.load(new FastByteArrayInputStream(b)); } catch (IOException e) { throw new RuntimeException(e); } TableProgress tableProgress = new TableProgress(); tableProgress.setTaskName(properties.getProperty("taskName")); tableProgress.setRemoteDbIndex(getPropertyInt(properties, "remoteDbIndex")); tableProgress.setRemoteTable(properties.getProperty("remoteTable")); tableProgress.setNextLastModifiedValue(getPropertyDate(properties, "nextLastModifiedValue")); Pagination pagination = new Pagination(); pagination.setPageSize(getPropertyInt(properties, "pagination-pageSize")); String type = properties.getProperty("pagination-valueType"); if (StringUtil.equals("string", type)) { pagination.setValue(properties.getProperty("pagination-value")); } else if (StringUtil.equals("long", type)) { pagination.setValue(getPropertyLong(properties, "pagination-value")); } else if (StringUtil.equals("int", type)) { pagination.setValue(getPropertyInt(properties, "pagination-value")); } else if (StringUtil.equals("finish", type)) { pagination.setValue(null); } else { throw new RuntimeException("not support pagination-valueType:" + type); } 
pagination.setLastModified(getPropertyDate(properties, "pagination-lastModified")); tableProgress.setPagination(pagination); return tableProgress; } private static int getPropertyInt(Properties properties, String name) { String value = properties.getProperty(name); if (value == null) { throw new RuntimeException("no value for property: " + name); } return Integer.parseInt(value); } private static long getPropertyLong(Properties properties, String name) { String value = properties.getProperty(name); if (value == null) { throw new RuntimeException("no value for property: " + name); } return Long.parseLong(value); } private static Date getPropertyDate(Properties properties, String name) { String value = properties.getProperty(name); if (value == null) { throw new RuntimeException("no value for property: " + name); } return DateUtil.parseDate(value, DATE_FMT); } public void setBaseFile(String fileName) { baseFile = new File(fileName); } }
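A hedged round-trip sketch for FileProgressManager: build a TableProgress, persist it under a task name, and load it back. The task name, table name, and pagination values are made up; setBaseFile() is optional since init() falls back to the user's home directory under transfer/client/progress.

import java.io.IOException;
import java.util.Date;

public class ProgressRoundTripDemo {
    public static void main(String[] args) throws IOException {
        FileProgressManager manager = new FileProgressManager();
        manager.setBaseFile("/tmp/transfer-progress");   // placeholder location
        manager.init();

        Pagination pagination = new Pagination();
        pagination.setPageSize(500);
        pagination.setValue(12345L);          // stored with pagination-valueType=long
        pagination.setLastModified(new Date());

        TableProgress progress = new TableProgress();
        progress.setTaskName("orders_sync");
        progress.setRemoteDbIndex(0);
        progress.setRemoteTable("orders");
        progress.setNextLastModifiedValue(new Date());
        progress.setPagination(pagination);

        manager.save("orders_sync", progress);

        TableProgress reloaded = manager.load("orders_sync");
        System.out.println(reloaded.getTaskName() + " @ " + reloaded.getPagination().getValue());
    }
}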
/* * Copyright 2019 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.media2.player.exoplayer; import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP_PREFIX; import android.annotation.SuppressLint; import android.content.Context; import android.media.MediaDrm; import android.os.Handler; import android.os.HandlerThread; import android.os.PersistableBundle; import android.util.Log; import android.util.Pair; import android.view.Surface; import androidx.annotation.GuardedBy; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; import androidx.annotation.RestrictTo; import androidx.concurrent.futures.ResolvableFuture; import androidx.core.util.ObjectsCompat; import androidx.core.util.Preconditions; import androidx.media.AudioAttributesCompat; import androidx.media2.common.MediaItem; import androidx.media2.common.SubtitleData; import androidx.media2.exoplayer.external.Player; import androidx.media2.player.MediaPlayer2; import androidx.media2.player.MediaTimestamp; import androidx.media2.player.PlaybackParams; import androidx.media2.player.TimedMetaData; import java.io.IOException; import java.util.ArrayDeque; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.RejectedExecutionException; /** * An implementation of {@link MediaPlayer2} based on a repackaged version of ExoPlayer. * * @hide */ @RestrictTo(LIBRARY_GROUP_PREFIX) @SuppressLint("RestrictedApi") // TODO(b/68398926): Remove once RestrictedApi checks are fixed. public final class ExoPlayerMediaPlayer2Impl extends MediaPlayer2 implements ExoPlayerWrapper.Listener { private static final String TAG = "ExoPlayerMediaPlayer2"; @SuppressWarnings("WeakerAccess") /* synthetic access */ final ExoPlayerWrapper mPlayer; private final Handler mTaskHandler; @SuppressWarnings("WeakerAccess") /* synthetic access */ @GuardedBy("mTaskLock") final ArrayDeque<Task> mPendingTasks; @SuppressWarnings("WeakerAccess") /* synthetic access */ final Object mTaskLock; @SuppressWarnings("WeakerAccess") /* synthetic access */ @GuardedBy("mTaskLock") Task mCurrentTask; @SuppressWarnings("WeakerAccess") /* synthetic access */ final Object mLock; @GuardedBy("mLock") private Pair<Executor, EventCallback> mExecutorAndEventCallback; @SuppressWarnings("unused") @GuardedBy("mLock") private Pair<Executor, DrmEventCallback> mExecutorAndDrmEventCallback; @GuardedBy("mLock") private HandlerThread mHandlerThread; /** Creates a new ExoPlayer wrapper using the specified context. */ public ExoPlayerMediaPlayer2Impl(@NonNull Context context) { mHandlerThread = new HandlerThread("ExoMediaPlayer2Thread"); mHandlerThread.start(); mPlayer = new ExoPlayerWrapper( context.getApplicationContext(), /* listener= */ this, mHandlerThread.getLooper()); // Player callbacks will be called on the task handler thread. 
mTaskHandler = new Handler(mPlayer.getLooper()); mPendingTasks = new ArrayDeque<>(); mTaskLock = new Object(); mLock = new Object(); resetPlayer(); } // Command queue and events implementation. // TODO: Consider refactoring to share implementation with MediaPlayer2Impl. @Override public Object notifyWhenCommandLabelReached(@NonNull final Object label) { return addTask(new Task(CALL_COMPLETED_NOTIFY_WHEN_COMMAND_LABEL_REACHED, false) { @Override void process() { notifyMediaPlayer2Event(new Mp2EventNotifier() { @Override public void notify(EventCallback cb) { cb.onCommandLabelReached(ExoPlayerMediaPlayer2Impl.this, label); } }); } }); } @Override public void clearPendingCommands() { synchronized (mTaskLock) { mPendingTasks.clear(); } } @Override public boolean cancel(Object token) { synchronized (mTaskLock) { return mPendingTasks.remove(token); } } private Object addTask(Task task) { synchronized (mTaskLock) { mPendingTasks.add(task); processPendingTask(); } return task; } @GuardedBy("mTaskLock") @SuppressWarnings("WeakerAccess") /* synthetic access */ void processPendingTask() { if (mCurrentTask == null && !mPendingTasks.isEmpty()) { Task task = mPendingTasks.removeFirst(); mCurrentTask = task; mTaskHandler.post(task); } } @Override public void setEventCallback(@NonNull Executor executor, @NonNull EventCallback eventCallback) { Preconditions.checkNotNull(executor); Preconditions.checkNotNull(eventCallback); synchronized (mLock) { mExecutorAndEventCallback = Pair.create(executor, eventCallback); } } @Override public void clearEventCallback() { synchronized (mLock) { mExecutorAndEventCallback = null; } } @Override public void setDrmEventCallback(@NonNull Executor executor, @NonNull DrmEventCallback eventCallback) { Preconditions.checkNotNull(executor); Preconditions.checkNotNull(eventCallback); synchronized (mLock) { mExecutorAndDrmEventCallback = Pair.create(executor, eventCallback); } } @Override public void clearDrmEventCallback() { synchronized (mLock) { mExecutorAndDrmEventCallback = null; } } @SuppressWarnings("WeakerAccess") /* synthetic access */ void notifyMediaPlayer2Event(final Mp2EventNotifier notifier) { final Pair<Executor, EventCallback> executorAndEventCallback; synchronized (mLock) { executorAndEventCallback = mExecutorAndEventCallback; } if (executorAndEventCallback != null) { Executor executor = executorAndEventCallback.first; final EventCallback eventCallback = executorAndEventCallback.second; try { executor.execute(new Runnable() { @Override public void run() { notifier.notify(eventCallback); } }); } catch (RejectedExecutionException e) { // The given executor is shutting down. Log.w(TAG, "The given executor is shutting down. Ignoring the player event."); } } } // Player implementation. 
@Override public Object setAudioSessionId(final int sessionId) { return addTask(new Task(CALL_COMPLETED_SET_AUDIO_SESSION_ID, false) { @Override void process() { mPlayer.setAudioSessionId(sessionId); } }); } @Override public Object setMediaItem(@NonNull final MediaItem item) { return addTask(new Task(CALL_COMPLETED_SET_DATA_SOURCE, false) { @Override void process() { mPlayer.setMediaItem(item); } }); } @Override public MediaItem getCurrentMediaItem() { return runPlayerCallableBlocking(new Callable<MediaItem>() { @Override public MediaItem call() throws Exception { return mPlayer.getCurrentMediaItem(); } }); } @Override public Object prepare() { return addTask(new Task(CALL_COMPLETED_PREPARE, true) { @Override void process() { mPlayer.prepare(); } }); } @Override public Object play() { return addTask(new Task(CALL_COMPLETED_PLAY, false) { @Override void process() { mPlayer.play(); } }); } @Override public Object pause() { return addTask(new Task(CALL_COMPLETED_PAUSE, false) { @Override void process() { mPlayer.pause(); } }); } @Override public Object seekTo(final long msec, final int mode) { return addTask(new Task(CALL_COMPLETED_SEEK_TO, true) { @Override void process() { mPlayer.seekTo(msec, mode); } }); } @Override public long getCurrentPosition() { return runPlayerCallableBlocking(new Callable<Long>() { @Override public Long call() throws Exception { return mPlayer.getCurrentPosition(); } }); } @Override public long getDuration() { return runPlayerCallableBlocking(new Callable<Long>() { @Override public Long call() throws Exception { return mPlayer.getDuration(); } }); } @Override public long getBufferedPosition() { return runPlayerCallableBlocking(new Callable<Long>() { @Override public Long call() throws Exception { return mPlayer.getBufferedPosition(); } }); } @Override public @MediaPlayer2.MediaPlayer2State int getState() { return runPlayerCallableBlocking(new Callable<Integer>() { @Override public Integer call() throws Exception { return mPlayer.getState(); } }); } @Override public Object loopCurrent(final boolean loop) { return addTask(new Task(CALL_COMPLETED_LOOP_CURRENT, false) { @Override void process() { mPlayer.loopCurrent(loop); } }); } @Override public Object skipToNext() { return addTask(new Task(CALL_COMPLETED_SKIP_TO_NEXT, false) { @Override void process() { mPlayer.skipToNext(); } }); } @Override public Object setNextMediaItem(@NonNull final MediaItem item) { return addTask(new Task(CALL_COMPLETED_SET_NEXT_DATA_SOURCE, false) { @Override void process() { mPlayer.setNextMediaItem(item); } }); } @Override public Object setNextMediaItems(@NonNull final List<MediaItem> items) { return addTask(new Task(CALL_COMPLETED_SET_NEXT_DATA_SOURCES, false) { @Override void process() { mPlayer.setNextMediaItems(items); } }); } @Override public Object setAudioAttributes(@NonNull final AudioAttributesCompat attributes) { return addTask(new Task(CALL_COMPLETED_SET_AUDIO_ATTRIBUTES, false) { @Override void process() { mPlayer.setAudioAttributes(attributes); } }); } @Override public AudioAttributesCompat getAudioAttributes() { return runPlayerCallableBlocking(new Callable<AudioAttributesCompat>() { @Override public AudioAttributesCompat call() throws Exception { return mPlayer.getAudioAttributes(); } }); } @Override public int getAudioSessionId() { return runPlayerCallableBlocking(new Callable<Integer>() { @Override public Integer call() throws Exception { return mPlayer.getAudioSessionId(); } }); } @Override public Object attachAuxEffect(final int effectId) { return addTask(new 
Task(CALL_COMPLETED_ATTACH_AUX_EFFECT, false) { @Override void process() { mPlayer.attachAuxEffect(effectId); } }); } @Override public Object setAuxEffectSendLevel(final float auxEffectSendLevel) { return addTask(new Task(CALL_COMPLETED_SET_AUX_EFFECT_SEND_LEVEL, false) { @Override void process() { mPlayer.setAuxEffectSendLevel(auxEffectSendLevel); } }); } @Override public Object setPlaybackParams(@NonNull final PlaybackParams params) { return addTask(new Task(CALL_COMPLETED_SET_PLAYBACK_PARAMS, false) { @Override void process() { mPlayer.setPlaybackParams(params); } }); } @Override @NonNull public PlaybackParams getPlaybackParams() { return runPlayerCallableBlocking(new Callable<PlaybackParams>() { @Override public PlaybackParams call() throws Exception { return mPlayer.getPlaybackParams(); } }); } @Override public int getVideoWidth() { return runPlayerCallableBlocking(new Callable<Integer>() { @Override public Integer call() throws Exception { return mPlayer.getVideoWidth(); } }); } @Override public int getVideoHeight() { return runPlayerCallableBlocking(new Callable<Integer>() { @Override public Integer call() throws Exception { return mPlayer.getVideoHeight(); } }); } @Override public Object setSurface(final Surface surface) { return addTask(new Task(CALL_COMPLETED_SET_SURFACE, false) { @Override void process() { mPlayer.setSurface(surface); } }); } @Override public Object setPlayerVolume(final float volume) { return addTask(new Task(CALL_COMPLETED_SET_PLAYER_VOLUME, false) { @Override void process() { mPlayer.setVolume(volume); } }); } @Override public float getPlayerVolume() { return runPlayerCallableBlocking(new Callable<Float>() { @Override public Float call() throws Exception { return mPlayer.getVolume(); } }); } @Override public List<TrackInfo> getTrackInfo() { return runPlayerCallableBlocking(new Callable<List<TrackInfo>>() { @Override public List<TrackInfo> call() throws Exception { return mPlayer.getTrackInfo(); } }); } @Override public int getSelectedTrack(final int trackType) { return runPlayerCallableBlocking(new Callable<Integer>() { @Override public Integer call() { return mPlayer.getSelectedTrack(trackType); } }); } @Override public Object selectTrack(final int index) { return addTask(new Task(CALL_COMPLETED_SELECT_TRACK, false) { @Override void process() { mPlayer.selectTrack(index); } }); } @Override public Object deselectTrack(final int index) { return addTask(new Task(CALL_COMPLETED_DESELECT_TRACK, false) { @Override void process() { mPlayer.deselectTrack(index); } }); } @Override @RequiresApi(21) public PersistableBundle getMetrics() { return runPlayerCallableBlocking(new Callable<PersistableBundle>() { @Override public PersistableBundle call() throws Exception { return mPlayer.getMetricsV21(); } }); } @Override public MediaTimestamp getTimestamp() { return runPlayerCallableBlocking(new Callable<MediaTimestamp>() { @Override public MediaTimestamp call() { return mPlayer.getTimestamp(); } }); } @Override public void reset() { clearPendingCommands(); // Make sure that the current task finishes. Task currentTask; synchronized (mTaskLock) { currentTask = mCurrentTask; } if (currentTask != null) { synchronized (currentTask) { try { while (!currentTask.mDone) { currentTask.wait(); } } catch (InterruptedException e) { // Suppress interruption. 
} } } mTaskHandler.removeCallbacksAndMessages(null); runPlayerCallableBlocking(new Callable<Void>() { @Override public Void call() { mPlayer.reset(); return null; } }); } @Override public void close() { clearEventCallback(); HandlerThread handlerThread; synchronized (mLock) { handlerThread = mHandlerThread; if (handlerThread == null) { return; } mHandlerThread = null; } runPlayerCallableBlocking(new Callable<Void>() { @Override public Void call() { mPlayer.close(); return null; } }); handlerThread.quit(); } @Override public void setOnDrmConfigHelper(OnDrmConfigHelper listener) { throw new UnsupportedOperationException(); } @Override public DrmInfo getDrmInfo() { throw new UnsupportedOperationException(); } @Override public Object prepareDrm(@NonNull final UUID uuid) { throw new UnsupportedOperationException(); } @Override public void releaseDrm() { throw new UnsupportedOperationException(); } @Override @NonNull public MediaDrm.KeyRequest getDrmKeyRequest(byte[] keySetId, byte[] initData, String mimeType, int keyType, Map<String, String> optionalParameters) { throw new UnsupportedOperationException(); } @Override public byte[] provideDrmKeyResponse(@Nullable byte[] keySetId, @NonNull byte[] response) { throw new UnsupportedOperationException(); } @Override public void restoreDrmKeys(@NonNull byte[] keySetId) { throw new UnsupportedOperationException(); } @Override @NonNull public String getDrmPropertyString(@NonNull String propertyName) { throw new UnsupportedOperationException(); } @Override public void setDrmPropertyString(@NonNull String propertyName, @NonNull String value) { throw new UnsupportedOperationException(); } // ExoPlayerWrapper.Listener implementation. @Override public void onPrepared(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_PREPARED); synchronized (mTaskLock) { if (mCurrentTask != null && mCurrentTask.mMediaCallType == CALL_COMPLETED_PREPARE && ObjectsCompat.equals(mCurrentTask.mDSD, mediaItem) && mCurrentTask.mNeedToWaitForEventToComplete) { mCurrentTask.sendCompleteNotification(CALL_STATUS_NO_ERROR); mCurrentTask = null; processPendingTask(); } } } @Override public void onMetadataChanged(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_METADATA_UPDATE); } @Override public void onSeekCompleted() { synchronized (mTaskLock) { if (mCurrentTask != null && mCurrentTask.mMediaCallType == CALL_COMPLETED_SEEK_TO && mCurrentTask.mNeedToWaitForEventToComplete) { mCurrentTask.sendCompleteNotification(CALL_STATUS_NO_ERROR); mCurrentTask = null; processPendingTask(); } } } @Override public void onBufferingStarted(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_BUFFERING_START); } @Override public void onBufferingEnded(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_BUFFERING_END); } @Override public void onBufferingUpdate(MediaItem mediaItem, int bufferingPercentage) { notifyOnInfo(mediaItem, MEDIA_INFO_BUFFERING_UPDATE, bufferingPercentage); } @Override public void onBandwidthSample(MediaItem mediaItem, int bitrateKbps) { notifyOnInfo(mediaItem, MEDIA_INFO_NETWORK_BANDWIDTH, bitrateKbps); } @Override public void onVideoRenderingStart(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_VIDEO_RENDERING_START); } @Override public void onVideoSizeChanged(final MediaItem mediaItem, final int width, final int height) { notifyMediaPlayer2Event(new ExoPlayerMediaPlayer2Impl.Mp2EventNotifier() { @Override public void notify(MediaPlayer2.EventCallback callback) { callback.onVideoSizeChanged( ExoPlayerMediaPlayer2Impl.this, mediaItem, width, 
height); } }); } @Override public void onSubtitleData(final MediaItem mediaItem, final int trackIndex, final SubtitleData subtitleData) { notifyMediaPlayer2Event(new Mp2EventNotifier() { @Override public void notify(EventCallback cb) { cb.onSubtitleData( ExoPlayerMediaPlayer2Impl.this, mediaItem, trackIndex, subtitleData); } }); } @Override public void onTimedMetadata(final MediaItem mediaItem, final TimedMetaData timedMetaData) { notifyMediaPlayer2Event(new Mp2EventNotifier() { @Override public void notify(EventCallback cb) { cb.onTimedMetaDataAvailable( ExoPlayerMediaPlayer2Impl.this, mediaItem, timedMetaData); } }); } @Override public void onMediaItemStartedAsNext(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_DATA_SOURCE_START); } @Override public void onMediaItemEnded(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_DATA_SOURCE_END); } @Override public void onLoop(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_DATA_SOURCE_REPEAT); } @Override public void onMediaTimeDiscontinuity( final MediaItem mediaItem, final MediaTimestamp mediaTimestamp) { notifyMediaPlayer2Event(new Mp2EventNotifier() { @Override public void notify(EventCallback cb) { cb.onMediaTimeDiscontinuity( ExoPlayerMediaPlayer2Impl.this, mediaItem, mediaTimestamp); } }); } @Override public void onPlaybackEnded(MediaItem mediaItem) { notifyOnInfo(mediaItem, MEDIA_INFO_DATA_SOURCE_LIST_END); } @Override public void onError(final MediaItem mediaItem, final int what) { synchronized (mTaskLock) { if (mCurrentTask != null && mCurrentTask.mNeedToWaitForEventToComplete) { mCurrentTask.sendCompleteNotification(CALL_STATUS_ERROR_UNKNOWN); mCurrentTask = null; processPendingTask(); } } notifyMediaPlayer2Event(new Mp2EventNotifier() { @Override public void notify(EventCallback cb) { cb.onError(ExoPlayerMediaPlayer2Impl.this, mediaItem, what, /* extra= */ 0); } }); } // Internal functionality. private void notifyOnInfo(MediaItem mediaItem, int what) { notifyOnInfo(mediaItem, what, /* extra= */ 0); } private void notifyOnInfo(final MediaItem mediaItem, final int what, final int extra) { notifyMediaPlayer2Event(new ExoPlayerMediaPlayer2Impl.Mp2EventNotifier() { @Override public void notify(MediaPlayer2.EventCallback callback) { callback.onInfo(ExoPlayerMediaPlayer2Impl.this, mediaItem, what, extra); } }); } private void resetPlayer() { runPlayerCallableBlocking(new Callable<Void>() { @Override public Void call() throws Exception { mPlayer.reset(); return null; } }); } /** * Runs the specified callable on the player thread, blocking the calling thread until a result * is returned. * * <p>Note: ExoPlayer methods apart from {@link Player#release} are asynchronous, so calling * player methods will not block the caller thread for a substantial amount of time. */ private <T> T runPlayerCallableBlocking(final Callable<T> callable) { final ResolvableFuture<T> future = ResolvableFuture.create(); boolean success = mTaskHandler.post(new Runnable() { @Override public void run() { try { future.set(callable.call()); } catch (Throwable e) { future.setException(e); } } }); Preconditions.checkState(success); try { T result; boolean wasInterrupted = false; while (true) { try { result = future.get(); break; } catch (InterruptedException e) { // We always wait for player calls to return. 
wasInterrupted = true; } } if (wasInterrupted) { Thread.currentThread().interrupt(); } return result; } catch (ExecutionException e) { Throwable cause = e.getCause(); Log.e(TAG, "Internal player error", new RuntimeException(cause)); throw new IllegalStateException(cause); } } private interface Mp2EventNotifier { void notify(EventCallback callback); } private abstract class Task implements Runnable { final int mMediaCallType; final boolean mNeedToWaitForEventToComplete; MediaItem mDSD; @GuardedBy("this") boolean mDone; Task(int mediaCallType, boolean needToWaitForEventToComplete) { mMediaCallType = mediaCallType; mNeedToWaitForEventToComplete = needToWaitForEventToComplete; } abstract void process() throws IOException, NoDrmSchemeException; @Override public void run() { int status = CALL_STATUS_NO_ERROR; boolean skip = false; if (mMediaCallType == CALL_COMPLETED_SEEK_TO) { synchronized (mTaskLock) { Task next = mPendingTasks.peekFirst(); if (next != null && next.mMediaCallType == CALL_COMPLETED_SEEK_TO) { skip = true; } } } if (!skip) { try { if (mMediaCallType != CALL_COMPLETED_NOTIFY_WHEN_COMMAND_LABEL_REACHED && mPlayer.hasError()) { status = CALL_STATUS_INVALID_OPERATION; } else { process(); } } catch (IllegalStateException e) { status = CALL_STATUS_INVALID_OPERATION; } catch (IllegalArgumentException e) { status = CALL_STATUS_BAD_VALUE; } catch (SecurityException e) { status = CALL_STATUS_PERMISSION_DENIED; } catch (IOException e) { status = CALL_STATUS_ERROR_IO; } catch (Exception e) { status = CALL_STATUS_ERROR_UNKNOWN; } } else { status = CALL_STATUS_SKIPPED; } mDSD = mPlayer.getCurrentMediaItem(); if (!mNeedToWaitForEventToComplete || status != CALL_STATUS_NO_ERROR || skip) { sendCompleteNotification(status); synchronized (mTaskLock) { mCurrentTask = null; processPendingTask(); } } // reset() might be waiting for this task. Notify that the task is done. synchronized (this) { mDone = true; notifyAll(); } } void sendCompleteNotification(final int status) { if (mMediaCallType >= SEPARATE_CALL_COMPLETE_CALLBACK_START) { // These methods have a separate call complete callback and it should be already // called within process(). return; } notifyMediaPlayer2Event(new Mp2EventNotifier() { @Override public void notify(EventCallback callback) { callback.onCallCompleted( ExoPlayerMediaPlayer2Impl.this, mDSD, mMediaCallType, status); } }); } } }
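A note on the threading pattern above: runPlayerCallableBlocking funnels every player call onto a single handler thread and parks the calling thread on a future until the result (or exception) comes back, re-setting the interrupt flag if the wait was interrupted. The sketch below is a language-agnostic illustration of that same idea in Python; it is not part of the original sources, and the class and method names are invented for the example.

import queue
import threading


class SingleThreadCaller:
    """Runs callables on one dedicated worker thread and blocks the caller for the result."""

    def __init__(self):
        self._tasks = queue.Queue()
        self._worker = threading.Thread(target=self._run, daemon=True)
        self._worker.start()

    def _run(self):
        while True:
            func, done, box = self._tasks.get()
            try:
                box["result"] = func()
            except Exception as exc:  # hand the error back instead of killing the worker
                box["error"] = exc
            done.set()

    def call_blocking(self, func):
        done = threading.Event()
        box = {}
        self._tasks.put((func, done, box))
        done.wait()  # like runPlayerCallableBlocking, the caller always waits for completion
        if "error" in box:
            raise box["error"]
        return box["result"]

A call such as caller.call_blocking(player.reset) then plays the same role that runPlayerCallableBlocking plays for mPlayer.reset() in the Java code above.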
/**
 * @file drivers/entropy.h
 *
 * @brief Public APIs for the entropy driver.
 */

/*
 * Copyright (c) 2016 ARM Ltd.
 * Copyright (c) 2017 Intel Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef ZEPHYR_INCLUDE_DRIVERS_ENTROPY_H_
#define ZEPHYR_INCLUDE_DRIVERS_ENTROPY_H_

/**
 * @brief Entropy Interface
 * @defgroup entropy_interface Entropy Interface
 * @ingroup io_interfaces
 * @{
 */

#include <errno.h>

#include <zephyr/types.h>
#include <zephyr/device.h>

#ifdef __cplusplus
extern "C" {
#endif

/**
 * @typedef entropy_get_entropy_t
 * @brief Callback API to get entropy.
 *
 * See entropy_get_entropy() for argument description
 */
typedef int (*entropy_get_entropy_t)(const struct device *dev,
				     uint8_t *buffer,
				     uint16_t length);

/**
 * @typedef entropy_get_entropy_isr_t
 * @brief Callback API to get entropy from an ISR.
 *
 * See entropy_get_entropy_isr() for argument description
 */
typedef int (*entropy_get_entropy_isr_t)(const struct device *dev,
					 uint8_t *buffer,
					 uint16_t length,
					 uint32_t flags);

__subsystem struct entropy_driver_api {
	entropy_get_entropy_t get_entropy;
	entropy_get_entropy_isr_t get_entropy_isr;
};

/**
 * @brief Fills a buffer with entropy. Blocks if required in order to
 *        generate the necessary random data.
 *
 * @param dev Pointer to the entropy device.
 * @param buffer Buffer to fill with entropy.
 * @param length Buffer length.
 * @retval 0 on success.
 * @retval -ERRNO errno code on error.
 */
__syscall int entropy_get_entropy(const struct device *dev,
				  uint8_t *buffer,
				  uint16_t length);

static inline int z_impl_entropy_get_entropy(const struct device *dev,
					     uint8_t *buffer,
					     uint16_t length)
{
	const struct entropy_driver_api *api =
		(const struct entropy_driver_api *)dev->api;

	__ASSERT(api->get_entropy != NULL,
		 "Callback pointer should not be NULL");
	return api->get_entropy(dev, buffer, length);
}

/* Busy-wait for random data to be ready */
#define ENTROPY_BUSYWAIT  BIT(0)

/**
 * @brief Fills a buffer with entropy in a non-blocking or busy-wait manner.
 *        Callable from ISRs.
 *
 * @param dev Pointer to the device structure.
 * @param buffer Buffer to fill with entropy.
 * @param length Buffer length.
 * @param flags Flags to modify the behavior of the call.
 * @retval number of bytes filled with entropy or -error.
 */
static inline int entropy_get_entropy_isr(const struct device *dev,
					  uint8_t *buffer,
					  uint16_t length,
					  uint32_t flags)
{
	const struct entropy_driver_api *api =
		(const struct entropy_driver_api *)dev->api;

	if (unlikely(!api->get_entropy_isr)) {
		return -ENOTSUP;
	}

	return api->get_entropy_isr(dev, buffer, length, flags);
}

#ifdef __cplusplus
}
#endif

/**
 * @}
 */

#include <syscalls/entropy.h>

#endif /* ZEPHYR_INCLUDE_DRIVERS_ENTROPY_H_ */
PARIS — Two art collectors from the United States, Marlene and Spencer Hays, have pledged to the Musée d'Orsay here the largest foreign collection of art to be donated to France since World War II: more than 600 masterworks from the late 19th and early 20th centuries, including works by Pierre Bonnard, Edouard Vuillard, Amedeo Modigliani and Henri Matisse.

The couple, both 80 years old, signed an agreement with the French state on Saturday at the Élysée Palace in a ceremony in which President François Hollande named them both commanders of the Legion of Honor.

In 2013, 187 works valued at 173 million euros ($188 million) were shown at the museum in "A Passion for France: The Marlene and Spencer Hays Collection," an exhibition organized in part by Guy Cogeval, the president of the Musée d'Orsay and the Musée de l'Orangerie and a key player in securing the donation.

The works will be transferred to France upon the couple's deaths, and the Musée d'Orsay has agreed to display the collection intact in a dedicated space in the museum rather than dispersing the works throughout the galleries.
<gh_stars>1-10
package fastgelf

import (
	"encoding/json"
	"net"
	"reflect"
	"testing"
	"time"
)

func createListener(t testing.TB) *net.UDPConn {
	addr, err := net.ResolveUDPAddr("udp", "127.0.0.1:0")
	if err != nil {
		t.Fatal(err)
	}
	udpConn, err := net.ListenUDP("udp", addr)
	if err != nil {
		t.Fatal(err)
	}
	return udpConn
}

func Test_UDPWriter_WriteMessage(t *testing.T) {
	listener := createListener(t)
	defer listener.Close()

	msg := &Message{
		Version:  "1.1",
		TimeUnix: float64(time.Unix(1000, 100000000).UnixNano()) / float64(time.Second),
		Host:     "myhost",
		Short:    "123456",
		Facility: "kernel",
		Full:     "full msg",
		Level:    3,
		Extra: map[string]interface{}{
			"abcdef": "ghijkl",
			"foo":    "bar",
			"xyz":    "baz",
		},
		RawExtra: []byte(`{"ABCD": "EFGH"}`),
	}

	w, err := NewUDPWriter(listener.LocalAddr().String())
	if err != nil {
		t.Fatal(err)
	}
	err = w.WriteMessage(msg)
	if err != nil {
		t.Fatal(err)
	}
	err = w.Close()
	if err != nil {
		t.Fatal(err)
	}

	listener.SetReadDeadline(time.Now().Add(2 * time.Second))
	buffer := make([]byte, 1024)
	n, err := listener.Read(buffer)
	if err != nil {
		t.Fatal(err)
	}

	var gotMsg map[string]interface{}
	err = json.Unmarshal(buffer[:n], &gotMsg)
	if err != nil {
		t.Fatal(err)
	}

	expected := map[string]interface{}{
		"version":       "1.1",
		"host":          "myhost",
		"short_message": "123456",
		"timestamp":     1000.1,
		"facility":      "kernel",
		"level":         3.0,
		"full_message":  "full msg",
		"abcdef":        "ghijkl",
		"foo":           "bar",
		"xyz":           "baz",
		"ABCD":          "EFGH",
	}
	if !reflect.DeepEqual(gotMsg, expected) {
		t.Fatalf("unexpected message received.\nexpected: %#v\ngot: %#v", expected, gotMsg)
	}
}
/**
 * Tests the direct reference to data set.
 *
 * @throws Exception
 *             if any exception
 */
public void testDataSetReference( ) throws Exception
{
	openDesign( "DesignWithElementReferenceLibrary.xml" );

	DataSetHandle dataSet2 = designHandle.findDataSet( "dataSet2" );
	DataSetHandle libADataSet1 = designHandle.findDataSet( "LibA.dataSet1" );

	DataSetHandle dataSet4 = designHandle.findDataSet( "dataSet4" );
	assertEquals( "LibA.dataSet1", dataSet4.getExtends( ).getQualifiedName( ) );

	TableHandle table1 = (TableHandle) designHandle.findElement( "table1" );
	assertNotNull( table1.getDataSet( ) );
	assertEquals( "LibA.dataSet1", table1.getProperty( IReportItemModel.DATA_SET_PROP ) );
	assertNotNull( table1.getDataSet( ) );

	table1.setDataSet( dataSet2 );
	assertEquals( "dataSet2", table1.getStringProperty( TableHandle.DATA_SET_PROP ) );

	table1.setDataSet( libADataSet1 );
	assertEquals( "LibA.dataSet1", table1.getStringProperty( IReportItemModel.DATA_SET_PROP ) );
	assertNotNull( table1.getDataSet( ) );

	designHandle.getCommandStack( ).undo( );
	assertEquals( "dataSet2", table1.getStringProperty( TableHandle.DATA_SET_PROP ) );
	assertEquals( dataSet2, table1.getDataSet( ) );

	designHandle.getCommandStack( ).undo( );
	assertEquals( "LibA.dataSet1", table1.getStringProperty( TableHandle.DATA_SET_PROP ) );
	assertNotNull( table1.getDataSet( ) );

	designHandle.getCommandStack( ).redo( );
	assertEquals( "dataSet2", table1.getStringProperty( TableHandle.DATA_SET_PROP ) );
	assertEquals( dataSet2, table1.getDataSet( ) );

	designHandle.getCommandStack( ).redo( );
	assertEquals( "LibA.dataSet1", table1.getStringProperty( TableHandle.DATA_SET_PROP ) );

	TableHandle table2 = (TableHandle) designHandle.findElement( "table2" );
	assertNotNull( table2.getDataSet( ) );
	assertEquals( "dataSet2", table2.getDataSet( ).getName( ) );

	table2.getDataSet( ).drop( );
	assertNull( table2.getDataSet( ) );
	assertEquals( "dataSet2", table2.getStringProperty( TableHandle.DATA_SET_PROP ) );

	TableHandle table4 = (TableHandle) designHandle.findElement( "table4" );
	assertNotNull( table4.getDataSet( ) );
	assertEquals( "dataSet4", table4.getDataSet( ).getName( ) );
}
# Read the number of problems (only consumed, not otherwise used) and their values.
n = int(input())
problems = list(map(int, input().split()))

total_sum = sum(problems)

# Take problems from the front until at least half of the total value is covered.
index = 0
taken = 0
while taken < total_sum / 2.0:
    taken += problems[index]
    index += 1

print(index)
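A quick worked example of the loop above, with input values made up for illustration:

# total = 5 + 2 + 1 + 1 + 1 + 1 = 11, so half is 5.5
# prefix sums: 5 (< 5.5), then 5 + 2 = 7 (>= 5.5), so two problems are enough
problems = [5, 2, 1, 1, 1, 1]
total_sum = sum(problems)
taken, index = 0, 0
while taken < total_sum / 2.0:
    taken += problems[index]
    index += 1
assert index == 2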
// NewRSAReaderWriter creates an RSAReaderWriter.
// publicKey - key of the entity the message is being sent to
// privateKey - the caller's own private key
func NewRSAReaderWriter(publicKey *rsa.PublicKey, privateKey *rsa.PrivateKey,
	conn Conn) (rw *RSAReaderWriter) {
	return &RSAReaderWriter{
		publicKey:    publicKey,
		privateKey:   privateKey,
		readerWriter: conn,
	}
}
Bitcoin exchange SimpleFX mailing system hacked, investigation underway

If you are a user of the SimpleFX bitcoin exchange and received two strange emails, the reason is that the exchange's mailing system was compromised. SimpleFX, which is based in Saint Vincent and the Grenadines, is a trading platform that allows users to buy and sell bitcoins with each other using its online trading engine.

Bitcoin is about to hit 550$

The two emails were short and to the point, advertising another website under speculative titles such as "Bitcoin is about to hit 550$." The emails were signed by simplefx.com and appear legitimate. In a tweet sent out a few hours ago, SimpleFX confirmed that the emails did not come from it and that its email system had been hacked.

The exchange said that no user coins have been compromised, but the full extent of the incident is not yet clear. User email addresses have evidently been exposed, most likely along with users' first and last names. According to the exchange, an investigation is underway, and more information may be released once it is available.

The emails

Here are the two emails. Do not visit the web address in them, as it is likely a phishing or malware site. Users of the exchange should change their passwords and be on the lookout for more phishing emails.
/**
 * Created by blackmaple on 2017/5/9.
 */
public class CourseLoginRunnable extends BaseRunnable {

    public CourseLoginRunnable(Handler handler) {
        super(handler);
    }

    @Override
    public void run() {
        try {
            String result = CourseConnector.loginCourse();
            sendRefreshMessage(result);
        } catch (Exception e) {
            sendErrorMessage(e.getMessage());
        }
    }
}
<reponame>shayansm2/metaalgolib<filename>src/algorithms/lib/AlgorithmCalculator.py
from abc import ABC, abstractmethod

from src.lib.Calculator import Calculator


class AlgorithmCalculator(Calculator, ABC):
    @abstractmethod
    def get_cost_function(self, *args):
        pass
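A minimal sketch of how AlgorithmCalculator is meant to be specialised: a concrete algorithm supplies its own get_cost_function. The subclass name and the cost function below are invented for illustration, the Calculator stand-in exists only so the sketch runs on its own, and the sketch assumes the real Calculator base class imposes no further abstract methods.

from abc import ABC, abstractmethod


# Stand-in for src.lib.Calculator, only so this sketch is self-contained.
class Calculator:
    pass


class AlgorithmCalculator(Calculator, ABC):
    @abstractmethod
    def get_cost_function(self, *args):
        pass


# Hypothetical concrete calculator: sum-of-squares cost over the given values.
class SumOfSquaresCalculator(AlgorithmCalculator):
    def get_cost_function(self, *args):
        return lambda xs: sum(x * x for x in xs)


calc = SumOfSquaresCalculator()
cost = calc.get_cost_function()
assert cost([1, 2, 3]) == 14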
package me.cchao.insomnia.api.business;

import org.apache.commons.lang3.StringUtils;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import me.cchao.insomnia.api.bean.resp.user.UpdateUser;
import me.cchao.insomnia.api.config.GlobalConfig;
import me.cchao.insomnia.api.domain.FallImage;
import me.cchao.insomnia.api.domain.FallMusic;
import me.cchao.insomnia.api.domain.User;

/**
 * Prepends the absolute (remote server) path to image resources.
 */
public class ImagePathConvert {
    private static final Pattern COMPILE = Pattern.compile("(?<!:)//");

    /**
     * Joins a relative path into an absolute remote path.
     *
     * @param relativePath the relative path
     * @return the absolute path on the resource server
     */
    public static String joinRemotePath(String relativePath) {
        if (StringUtils.isEmpty(relativePath)) {
            return "";
        }
        // Paths starting with "http" are assumed to already be absolute.
        if (relativePath.startsWith("http")) {
            return relativePath;
        }
        String absPath = GlobalConfig.sourceServerPath + relativePath;
        // Collapse duplicated "//" (the "//" in "http://" is left alone).
        if (absPath.lastIndexOf("//") > 7) {
            absPath = COMPILE.matcher(absPath).replaceAll("/");
        }
        return absPath;
    }

    /**
     * Joins the remote path onto the image fields of a resource object.
     *
     * @param object the input object; the fields to rewrite are chosen via instanceof checks
     * @param <T>    the object type
     * @return T
     */
    public static <T> T joinRemotePath(T object) {
        if (object == null) {
            return null;
        }
        if (object instanceof User) {
            User user = (User) object;
            user.setAvatar(joinRemotePath(user.getAvatar()));
        } else if (object instanceof FallMusic) {
            FallMusic item = (FallMusic) object;
            item.setSrc(joinRemotePath(item.getSrc()));
            item.setCover_img(joinRemotePath(item.getCover_img()));
        } else if (object instanceof FallImage) {
            FallImage item = (FallImage) object;
            item.setSrc(joinRemotePath(item.getSrc()));
        }
        return object;
    }

    /**
     * Splits a comma-separated string such as "xx,yy,zz" into a list and prepends the
     * image server path to each entry.
     *
     * @param images the relative image path string stored in the database
     * @return list of absolute image paths
     */
    public static List<String> convertImageList(String images) {
        if (StringUtils.isEmpty(images)) {
            return new ArrayList<>();
        }
        List<String> list = Arrays.stream(StringUtils.split(images, ","))
                .map(new Function<String, String>() {
                    @Override
                    public String apply(String s) {
                        return joinRemotePath(s);
                    }
                }).collect(Collectors.toList());
        return list;
    }
}
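The negative lookbehind in the COMPILE pattern is the subtle part of the class above: it collapses accidental double slashes in the joined path while leaving the "://" of the URL scheme untouched. A small stand-alone check of that behaviour, written with Python's re module (which supports the same lookbehind syntax); the sample URL is made up for the example:

import re

# Same pattern as the Java constant: "//" not preceded by ":".
pattern = re.compile(r"(?<!:)//")

joined = "https://cdn.example.com//images//avatar.png"
normalized = pattern.sub("/", joined)
assert normalized == "https://cdn.example.com/images/avatar.png"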
def estimateNiters(cls, ptsPerHyp, inlierPerc, confidence=0.95, stddevs=2):
    """Estimate how many RANSAC iterations are needed to draw one all-inlier sample."""
    from math import log, sqrt
    # Cap the inlier fraction so the estimate never collapses to a single iteration.
    inlierPerc = min(0.9, inlierPerc)
    # Probability that one random sample of ptsPerHyp points contains only inliers.
    wn = inlierPerc**ptsPerHyp
    try:
        n = int(log(1 - confidence) / log(1 - wn) + 0.5)
    except ZeroDivisionError:
        # wn == 0 (no inliers expected) makes log(1 - wn) zero; treat as unbounded.
        n = 1e99
    if stddevs > 0:
        # Add a safety margin of a few standard deviations of the estimate.
        n += stddevs * int(sqrt(1 - wn) / (wn + 1e-5) + 0.5)
    return n
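estimateNiters follows the standard RANSAC iteration bound: with inlier fraction w and m points drawn per hypothesis, one random sample is all inliers with probability w**m, so roughly N = log(1 - confidence) / log(1 - w**m) trials are needed to see at least one clean sample with the requested confidence; the stddevs term then adds a safety margin. A quick numeric check with values made up for illustration (4 points per hypothesis, 50% inliers, 99% confidence, no safety margin):

from math import log

pts_per_hyp = 4      # minimal points needed to fit one hypothesis
inlier_perc = 0.5    # assumed fraction of inliers in the data
confidence = 0.99    # desired probability of drawing one all-inlier sample

wn = inlier_perc ** pts_per_hyp                      # 0.5**4 = 0.0625
n = int(log(1 - confidence) / log(1 - wn) + 0.5)
print(n)  # 71 iterations for these illustrative numbers, using this rounding rule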