content
stringlengths
7
2.61M
/** Copyright (c) 2007-2013 <NAME>, <NAME>, <NAME>, and the authors indicated in the @author tags Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package bftsmart.tom.util; import bftsmart.consensus.Consensus; import bftsmart.consensus.Epoch; import bftsmart.consensus.TimestampValuePair; import bftsmart.tom.core.TOMLayer; import java.security.MessageDigest; import java.text.SimpleDateFormat; import java.util.Date; import org.slf4j.LoggerFactory; /** * Print information about the replica when it is shutdown. * */ public class ShutdownHookThread extends Thread { private final TOMLayer tomLayer; private final MessageDigest md; public ShutdownHookThread(TOMLayer tomLayer) { this.tomLayer = tomLayer; this.md = this.tomLayer.md; } @Override public void run() { StringBuffer buffer = new StringBuffer(); SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss.SSS"); int lastCons = tomLayer.getLastExec(); int currentCons = tomLayer.getInExec(); Consensus c = null; Epoch e = null; buffer.append("\n---------- DEBUG INFO ----------\n"); buffer.append("\nCurrent time: " + sdf.format(new Date())); buffer.append("\nCurrent leader: " + tomLayer.execManager.getCurrentLeader()); buffer.append("\nCurrent regency: " + tomLayer.getSynchronizer().getLCManager().getLastReg()); buffer.append("\n\nLast finished consensus: " + (lastCons == -1 ? 
"None" : lastCons)); if(lastCons > -1) { c = tomLayer.execManager.getConsensus(lastCons); for (TimestampValuePair rv : c.getWriteSet()) { if (rv.getValue() != null && rv.getValue().length > 0) rv.setHashedValue(md.digest(rv.getValue())); } buffer.append("\n\n\t -- Consensus state: \n\n\t\tETS=" + c.getEts() + " \n\t\tWriteSet=["+ c.getWriteSet() + "] \n\t\t(VAL,TS)=["+c.getQuorumWrites() + "]"); e = c.getLastEpoch(); if(e != null){ buffer.append("\n\n\t -- Epoch state: \n"+e.toString()); } } buffer.append("\n\nConsensus in execution: " + (currentCons == -1 ? "None" : currentCons)); c = null; e = null; if(currentCons > -1) { c = tomLayer.execManager.getConsensus(currentCons); for (TimestampValuePair rv : c.getWriteSet()) { if (rv.getValue() != null && rv.getValue().length > 0) rv.setHashedValue(md.digest(rv.getValue())); } buffer.append("\n\n\t -- Consensus state: \n\n\t\tETS=" + c.getEts() + " \n\t\tWriteSet=["+ c.getWriteSet() + "] \n\t\t(VAL,TS)=["+c.getQuorumWrites() + "]"); e = c.getLastEpoch(); if(e != null) { buffer.append("\n\n\t -- Epoch state: \n"+e.toString()); } } buffer.append("\n\n---------- ---------- ----------\n"); LoggerFactory.getLogger(this.getClass()).info(buffer.toString()); } }
import React from 'react' import styled from 'styled-components' import Paragraph from 'src/components/layout/Paragraph' import { lg } from 'src/theme/variables' const StyledParagraph = styled(Paragraph)` && { font-size: ${lg}; } ` const IconImg = styled.img` width: 20px; margin-right: 10px; ` const TitleWrapper = styled.div` display: flex; align-items: center; ` const ModalTitle = ({ iconUrl, title }: { title: string; iconUrl: string }) => { return ( <TitleWrapper> {iconUrl && <IconImg alt={title} src={iconUrl} />} <StyledParagraph noMargin weight="bolder"> {title} </StyledParagraph> </TitleWrapper> ) } export default ModalTitle
Particulate matter concentrations and their association with COVID-19-related mortality in Mexico during June 2020 Saharan dust event The present study evaluated the impact of Saharan dust event on particulate matter (PM: PM10 and PM2.5) concentrations by analyzing the daily average PM data between Saharan dust days (June 23–29, 2020) and non-Saharan dust days (June 15 to June 22 and June 30 to July 12, 2020) for four majorly affected regions in Mexico and by comparing with three major previous events (2015, 2018, and 2019). The results showed that PM10 and PM2.5 concentrations were 2–5 times higher during the Saharan dust event with the highest daily averages of 197 μg/m³ and 94 μg/m³, respectively, and exceeded the Mexican standard norm (NOM-020-SSA1-2014). When comparing with the previous Saharan dust episodes of 2015, 2018, and 2019, the levels of PM10 and PM2.5 considerably increased and more than doubled across Mexico. The correlation analysis revealed a positive association of PM levels with the number of daily COVID-19 cases and deaths during Saharan dust event. Furthermore, the human health risk assessment showed that the chronic daily intake and hazard quotient values incremented during Saharan dust days compared to non-Saharan days, indicating potential health effects and importance of taking necessary measures to ensure better air quality following the COVID-19 pandemic. Graphical abstract Supplementary Information The online version contains supplementary material available at 10.1007/s11356-021-14168-y. Introduction Air pollution remains a global environmental threat and a public health risk. The World Health Organization (WHO) estimated that exposure to polluted air alone caused around 4.2 million deaths worldwide in 2016 (WHO 2018). Particulate matter (PM) is one of the most common air pollutants which comprises particles of various sizes (PM 10 and PM 2.5 ) with associated adsorbed substances (i.e., chemicals and metals). 
PM can be naturally originated (i.e., sea spray, volcanoes, forests, and deserts) and anthropogenic originated (i.e., vehicles, combustion, industry, and power plants) (;). With the increase in anthropogenic activities and ambient PM concentrations, their exposure to short-term and long-term period affects human health and contributes breathing problems, respiratory diseases, chronic diseases, cancer, and premature mortality (;Loxham and Nieuwenhuijsen 2019). The impact of desert dust events on the PM concentrations and human health has received worldwide attention in the last decades. Sahara Desert is the largest source of atmospheric mineral dust and dust storms are a common meteorological phenomenon, happening especially between late Spring and early Fall, peaking in late June to mid-August (;apraz and Deniz 2020). It has been estimated that about 800 millions of metric tons of dust from North Africa travel and impact across the Atlantic Ocean, the Mediterranean Sea, and the Red Sea, to the Caribbean, South America, North America, Europe, and the Middle East every year (;apraz and Deniz 2020). Owing to the frequent long-range transport of large amounts of dust, a number of studies have evaluated the impact of Saharan dust events on PM concentrations (;;;Dimitriou and Kassomenos 2018;). It is understood from these studies that Sahara dust events greatly increase the ambient concentration of PM contributing to air pollution and may be associated with adverse health effects. According to NOAA's (National Oceanic and Atmospheric Administration) Atlantic Oceanographic and Meteorological Laboratory, the June 2020 Saharan dust event was around 60-70% dustier than an average event happened in 20 years. Most notably, the June 2020 Saharan dust occurred at a critical time when the world is already facing Coronavirus disease 2019, a global health crisis. 
COVID-19 is an acute respiratory disease caused by SARS-CoV-2 (WHO 2020); it has been suggested that environmental factors, such as ambient air pollution, could increase the severity of the health outcomes (e.g., hospitalization and death) among individuals with COVID-19 (). Recent researchers have corroborated the presence of SARS-CoV-2 viral RNA on coarse PM and associations with COVID-19 mortality cases (;). Several studies identified positive association between higher PM 2.5 and PM 10 and COVID-19 deaths globally (;). With the rapid emergence of the novel COVID-19 disease, which by itself is a respiratory disease, it will be important to evaluate the impact of June 2020 Saharan dust event on PM levels and to determine if any relevant associations with COVID-19 cases and deaths. The Saharan dust event occurred between June 23 and June 29, 2020 in Mexico, right after the withdrawal of COVID-19 lockdown, has drawn our attention. Air pollution has been a primary issue in Mexico, exceeding the WHO-recommended level in relation to various types of air pollutants, including the PM, in most of its major cities (). The Saharan dustaffected regions include the parts of northeastern Mexico and Yucatan Peninsula (Fig. 1), where they already have higher levels of air pollution due to industrialization and urbanization activities (;;CONAGUA 2020). Thus, the main objectives of this study are to examine the relative contribution of Saharan dust on PM 10 and PM 2.5 concentrations, to assess the variations in PM concentrations when compared with previous major dust episodes (2015, 2018, and 2019), to explore the association of PM concentrations with COVID-19 cases and deaths, and to evaluate the human health risk associated with PM exposure via inhalation. To the best of our knowledge, this is the first research to document the impact of Saharan dust event in relation to PM levels (PM 10 and PM 2.5 ) and human health in Mexico and during COVID-19 crisis. 
Site description and data collection In this study, the PM levels (PM 10 and PM 2.5 ) for a total of 28 days between June 15, 2020 and July 12, 2020 were assessed in four majorly hit regions of Mexico, namely, Nuevo Leon, Veracruz, Tabasco, and Yucatan (Fig. 1b). The period between June 23 and June 29, 2020 when the event took place in Mexico was considered as Saharan dust days, whereas the periods prior (June 15 to June 22) and after the event (June 30 to July 12, 2020) were collectively considered as non-Saharan dust days. For our analysis, we used the daily concentrations of PM 10 and PM 2.5 as well as the meteorological data (i.e., temperature, relative humidity, and wind speed) during the study period from 15 air monitoring stations located in Nuevo Leon (n = 11), Veracruz (n = 2), Tabasco (n = 1), and Yucatan (n = 1), respectively. The details and data availability of the monitoring stations are provided in the Supplementary Material. To find associations, if any, of PM levels with COVID-19 cases and mortality, we collected the data of confirmed COVID-19 cases and deaths (June 15, 2020 to July 12, 2020) from the official website of the Government of Mexico (https://coronavirus.gob.mx/datos/). We preferred to carry out this analysis only for Nuevo Leon as the dataset available from monitoring stations (n = 11) covers the wider province comparatively higher than other states selected in this study. Additionally, it represents the third most populated region in Mexico. Statistical analysis was conducted using Statistica software (version 8.0). The whole data set was varimax normalized to minimize the number of variables with a high loading on each component. Correlation matrix with p < 0.05, 0.01, 0.001 values was obtained to investigate the relationships between the PM levels and COVID-19 cases and deaths. Air quality index Air quality index (AQI) by USEPA was employed for the effective assessment of air quality. 
We calculated AQI for PM 10 and PM 2.5 obtained from each monitoring stations using the following equation: where I p = index for pollutant p; C p = rounded concentration of pollutant p; BP Hi = the breakpoint that is greater than or equal to C p ; BP Lo = the breakpoint that is less than or equal to C p ; I Hi = the AQI value corresponding to BP Hi ; I Lo = the AQI value corresponding to BP Lo. The AQI ranges from 0 to 500 and categorized into following six intervals: 0-50: good (air quality is good with no risk); 51-100: moderate (air quality is acceptable; however, for some pollutants, there may be a moderate health concern like for people having respiratory diseases); 101-150: unhealthy for sensitive groups (members of sensitive groups may experience health effects); 151-200: unhealthy (everyone may begin to experience health effects); 201-300: very unhealthy (health warnings of emergency conditions and the entire population is more Human health risk assessment on exposure to particulate matter (PM 10 and PM 2.5 ) Exposure dose Human health risk assessment (USEPA 1989) was performed to understand the nature and probability of adverse health effects in humans exposed to PM during the June 2020 Saharan dust event. We concentrated on the health risk estimation through inhalation route for both children and adults. Chronic daily intake (CDI) was estimated for assessing the human health risk upon exposure to PM through inhalation pathway. 
It was calculated as follows (USEPA 2009): where CDI = chronic daily intake (μg kg⁻¹ day⁻¹); R inh = inhalation rate at 20 m³ day⁻¹ for adults and 7.6 m³ day⁻¹ for children; F exp = exposure frequency (days year⁻¹); in the present study, exposure frequency was considered as 28 days year⁻¹ corresponding to the June 2020 Saharan dust event; T exp = the exposure duration 6 years for children and 24 years for adults; ABW = average body weight, 15 kg for children and 70 kg for adults; T avrg = averaging time, for non-carcinogens T avrg = T exp × 365 days and for carcinogens T avrg = 70 × 365 days. C is the concentration of particulate matter (μg/m³). C UCL estimates the reasonable maximum exposure, which is the upper limit of the 95% confidence interval for the mean. C UCL was calculated based on the central limit theorem (adjusted) by USEPA: where X = arithmetic mean; Z = statistic constant 1.645; κ = skewness; n = number of samples; and STD = standard deviation. Risk characterization Risk assessment for the carcinogenic and non-carcinogenic risk of PM was calculated using the parameter called hazard quotient (HQ), the ratio of CDI to reference dose (RfD) by using the following equation: HQ of 1.0 is considered safe. HQ that is < 1.0 indicates a negligible risk, i.e., the pollutant is not likely to induce adverse health effects, even to a sensitive individual. HQ > 1.0 indicates that there may be some risks to sensitive individuals as a result of exposure (USEPA 1989; USEPA 2011). Given the lack of information regarding RfD of PM 10 and PM 2.5 in Mexico, we calculated RfD using the following equation: We used RfC values of 50 μg/m³ for PM 10 and 5 μg/m³ for PM 2.5 (de ;;) to assess the probability of adverse health impacts. Results and discussion The daily average concentration of PM 10 and PM 2.5 during the June 2020 Saharan dust event from 15 monitoring stations is shown in Figs. 2 and 3. 
The daily average PM 10 and PM 2.5 levels were high during Saharan dust event and exceeded the annual limit of 75 g/m 3 and 45 g/m 3 set up by the Mexican standard Norm (NOM-020-SSA1-2014; DOF 2014). It also exceeded the WHO air quality guidelines for the annual mean concentrations of 50 g/m 3 and 25 g/m 3 for PM 10 and PM 2.5, respectively (WHO 2006). In general, the PM 10 and PM 2.5 were at low concentrations before the dust event. As shown in Figs. 2 and 3, there was a significant increase in the daily average concentration of PM 10 and PM 2.5 in all the stations of Mexico under the examination period of Saharan dust event (23rd to 29th, June 2020). The elevated PM concentrations were as a result of received Saharan dust cover which is generally a rich source of PM 10 and PM 2.5. TAS and VAS 2 stations recorded the highest daily average concentration of 197 g/m 3 and 94 g/m 3 for PM 10 and PM 2.5, respectively. In contrast, MAS 6 and MAS 10 stations registered the lowest daily average concentration of 49 g/m 3 and 35 g/m 3 for PM 10 and PM 2.5, respectively. After the dust event, a considerable decrease in the PM concentrations (Figs. 2 and 3) was noted but the concentration of PM 10 and PM 2.5 remained high to those observed before the event. It can be explained by the fact that the effect of a Saharan dust event can extend to days succeeding the event as fine particulates can remain airborne for long durations. Considering all days, PM 10 (g/m 3 ) average concentrations were 47, 42, and 53 for Nuevo Leon, Veracruz, and Tabasco; PM 2.5 (g/m 3 ) average concentrations were 20, 24, and 25 for Nuevo Leon, Veracruz, and Yucatan, respectively. It is noted that the increase in the concentration of PM was more significant on Saharan dust days as compared with the non-Saharan dust days. 
On Saharan dust days, average concentrations were 1.2, 2.2, and 2.2 times higher for PM 10 than on non-Saharan dust days, with the values reaching 52 g/m 3, 68 g/m 3, and 86 g/m 3 for Nuevo Leon, Veracruz, and Tabasco, respectively. Compared to non-Saharan dust days, the average concentrations of PM 2.5 were 1.3, 1.8, and 2.4 times higher for Nuevo Leon, Veracruz, and Yucatan, with the values reaching 25 g/m 3, 37 g/m 3, and 44 g/m 3, respectively. The results suggest that Tabasco and Yucatan have the highest average value of PM 10 and PM 2.5, followed by Veracruz and Nuevo Leon. Next, we estimated the changes (%) in PM 10 and PM 2.5 concentrations for the period of assessment, i.e., non-Saharan dust vs Saharan dust (Fig. 4). The first thing to note is that the variations of PM concentrations were obvious among the study regions, but it was uneven. The stations located in the coastal regions of Tabasco, Veracruz, and Yucatan presented higher increase percentage of PM levels in Saharan dust days than non-Saharan days. The station that registered the greatest change percentage was VAS 1 (118%), followed by TAS (115%) for PM 10. YAS station recorded a maximum increase of about 59% for PM 2.5. In contrary, the increase percentage of PM 10 and PM 2.5 concentrations varied between 5 and 45%, respectively, in Nuevo Leon, displaying an overall increase of 20% of PM levels for the study period. For example, the increase of PM levels was higher in MAS 2 and MAS 1 between Saharan dust days and non-Saharan days, while it was least significant in MAS 10 station (Fig. 4). MAS 8 station displayed no significant variation between non-Saharan and Saharan dust days. It can be said that Nuevo Leon (located northeast) is less affected by Saharan dust event compared to other regions that are located on the southeast side of Mexico. 
This may be likely due to the differences in the dust intensity (significantly thicker dust), gravitational settling velocities, and distribution of Saharan dust across Mexico. Additionally, the changes (%) in PM 10 and PM 2.5 concentrations were examined with respect to previous major Saharan dust episodes in Mexico ( Table 2). The lack of data availability from few air monitoring stations for previous year events, however, rendered a complete comparison to understand the effect of PM 10 and PM 2.5 concentrations between Saharan dust episodes. With available data, the first thing to note is that the PM 10 and PM 2.5 concentrations did not show It is reasonable to assume that the amount of dust entering the atmosphere in the region could worsen by the increased particulate concentrations. Therefore, it is critical to estimate air quality index for the Saharan dust period. As shown in Fig. 5, in general, the distribution of air quality trend between the stations for PM 10 remained good for most of the days but based on PM 2.5, the dominance of moderate category was observed. In terms of PM 2.5 estimations, it is suggested that the population of study area is exposed with more than 50% of the days with significant impact on health. It is important to note an elevated value in the category, "unhealthy" for all the stations on the maximum dusty day (June 27), leading to adverse air quality. The consequences of these inflations in air quality might have impact on health, especially on elderly and sensitive groups during COVID-19 pandemic. Similar to our findings, variations in PM 10 and PM 2.5 levels during the Saharan dust events especially in the proximity of the source areas have been widely reported. Spain and Nicosia displayed PM 10 concentrations reaching 250 g/m 3 and up to 470 g/m 3 respectively, during Saharan dust events (;). Moroni et al. 
identified 22 dust intrusions in Monte Martano (central Italy) in 2009 and estimated the impact of dust on PM 10 at 22 μg/m³ per intrusion. Kabatas et al. also found a significant contribution of dust to high levels of PM 10 in Turkey. Likewise, Dimitriou and Kassomenos observed extreme concentrations of PM 10 in Athens (Greece) during April 2008 Saharan dust. We acknowledge here that our results of PM levels in Mexico were way lower compared to other regions during Saharan dust episodes (i.e., 2015, 2018, 2019, and 2020) due to its geographical location away (~7000 km) from the source area. In addition, the lack of investigations for North American region closer to our study area, however, hinders a detailed comparison. In relation to meteorological conditions, Saharan dust events usually occur during warmer months (i.e., summer) characterized with higher temperatures and low relative humidity (;). However, this region did not show any significant changes in temperature and relative humidity between Saharan dust days and non-Saharan dust days (Supplementary Material Table S1) similar to dust episodes witnessed in Athens, Greece, during 2001–2006, while a decrease in wind speed was observed during Saharan dust days favoring the accumulation of dust particles in atmosphere for a prolonged time. Owing to the fact that the COVID-19, by itself a respiratory disease and spread quickly among the community and SARS-CoV-2 would remain viable and infectious in aerosols for hours (van ), this study determined the possible interrelationship between PM and COVID-19 cases and deaths for Nuevo Leon. By July 12, 2020, Nuevo Leon reported 12,322 confirmed COVID-19 cases and 694 deaths (Government of Mexico: https://coronavirus.gob.mx/datos/). The correlation analysis was performed for the entire study period (June 15, 2020 to July 12, 2020) considering the longer residence of PM levels in the atmosphere after the dust event (Figs. 2 and 3). 
Table 3 summarizes the association between PM and COVID-19 cases and death for the study period. Our results provided preliminary evidence showing that there is a prominent association of PM with COVID-19 cases and deaths during the Saharan dust event but only that of PM 10 is significant. The fine fraction of PM (PM 2.5 ) in our case did not present a substantial relation with COVID-19 cases and deaths (Table 3). Few studies reported similar results of less statistically significant association of PM 2.5 particles with total or specific mortality. For example, in Barcelona (Spain), the effects of short-term exposure to PM 2.5 were not significant during Saharan dust days (). It was found, in Madrid and Italy, that the daily mean PM 2.5 concentrations displayed no statistically significant association with total mortality, circulatory, and respiratory causes on Saharan dust days (;;). Under reduced anthropogenic activities during pandemic measures, PM 10 have presented strong relationship with COVID-19 mortality rate in many parts of the world (;;;). Similarly, in this study, PM 10 is positively correlated with COVID-19 cases and deaths (r 2 = 0.53; 0.50), suggesting that exposure to such PM levels may affect COVID-19 prognosis, and thus, more comprehensive studies should be conducted on this subject. Furthermore, to understand the human health risks associated with PM exposure during the study period, noncarcinogenic and carcinogenic risks in both children and adults via inhalation for Saharan dust and non-Saharan dust days were estimated by calculating the average CDI and HQ. The results are shown in Table 4. The CDI values for noncarcinogenic risk of PM in children were comparatively higher than adults during Saharan period. For instance, the maximum CDI values (g kg −1 day −1 ) of non-carcinogenic risk for PM 10 and PM 2.5 in children were 4.4 and 0.38 (Tabasco), while for adults was only 2.48 (Tabasco) and 1.16 (Yucatan), respectively. 
It has been documented that children are highly vulnerable to environmental pollutants than adults for numerous reasons, including their relatively higher amount of air inhalation (the air intake per weight unit of a resting infant is twice that of an adult), and their immune system and lungs not being fully developed (;). Contrarily, for carcinogenic risks, adults displayed maximum CDI values (g kg −1 day −1 ) of 0.85 (Tabasco) and 0.40 (Yucatan), and children exhibited 0.38 (Tabasco) and 0.18 (Yucatan) values for PM 10 and PM 2.5. Among regions studied, Veracruz, Tabasco, and Yucatan during Saharan dust days presented nearly one-fold to two-fold increase in CDI values for both children and adults compared to non-Saharan dust days. Nuevo Leon also presented greater CDI values; however, it was in lesser extent compared to other regions. As mentioned earlier in this study, it could be attributed to the location of Nuevo Leon (northeast), which experienced lesser impact from Saharan dust event in comparison with other three regions (southeast) in Mexico. In case of HQ, both children and adults displayed values higher for PM 2.5 compared to PM 10 (Table 4). It is important to mention here that the AQI values for PM 2.5 fell into the category of moderate-unhealthy for most Saharan dust days. Fine fraction of PM particles (PM 2.5 ) are more resident in the atmosphere and they more () which is a deep concern and demands in-depth investigation of health risks associated with PM 2.5. In general, HQ values were similar on non-Saharan days, whereas a potential increase in HQ values closer to 1 was seen in all the four studied regions during Saharan dust days. Therefore, our results from human health risk assessment about the levels and risks of PM could make useful contributions to government, environmental, and health professionals in taking good steps to protect and promote human health during this pandemic situation. 
Limitations of the study Although our study data and correlational analysis showed significant impacts of PM from Saharan dust in COVID-19, this short communication has a few limitations: additional information on meteorological factors such as temperature, precipitation, and relative humidity were not examined, and future studies need to explore these factors for a comprehensive investigation. PM samples from the June 2020 Saharan dust event were not analyzed by scanning electron microscopy with energy dispersive X-ray spectrometry and inductively coupled plasma mass spectrometry for morphological and chemical characterization. These results would have been greatly helpful but could not be accomplished as the COVID-19 pandemic hindered the analyses. Accordingly, the chemical composition of PM was not considered for assessing the health associated risks, and as a result, the exposure to the combination of the pollutants could not be determined. Thus, the toxic effects of these PM particles during the short-term dust episodes should be further investigated. This study could not consider population density, mobility trends from the regions studied in the analysis. Future studies can investigate on these aspects to provide more useful insights into the spread of COVID-19. The lack of studies for comparison demands future studies from other world regions that are similarly affected by the June 2020 Saharan dust event. Concluding remarks In summary, this study is the first to quantitatively assess the importance of the June 2020 Saharan dust event over PM concentrations in Mexico, as well to investigate its relationship with COVID-19 pandemic. As a consequence of the June 2020 Saharan dust event, we observed a sudden hike in both PM 10 and PM 2.5 concentrations from northeastern and southeastern regions of Mexico. Also, in these regions, the PM levels were higher in many orders of magnitude compared to previous major Saharan dust episodes. 
Based on our results, it is confirmed that the Saharan dust transported from longer distances had a significant effect on the PM concentrations in Mexico. The correlational analysis revealed that the Saharan dust contributions to increased PM 10 levels present positive association with the daily number of COVID-19-confirmed cases and deaths. In parallel, this study provided a valuable evaluation of the human health risks associated with exposure to PM via inhalation in both children and adults during the dust event. Overall, the main findings of this study underline that the Saharan dust events cannot be ignored during global health crisis. Taking together, this study could serve as a reference data for government authorities to design appropriate strategies for mitigating such unforeseen episodes to improve air quality.
1. Field of the Invention The present invention relates to the field of signal processing. More specifically, the present invention relates to the processing of measured signals, containing a primary signal portion and a secondary signal portion, for the removal or deviation of either the primary or secondary signal portion when little is known about either of these components. More particularly, the present invention relates to modeling the measured signals in a novel way which facilitates minimizing the correlation between the primary signal portion and the secondary signal portion in order to produce a primary and/or secondary signal. The present invention is especially useful for physiological monitoring systems including blood oxygen saturation systems. 2. Description of the Related Art Signal processors are typically employed to remove or derive either the primary or secondary signal portion from a composite measured signal including a primary signal portion and a secondary signal portion. For example, a composite signal may contain noise and desirable portions. If the secondary signal portion occupies a different frequency spectrum than the primary signal portion, then conventional filtering techniques such as low pass, band pass, and high pass filtering are available to remove or derive either the primary or the secondary signal portion from the total signal. Fixed single or multiple notch filters could also be employed if the primary and/or secondary signal portion(s) exist at a fixed frequency(s). It is often the case that an overlap in frequency spectrum between the primary and secondary signal portions exists. Complicating matters further, the statistical properties of one or both of the primary and secondary signal portions change with time. In such cases, conventional filtering techniques are ineffective in extracting either the primary or secondary signal. 
If, however, a description of either the primary or secondary signal portion can be derived, correlation canceling, such as adaptive noise canceling, can be employed to remove either the primary or secondary signal portion of the signal isolating the other portion. In other words, given sufficient information about one of the signal portions, that signal portion can be extracted. Conventional correlation cancelers, such as adaptive noise cancelers, dynamically change their transfer function to adapt to and remove portions of a composite signal. However, correlations cancelers require either a secondary reference or a primary reference which correlates to either the secondary signal portion only or the primary signal portion only. For instance, for a measured signal containing noise and desirable signal, the noise can be removed with a correlation canceler if a noise reference is available. This is often the case. Although the amplitude of the reference signals are not necessarily the same as the amplitude of the corresponding primary or secondary signal portions, they have a frequency spectrum which is similar to that of the primary or secondary signal portions. In many cases, nothing or very little is known about the secondary and/or primary signal portions. One area where measured signals comprising a primary signal portion and a secondary signal portion about which no information can easily be determined is physiological monitoring. Physiological monitoring generally involves measured signals derived from a physiological system, such as the human body. Measurements which are typically taken with physiological monitoring systems include electrocardiographs, blood pressure, blood gas saturation (such as oxygen saturation), capnographs, other blood constituent monitoring, heart rate, respiration rate, electroencephalograph (EEG) and depth of anesthesia, for example. 
Other types of measurements include those which measure the pressure and quantity of a substance within the body such as cardiac output, venous oxygen saturation, arterial oxygen saturation, bilirubin, total hemoglobin, breathalyzer testing, drug testing, cholesterol testing, glucose testing, extra vasation, and carbon dioxide testing, protein testing, carbon monoxide testing, and other in-vivo measurements, for example. Complications arising in these measurements are often due to motion of the patient, both external and internal (muscle movement, vessel movement, and probe movement, for example), during the measurement process. Many types of physiological measurements can be made by using the known properties of energy attenuation as a selected form of energy passes through a medium. A blood gas monitor is one example of a physiological monitoring system which is based upon the measurement of energy attenuated by biological tissues or substances. Blood gas monitors transmit light into the test medium and measure the attenuation of the light as a function of time. The output signal of a blood gas monitor which is sensitive to the arterial blood flow contains a component which is a waveform representative of the patient""s arterial pulse. This type of signal, which contains a component related to the patient""s pulse, is called a plethysmographic wave, and is shown in FIG. 1 as curve s. Plethysmographic waveforms are used in blood gas saturation measurements. As the heart beats, the amount of blood in the arteries increases and decreases, causing increases and decreases in energy attenuation, illustrated by the cyclic wave s in FIG. 1. Typically, a digit such as a finger, an ear lobe, or other portion of the body where blood flows close to the skin, is employed as the medium through which light energy is transmitted for blood gas attenuation measurements. The finger comprises skin, fat, bone, muscle, etc., shown schematically in FIG. 
2, each of which attenuates energy incident on the finger in a generally predictable and constant manner. However, when fleshy portions of the finger are compressed erratically, for example by motion of the finger, energy attenuation becomes erratic. An example of a more realistic measured waveform S is shown in FIG. 3, illustrating the effect of motion. The primary plethysmographic waveform portion of the signal s is the waveform representative of the pulse, corresponding to the sawtooth-like pattern wave in FIG. 1. The large, secondary motion-induced excursions in signal amplitude obscure the primary plethysmographic signal s. Even small variations in amplitude make it difficult to distinguish the primary signal component s in the presence of a secondary signal component n. A pulse oximeter is a type of blood gas monitor which non-invasively measures the arterial saturation of oxygen in the blood. The pumping of the heart forces freshly oxygenated blood into the arteries causing greater energy attenuation. As well understood in the art, the arterial saturation of oxygenated blood may be determined from the depth of the valleys relative to the peaks of two plethysmographic waveforms measured at separate wavelengths. Patient movement introduces motion artifacts to the composite signal as illustrated in the plethysmographic waveform illustrated in FIG. 3. These motion artifacts distort the measured signal. This invention provides improvements upon the methods and apparatus disclosed in U.S. patent application Ser. No. 08/132,812, filed Oct. 6, 1993, entitled Signal Processing Apparatus, which earlier application has been assigned to the assignee of the instant application. The present invention involves several different embodiments using the novel signal model in accordance with the present invention to isolate either a primary signal portion or a secondary signal portion of a composite measured signal. 
In one embodiment, a signal processor acquires a first measured signal and a second measured signal that is correlated to the first measured signal. The first signal comprises a first primary signal portion and a first secondary signal portion. The second signal comprises a second primary signal portion and a second secondary signal portion. The signals may be acquired by propagating energy through a medium and measuring an attenuated signal after transmission or reflection. Alternatively, the signals may be acquired by measuring energy generated by the medium. In one embodiment, the first and second measured signals are processed to generate a secondary reference which does not contain the primary signal portions from either of the first or second measured signals. This secondary reference is correlated to the secondary signal portion of each of the first and second measured signals. The secondary reference is used to remove the secondary portion of each of the first and second measured signals via a correlation canceler, such as an adaptive noise canceler. The correlation canceler is a device which takes a first and second input and removes from the first input all signal components which are correlated to the second input. Any unit which performs or nearly performs this function is herein considered to be a correlation canceler. An adaptive correlation canceler can be described by analogy to a dynamic multiple notch filter which dynamically changes its transfer function in response to a reference signal and the measured signals to remove frequencies from the measured signals that are also present in the reference signal. Thus, a typical adaptive correlation canceler receives the signal from which it is desired to remove a component and receives a reference signal of the undesired portion. The output of the correlation canceler is a good approximation to the desired signal with the undesired component removed. 
Alternatively, the first and second measured signals may be processed to generate a primary reference which does not contain the secondary signal portions from either of the first or second measured signals. The primary reference may then be used to remove the primary portion of each of the first and second measured signals via a correlation canceler. The output of the correlation canceler is a good approximation to the secondary signal with the primary signal removed and may be used for subsequent processing in the same instrument or an auxiliary instrument. In this capacity, the approximation to the secondary signal may be used as a reference signal for input to a second correlation canceler together with either the first or second measured signals for computation of, respectively, either the first or second primary signal portions. Physiological monitors can benefit from signal processors of the present invention. Often in physiological measurements a first signal comprising a first primary portion and a first secondary portion and a second signal comprising a second primary portion and a second secondary portion are acquired. The signals may be acquired by propagating energy through a patient""s body (or a material which is derived from the body, such as breath, blood, or tissue, for example) or inside a vessel and measuring an attenuated signal after transmission or reflection. Alternatively, the signal may be acquired by measuring energy generated by a patient""s body, such as in electrocardiography. The signals are processed via the signal processor of the present invention to acquire either a secondary reference or a primary reference which is input to a correlation canceler, such as an adaptive noise canceler. One physiological monitoring apparatus which benefits from the present invention is a monitoring system which determines a signal which is representative of the arterial pulse, called a plethysmographic wave. 
This signal can be used in blood pressure calculations, blood constituent measurements, etc. A specific example of such a use is in pulse oximetry. Pulse oximetry involves determining the saturation of oxygen in the blood. In this configuration, the primary portion of the signal is the arterial blood contribution to attenuation of energy as it passes through a portion of the body where blood flows close to the skin. The pumping of the heart causes blood flow to increase and decrease in the arteries in a periodic fashion, causing periodic attenuation wherein the periodic waveform is the plethysmographic waveform representative of the arterial pulse. The secondary portion is noise. In accordance with the present invention, the measured signals are modeled such that this secondary portion of the signal is related to the venous blood contribution to attenuation of energy as it passes through the body. The secondary portion also includes artifacts due to patient movement which causes the venous blood to flow in an unpredictable manner, causing unpredictable attenuation and corrupting the otherwise periodic plethysmographic waveform. Respiration also causes the secondary or noise portion to vary, although typically at a lower frequency than the patients pulse rate. Accordingly, the measured signal which forms a plethysmographic waveform is modeled in accordance with the present invention such that the primary portion of the signal is representative of arterial blood contribution to attenuation and the secondary portion is due to several other parameters. A physiological monitor particularly adapted to pulse oximetry oxygen saturation measurement comprises two light emitting diodes (LED""s) which emit light at different wavelengths to produce first and second signals. A detector registers the attenuation of the two different energy signals after each passes through an absorptive media, for example a digit such as a finger, or an earlobe. 
The attenuated signals generally comprise both primary (arterial attenuator) and secondary (noise) signal portions. A static filtering system, such as a bandpass filter, removes a portion of the secondary signal which is outside of a known bandwidth of interest, leaving an erratic or random secondary signal portion, often caused by motion and often difficult to remove, along with the primary signal portion. A processor in accordance with one embodiment of the present invention removes the primary signal portions from the measured signals yielding a secondary reference which is a combination of the remaining secondary signal portions. The secondary reference is correlated to both of the secondary signal portions. The secondary reference and at least one of the measured signals are input to a correlation canceler, such as an adaptive noise canceler, which removes the random or erratic portion of the secondary signal. This yields a good approximation to a primary plethysmographic signal as measured at one of the measured signal wavelengths. As is known in the art, quantitative measurements of the amount of oxygenated arterial blood in the body can be determined from the plethysmographic signal in a variety of ways. The processor of the present invention may also remove the secondary signal portions from the measured signals yielding a primary reference which is a combination of the remaining primary signal portions. The primary reference is correlated to both of the primary signal portions. The primary reference and at least one of the measured signals are input to a correlation canceler which removes the primary portions of the measured signals. This yields a good approximation to the secondary signal at one of the measured signal wavelengths. This signal may be useful for removing secondary signals from an auxiliary instrument as well as determining venous blood oxygen saturation. 
In accordance with the signal model of the present invention, the two measured signals each having primary and secondary signal portions can be related by coefficients. By relating the two equations with respect to coefficients defined in accordance with the present invention, the coefficients provide information about the arterial oxygen saturation and about the noise (the venous oxygen saturation and other parameters). In accordance with this aspect of the present invention, the coefficients can be determined by minimizing the correlation between the primary and secondary signal portions as defined in the model. Accordingly, the signal model of the present invention can be utilized in many ways in order to obtain information about the measured signals as will be further apparent in the detailed description of the preferred embodiments. One aspect of the present invention is a method for use in a signal processor in a signal processor for processing at least two measured signals S1 and S2 each containing a primary signal portion s and a secondary signal portion n, the signals S1 and S2 being in accordance with the following relationship: S1=s1+n1 S2=s2+n2 where s1 and s2, and n1 and n2 are related by: s1=ras2 and n1=rvn2 and where ra and rv are coefficients. The method comprises a number of steps. A value of coefficient ra is determined which minimize correlation between s1 and n1. Then, at least one of the first and second signals is processed using the determined value for ra to significantly reduce n from at least one of the first or second measured signal to form a clean signal. In one embodiment, the clean signal is displayed on a display. In another embodiment, wherein the first and second signals are physiological signals, the method further comprises the step of processing the clean signal to determine a physiological parameter from the first or second measured signals. In one embodiment, the parameter is arterial oxygen saturation. 
In another embodiment, the parameter is an ECG signal. In yet another embodiment, wherein the first portion of the measured signals is indicative of a heart plethysmographic, the method further comprises the step of calculating the pulse rate. Another aspect of the present invention involves a physiological monitor. The monitor has a first input configured to receive a first measured signal S1 having a primary portion, s1, and a secondary portion n1. The monitor also has a second input configured to received a second measured signal S2 having a primary portion s2 and a secondary portion n2. Advantageously, the first and the second measured signals S1 and S2 are in accordance with the following relationship: S1=s1+n1 S2=s2+n2 where s1 and s2, and n1 and n2 are related by: s1=ras2 and n1=rvn2 and where ra and rv are coefficients. The monitor further has a scan reference processor, the scan reference processor responds to a plurality of possible values for ra to multiply the second measured signal by each of the possible values for ra and for each of the resulting values, to subtract the resulting values from the first measured signal to provide a plurality of output signals. A correlation canceler having a first input configured to receive the first measured signal, and having a second input configured to receive the plurality of output signals from the saturation scan reference processor, provides a plurality of output vectors corresponding to the correlation cancellation between the plurality of output signals and the first measured signal. An integrator having an input configured to receive the plurality of output vectors from the correlation canceler is responsive to the plurality of output vectors to determine a corresponding power for each output vector. An extremum detector is coupled at its input to the output of the integrator. The extremum detector is responsive to the corresponding power for each output vector to detect a selected power. 
In one embodiment, the plurality of possible values correspond to a plurality of possible values for a selected blood constituent. In one embodiment, the selected blood constituent is arterial blood oxygen saturation. In another embodiment, the selected blood constituent is venous blood oxygen saturation. In yet another embodiment, the selected blood constituent is carbon monoxide. Another aspect of the present invention involves a physiological monitor. The monitor has a first input configured to receive a first measured signal S1 having a primary portion, s1, and a secondary portion, n1. The monitor also has a second input configured to receive a second measured signal S2 having a primary portion s2 and a secondary portion n2. The first and the second measured signals S1 and S2 are in accordance with the following relationship: S1=s1+n1 S2=s2+n2 where s1 and s2, and n1 and n2 are related by: s1=ras2 and n1=rvn2 and where ra and rv are coefficients. A transform module is responsive to the first and the second measured signals and responsive to a plurality of possible values for ra to provide at least one power curve as an output. An extremum calculation module is responsive to the at least one power curve to select a value for ra which minimizes the correlation between s and n, and to calculate from the value for ra a corresponding saturation value as an output. A display module is responsive to the output of the saturation calculation to display the saturation value.
#include "opencv2/core/core.hpp" #include "opencv2/imgproc/imgproc.hpp" #include "opencv2/highgui/highgui.hpp" #include <iostream> #include <cmath> #include<windows.h> using namespace std; using namespace cv; //MATRICES: PRIMERO CENTRAS, DESPUES MATRICES TRANSFORMACION, DESPUES DESCENTRAS //WARPING 1A: 20 Y 128 //WARPING 1B: 20 Y 30 //WARPING 2A: Y0 Y EL 1 DESPUES DEL * //WARPING 2B: X0 Y0 Y 512 int interpolacionLineal(float px, float py, Mat original) { int q11 = 0, q22 = 0, q21 = 0; int q12 = original.at<uchar>(int(py), int(px)); if (int(py + 1) < original.rows) q11 = original.at<uchar>(int(py + 1), int(px)); if (int(px + 1) < original.cols) q22 = original.at<uchar>(int(py), int(px + 1)); if (int(px + 1) < original.cols && int(py + 1) < original.rows) q21 = original.at<uchar>(int(py + 1), int(px + 1)); int x1 = int(px); int x2 = int(px + 1); int y1 = int(py + 1); int y2 = int(py); int r1 = ((q21 - q11) / (x2 - x1)) * (px - x1) + q11; int r2 = ((q22 - q12) / (x2 - x1)) * (px - x1) + q12; int P = ((r2 - r1) / (y2 - y1)) * (py - y1) + r1; return P; } void dotProduct2D(float v[2], float matrix[2][2], int coord[2]) { int dim = 2; for (int r = 0; r < dim; r++) { v[r] = 0; for (int c = 0; c < dim; c++) { v[r] += matrix[r][c] * coord[c]; } } } void dotProduct3D(float v[3], float matrix[3][3], int coord[3]) { int dim = 2; for (int r = 0; r < dim + 1; r++) { v[r] = 0; for (int c = 0; c < dim + 1; c++) { v[r] += matrix[r][c] * coord[c]; } } } void transformMatrix(float result[3][3], float A[3][3], float B[3][3]) { int dim = 2; for (int r = 0; r < dim + 1; r++) { for (int c = 0; c < dim + 1; c++) { for (int k = 0; k < dim + 1; k++) { result[r][c] += A[r][k] * B[k][c]; } } } } void escalamiento(float Cx, float Cy,Mat interp, Mat original) { float vector[3] = {0}; float matrix[3][3] = { {1 / Cx, 0, 0}, {0, 1 / Cy, 0}, {0, 0, 1} }; int coord[3] = { 0 }; for (int i = 0; i < interp.rows; i++) { for (int j = 0; j < interp.cols; j++) { coord[0] = j; coord[1] = i; coord[2] = 1; 
dotProduct3D(vector, matrix, coord); interp.at<uchar>(i, j) = interpolacionLineal(vector[0], vector[1], original); } } } void traslacion(float tx, float ty, Mat trasladada, Mat original) { float vector[3] = { 0 }; float matrix[3][3] = { {1, 0, tx}, {0, 1, ty}, {0, 0, 1} }; int coord[3] = { 0 }; for (int i = 0; i < trasladada.rows; i++) { for (int j = 0; j < trasladada.cols; j++) { coord[0] = j; coord[1] = i; coord[2] = 1; dotProduct3D(vector, matrix, coord); float u = vector[0] ; float v = vector[1] ; if (u >= original.cols || u < 0 || v >= original.rows || v < 0) continue; trasladada.at<uchar>(i, j) = interpolacionLineal(u, v, original); } } } void rotacion(int angulo, Mat rotada, Mat original) { float rads = (angulo * 3.1416) / 180.0f; float vector[3] = { 0 }; float matrix[3][3] = { {cos(rads), sin(rads), 0}, {-1 * sin(rads), cos(rads), 0}, {0, 0, 1} }; int coord[3] = { 0 }; for (int i = 0; i < rotada.rows; i++) { for (int j = 0; j < rotada.cols; j++) { float x0 = j - (rotada.cols / 2); float y0 = (rotada.rows / 2) - i; coord[0] = x0; coord[1] = y0; coord[2] = 1; dotProduct3D(vector, matrix, coord); float u = vector[0] + (rotada.cols / 2); float v = (rotada.rows / 2) - vector[1]; if (u >= original.cols || u < 0 || v >= original.rows || v < 0) continue; rotada.at<uchar>(i, j) = interpolacionLineal(u, v, original); } } } void sesgadoV(float factor, Mat sesV, Mat original) { float vector[3] = { 0 }; float matrix[3][3] = { {1, 0 ,0},{factor, 1 , 0}, {0,0, 1} }; int coord[3] = { 0 }; for (int i = 0; i < sesV.rows; i++) { for (int j = 0; j < sesV.cols; j++) { int x0 = j - (sesV.cols / 2); int y0 = (sesV.rows / 2) - i; coord[0] = x0; coord[1] = y0; coord[2] = 1; dotProduct3D(vector, matrix, coord); float u = vector[0] + (sesV.cols / 2); float v = (sesV.rows / 2) - vector[1]; if (u >= original.cols || u < 0 || v >= original.rows || v < 0) continue; sesV.at<uchar>(i, j) = interpolacionLineal(u, v, original); } } } void sesgadoH(float factor, Mat sesH, Mat original) { 
float vector[3] = { 0 }; float matrix[3][3] = { {1, factor ,0},{0, 1 , 0}, {0,0, 1} }; int coord[3] = { 0 }; for (int i = 0; i < sesH.rows; i++) { for (int j = 0; j < sesH.cols; j++) { int x0 = j - (sesH.cols / 2); int y0 = (sesH.rows / 2) - i; coord[0] = x0; coord[1] = y0; coord[2] = 1; dotProduct3D(vector, matrix, coord); float u = vector[0] + (sesH.cols / 2); float v = (sesH.rows / 2) - vector[1]; if (u >= original.cols || u < 0 || v >= original.rows || v < 0) continue; //sesH.at<uchar>(i, j) = original.at<uchar>(v, u); sesH.at<uchar>(i, j) = interpolacionLineal(u, v, original); } } } void applyTransformMatrix(float vector[3], Mat scaled, float transformMtx[3][3], int coord[3], Mat original) { for (int i = 0; i < scaled.rows; i++) { for (int j = 0; j < scaled.cols; j++) { float x0 = j - (scaled.cols / 2); float y0 = (scaled.rows / 2) - i; coord[0] = x0; coord[1] = y0; coord[2] = 1; dotProduct3D(vector, transformMtx, coord); float u = vector[0] + (scaled.cols / 2); float v = (scaled.rows / 2) - vector[1]; if (u >= original.cols || u < 0 || v >= original.rows || v < 0) continue; scaled.at<uchar>(i, j) = interpolacionLineal(u, v, original); } } } int maxCx = 10; float Cx = 1; int slider_Cx = 1; int maxCy = 10; float Cy = 1; int slider_Cy = 1; int maxTx = 150; int maxTy = 150; int Tx = 50; int Ty = 50; int angulo = 15; int maxAngulo = 350; int maxSx = 500; float factorSx; int slider_Sx; int maxSy = 500; float factorSy; int slider_Sy; void on_trackbar(int, void*) { Cx = (float)slider_Cx / maxCx; if (Cx >= 0.5) Cx = Cx * 2; Cy = (float)slider_Cy / maxCy; if (Cy >= 0.5) Cy = Cy * 2; factorSx = (float)slider_Sx / maxSx; factorSy = (float)slider_Sy / maxSy; } int main() { Mat original = imread("wasp.png", CV_LOAD_IMAGE_GRAYSCALE); namedWindow("trackbars", WINDOW_AUTOSIZE); namedWindow("view", WINDOW_AUTOSIZE); createTrackbar("Cx", "trackbars", &slider_Cx, maxCx, on_trackbar); createTrackbar("Cy", "trackbars", &slider_Cy, maxCy, on_trackbar); createTrackbar("Tx", 
"trackbars", &Tx, maxTx, on_trackbar); createTrackbar("Ty", "trackbars", &Ty, maxTy, on_trackbar); createTrackbar("Angle", "trackbars", &angulo, maxAngulo, on_trackbar); createTrackbar("Sx", "trackbars", &slider_Sx, maxSx, on_trackbar); createTrackbar("Sy", "trackbars", &slider_Sy, maxSy, on_trackbar); while (1) { if (GetKeyState('S') & 0x8000) { float rads = (angulo * 3.1416) / 180.0f; float scale[3][3] = { {1 / Cx, 0, 0}, {0, 1 / Cy, 0}, {0, 0, 1} }; float translate[3][3] = { {1 , 0, Tx }, {0, 1 , Ty }, {0, 0, 1} }; float rotate[3][3] = { {cos(rads), sin(rads), 0}, {-1 * sin(rads), cos(rads), 0}, {0, 0, 1} }; float shear[3][3] = { {1, factorSx, 0}, {factorSy, 1, 0}, {0, 0, 1} }; //order: scale, translate, rotate, shear: // result1 = scale x translate float result1[3][3] = { {0, 0, 0}, {0, 0, 0}, {0, 0, 0} }; transformMatrix(result1, scale, translate); // result2 = result1 x rotate float result2[3][3] = { {0, 0, 0}, {0, 0, 0}, {0, 0, 0} }; transformMatrix(result2, result1, rotate); // result3 = result2 x shear float result3[3][3] = { {0, 0, 0}, {0, 0, 0}, {0, 0, 0} }; transformMatrix(result3, result2, shear); // final matrix obtained in result3, apply dotProduct3D: Mat scaled(int(original.rows*Cy), int(original.cols*Cx), CV_8UC1, Scalar(0)); float vector[3] = { 0 }; int coord[3] = { 0 }; applyTransformMatrix(vector, scaled, result3, coord, original); imshow("view", scaled); } if (GetKeyState(VK_RETURN) & 0x8000) break; if (waitKey(100) >= 0) continue; } }
package de.timedout.oc.browser.model;

import java.util.EventListener;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;

import ca.odell.glazedlists.EventList;

/**
 * Skeleton {@link LocationMediator} that keeps an {@link EventList} of
 * {@link Element}s in sync with a location on a background thread.
 *
 * <p>Lifecycle: {@link #init(EventList, Element)} once, then {@link #connect()}
 * to start the worker (which calls {@link #populateList()}, notifies all
 * registered {@link PopulateListener}s, then calls {@link #keepListUpdated()}),
 * and {@link #disconnect()} to interrupt and wait for the worker.
 */
public abstract class AbstractLocationMediator<E extends Element> implements LocationMediator {

    /** Location this mediator mirrors; set once by {@link #init}. */
    protected E location;

    /** List that is populated and then kept up to date; set once by {@link #init}. */
    protected EventList<Element> list;

    /** Background worker: initial population, listener notification, then continuous updates. */
    protected Thread thread = new Thread() {
        @Override
        public void run() {
            populateList();
            for (PopulateListener listener : listeners) {
                listener.finished();
            }
            keepListUpdated();
        }
    };

    protected boolean isInit = false;
    protected boolean isConnected = false;

    /** Thread-safe set: safe to iterate on the worker while listeners are being added. */
    protected Set<PopulateListener> listeners = new CopyOnWriteArraySet<PopulateListener>();

    @SuppressWarnings("unchecked")
    @Override
    public synchronized void init(EventList<Element> list, Element location) {
        if (!isInit) {
            this.location = (E) location;
            this.list = list;
            isInit = true;
        } else {
            // NOTE(review): "already initialized" is really an IllegalStateException;
            // kept as IllegalArgumentException to preserve the existing contract.
            throw new IllegalArgumentException();
        }
    }

    @Override
    public synchronized void connect() {
        if (!isInit) {
            throw new IllegalArgumentException("Mediator has to be initialized");
        }
        if (!isConnected) {
            thread.start();
            isConnected = true;
        }
    }

    @Override
    public synchronized void disconnect() {
        if (isConnected) {
            isConnected = false;
            // NOTE(review): join() runs while holding this mediator's monitor; if a
            // subclass's populateList()/keepListUpdated() synchronizes on this
            // instance, disconnect() could deadlock - confirm against subclasses.
            try {
                thread.interrupt();
                thread.join();
            } catch (InterruptedException e) {
                // FIX: restore the interrupt status instead of silently swallowing it,
                // so callers on this thread can still observe the interruption.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
        }
    }

    /** Fills {@link #list} with the initial contents of {@link #location}. */
    protected abstract void populateList();

    /** Watches {@link #location} and applies subsequent changes to {@link #list}. */
    protected abstract void keepListUpdated();

    @Override
    public void addPopulateListener(PopulateListener listener) {
        listeners.add(listener);
    }
}
Pseudo natural language vs. controlled natural language Natural language is an indispensable means of communication. But it is also a serious barrier to communication, in particular between humans and computers. Efforts have been made to overcome this barrier for a long time. One of these efforts was to design controlled natural languages (CNL), which are subsets of natural languages, yet are easy to use for non-native users and can be processed by computers. The first results of CNL for practical use were published in 2006–2007. On the other hand, we have designed and used another kind of limited natural language—the Pseudo Natural Language (PNL)—since the late eighties, which has been put into use successfully and has shown its ability to help overcome the language barrier. This talk will compare CNL with PNL in detail and discuss the future direction of this area.
A major funding boost for Centre Vale Park, Todmorden, has been celebrated with ceremony, thanks to Todmorden’s Mayor Andy Hollis, and the Friends of Centre Vale Park volunteers. The Heritage Lottery funded project has created a Heritage and Wildlife Trail in the Centre Vale woodlands. The Launch of the Trail took place with a special public event this month, after more than a year of hard work. The Friends Chairman, Karen Andrews, met the mayor as he arrived at the John Fielden statue in the park. She remarked how appropriate the beautiful 1912 Cadillac was for the occasion: “1912 was exactly the year that this great park first opened as a public space for the people of the town to enjoy”, she said. The Todmorden Community Brass Band played to brighten the grey skies, and the Mayor welcomed everyone to the Centre Vale coach yard which is the start of the Heritage Trail. He thanked all those people who have worked so hard, especially the Friends volunteers, but also the Heritage Lottery Fund for supporting the vision the Friends have to bring Centre Vale’s history to life in such a creative way. History was certainly brought to life by the arrival of a farm horse and working pony in what used to be the original Centre Vale House stable yard. “Horses’ hooves can’t have been heard here for so many years. It adds real atmosphere to our celebrations”, said one of the Friends volunteers. The rain held off for some of the Saturday and, in spite of very unsettled weather during the weekend, a total of 21 people took advantage of guided walks around the Trail, led by the Friends. “No one can control the weather but now the Trail is open I’m sure people will come and explore for themselves on better days”, said Karen, who thanked everyone for working so hard to make the launch a success. Information is available at the TIC and Todmorden Library, and also at www.friendsofcentrevalepark.org.uk.
# <reponame>cs91chris/flask_response_builder  (repo tag preserved from original header)
"""Integration tests for flask_response_builder.

Builds a small Flask app exposing one route per builder/decorator offered by
ResponseBuilder, then drives it through Flask's test client.
"""
import uuid
from enum import Enum
from decimal import Decimal
from datetime import datetime

import flask
import pytest

from flask_response_builder.dictutils import rename_keys
from flask_response_builder.builders import JsonBuilder, CsvBuilder
from flask_response_builder import Case, Transformer, ResponseBuilder


@pytest.fixture
def app():
    """Flask application under test: one route per library feature."""
    _app = flask.Flask(__name__)
    _app.config['RB_HTML_DEFAULT_TEMPLATE'] = 'response.html'
    rb = ResponseBuilder(_app)

    # Shared payload served by most routes (placeholder names/emails kept verbatim).
    data = {
        "users": [
            {
                "id": 1,
                "name": "<NAME>",
                "email": "<EMAIL>",
                "phone": "1-770-736-8031 x56442",
                "sysdate": datetime.now(),
                "address": {
                    "city": "Gwenborough",
                    "zipcode": "92998-3874",
                    "geo": {"lat": -37.3159, "lon": 81.1496}
                },
                "test": [
                    {"a": 1, "b": 2},
                    {"a": 2, "b": 3},
                ]
            },
            {
                "id": 2,
                "name": "<NAME>",
                "email": "<EMAIL>",
                "phone": "010-692-6593 x09125",
                "sysdate": datetime.now(),
                "address": {
                    "city": "Wisokyburgh",
                    "zipcode": "90566-7771",
                    "geo": {"lat": -43.9509, "lon": -34.4618}
                },
                "test": [
                    {"a": None, "b": None}
                ]
            }
        ]
    }

    # ---- one route per serializer decorator -------------------------------

    @_app.route('/json')
    @rb.json()
    def index_json():
        return data

    @_app.route('/xml')
    @rb.xml()
    def index_xml():
        return data

    @_app.route('/yaml')
    @rb.yaml()
    def index_yaml():
        return data

    @_app.route('/html')
    def index_html():
        builder = rb.html(name='Users', as_table=True)
        return builder(data=data['users'])

    @_app.route('/csv')
    def index_csv():
        builder = rb.csv(filename='users')
        return builder(data=data['users'])

    @_app.route('/base64')
    @rb.base64()
    def index_base64():
        return data

    # ---- no-content handling ----------------------------------------------

    @_app.route('/nocontent')
    @rb.no_content
    def nocontent():
        return

    @_app.route('/nocontent/custom')
    @rb.no_content
    def nocontent_custom():
        return None, 202

    @_app.route('/nocontent/error')
    @rb.no_content
    def nocontent_error():
        return data, 500, {'header': 'header'}

    # ---- content negotiation ----------------------------------------------

    @_app.route('/xhr')
    @rb.template_or_json('response.html')
    def test_xhr():
        return data['users']

    @_app.route('/onaccept')
    @rb.on_accept()
    def test_accept():
        item = flask.request.args.get('item')
        if item is not None:
            try:
                return data['users'][int(item)]
            except IndexError:
                return []
        return data['users']

    @_app.route('/onacceptonly')
    @rb.on_accept(acceptable=['application/xml'])
    def test_acceptonly():
        return data['users']

    @_app.route('/customaccept')
    def test_customaccept():
        _, builder = rb.get_mimetype_accept()
        return rb.build_response(
            builder, (data['users'][0], 206, {'header': 'header'})
        )

    @_app.route('/format')
    @rb.on_format()
    def test_format():
        return data['users']

    # ---- response decorator with non-JSON-native types --------------------

    @_app.route('/decorator')
    @rb.response('json')
    def test_decorator():
        old = datetime.now()

        class Color(Enum):
            red = 'red'
            green = 'green'
            blue = 'blue'

        resp = {
            "id": uuid.uuid4(),
            "name": "<NAME>ham",
            "email": "<EMAIL>",
            "sysdate": datetime.now(),
            "time": datetime.now().time(),
            "date": datetime.now().date(),
            "delta": old - datetime.now(),
            "color": Color.red,
            "address": {
                "city": "Gwenborough",
                "zipcode": "92998-3874",
                "geo": {"lat": Decimal(-37.3159), "lon": Decimal(81.1496)}
            }
        }
        resp.pop('sysdate')
        return resp, {'header': 'header'}, 206

    @_app.route('/rename-key')
    @rb.response('json')
    def rename_key():
        return rename_keys(
            {
                'pippo': 1,
                'pluto': 2
            },
            trans=str.upper
        )

    @_app.route('/notation')
    @rb.response('json')
    def notation():
        word = 'pippo pluto'
        return [
            word,
            Case.to_camel(word),
            Case.to_kebab(word),
            Case.to_snake(word)
        ]

    # ---- raw transformers --------------------------------------------------

    @_app.route('/json2xml', methods=['POST'])
    def json_to_xml():
        return flask.Response(Transformer.json_to_xml(flask.request.data))

    @_app.route('/json2csv', methods=['POST'])
    def json_to_csv():
        return flask.Response(Transformer.json_to_csv(flask.request.data))

    @_app.route('/json2yaml', methods=['POST'])
    def json_to_yaml():
        return flask.Response(Transformer.json_to_yaml(flask.request.data))

    @_app.route('/transform')
    def test_transform():
        b = JsonBuilder(mimetype='application/json')
        return flask.Response(b.transform(
            '"pippo";"pluto"\r\n"2";"3"\r\n',
            builder=CsvBuilder
        ), headers={'Content-Type': b.mimetype})

    @_app.route('/custom/mimetype')
    @rb.response('json')
    def custom_mimetype():
        return data['users'], {'Content-Type': 'application/custom+json'}

    @_app.route('/custom/jsonp')
    @rb.response('json')
    def custom_jsonp():
        return {
            'pippo': 1,
            'pluto': 2
        }

    _app.testing = True
    return _app


@pytest.fixture
def client(app):
    _client = app.test_client()
    return _client


def test_app_runs(client):
    res = client.get('/')
    assert res.status_code == 404


def test_app_returns_correct_content_type(client):
    res = client.get('/html')
    assert res.status_code == 200
    assert 'text/html' in res.headers['Content-Type']

    res = client.get('/json')
    assert res.status_code == 200
    assert 'application/json' in res.headers['Content-Type']

    # a callback query string switches JSON to JSONP
    res = client.get('/json?callback=pippo')
    assert res.status_code == 200
    assert 'application/javascript' in res.headers['Content-Type']

    res = client.get('/xml')
    assert res.status_code == 200
    assert 'application/xml' in res.headers['Content-Type']

    res = client.get('/yaml')
    assert res.status_code == 200
    assert 'application/yaml' in res.headers['Content-Type']

    res = client.get('/csv')
    assert res.status_code == 200
    assert 'text/csv' in res.headers['Content-Type']

    res = client.get('/base64')
    assert res.status_code == 200
    assert 'application/base64' in res.headers['Content-Type']


def test_no_content(client):
    res = client.get('/nocontent')
    assert res.status_code == 204
    # assert res.headers.get('Content-Type') is None TODO client seems add it
    # assert res.headers.get('Content-Length') == 0 TODO client seems remove it

    res = client.get('/nocontent/custom')
    assert res.status_code == 202
    # assert res.headers.get('Content-Type') is None TODO client seems add it
    # assert res.headers.get('Content-Length') == 0 TODO client seems remove it


def test_no_content_error(client):
    res = client.get('/nocontent/error')
    assert res.status_code == 500
    assert res.headers.get('header') == 'header'


def test_on_format(client):
    res = client.get('/format?format=xml')
    assert res.status_code == 200
    assert 'application/xml' in res.headers['Content-Type']

    res = client.get('/format?format=yaml')
    assert res.status_code == 200
    assert 'application/yaml' in res.headers['Content-Type']

    # no explicit format: JSON is the default
    res = client.get('/format')
    assert res.status_code == 200
    assert 'application/json' in res.headers['Content-Type']


def test_on_accept(client):
    res = client.get('/onaccept', headers={'Accept': '*/*'})
    assert res.status_code == 200
    assert 'application/json' in res.headers['Content-Type']

    # out-of-range index is answered with an empty list, not an error
    res = client.get('/onaccept?item=11111', headers={'Accept': '*/*'})
    assert res.status_code == 200
    assert len(res.get_json()) == 0

    # quality factors: xml (q=0.8) wins over csv (q=0.4)
    res = client.get('/onaccept', headers={
        'Accept': 'application/xml;encoding=utf-8;q=0.8, text/csv;q=0.4'
    })
    assert res.status_code == 200
    assert 'application/xml' in res.headers['Content-Type']

    res = client.get('/onaccept', headers={'Accept': 'text/csv'})
    assert res.status_code == 200
    assert 'text/csv' in res.headers['Content-Type']

    res = client.get('/onaccept', headers={'Accept': 'custom/format'})
    assert res.status_code == 406


def test_on_accept_only(client):
    res = client.get('/onacceptonly', headers={'Accept': 'application/xml'})
    assert res.status_code == 200
    assert 'application/xml' in res.headers['Content-Type']

    res = client.get('/onacceptonly', headers={'Accept': 'application/json'})
    assert res.status_code == 406


def test_custom_accept(client):
    res = client.get('/customaccept', headers={'Accept': 'application/xml'})
    assert res.status_code == 206
    assert 'application/xml' in res.headers['Content-Type']
    assert res.headers['header'] == 'header'


def test_template_or_json(client):
    res = client.get('/xhr')
    assert res.status_code == 200
    assert 'application/json' in res.headers['Content-Type']

    res = client.get('/xhr', headers={'X-Requested-With': 'XMLHttpRequest'})
    assert res.status_code == 200
    assert 'text/html' in res.headers['Content-Type']


def test_response_decorator(client):
    res = client.get('/decorator')
    assert res.status_code == 206
    assert 'application/json' in res.headers['Content-Type']
    assert res.headers['header'] == 'header'


def test_rename_key(client):
    res = client.get('/rename-key')
    assert res.status_code == 200
    assert 'application/json' in res.headers['Content-Type']

    data = res.get_json()
    assert data['PIPPO'] == 1
    assert data['PLUTO'] == 2


def test_notation(client):
    res = client.get('/notation')
    assert res.status_code == 200

    data = res.get_json()
    assert Case.are_words(data[0])
    assert Case.is_camel(data[1])
    assert Case.is_kebab(data[2])
    assert Case.is_snake(data[3])


def test_transformer(client):
    res = client.post('/json2xml', json={"pippo": 2, "pluto": 3})
    assert res.status_code == 200
    assert res.data == b'<?xml version="1.0" encoding="UTF-8" ?>' \
                       b'<root><pippo type="int">2</pippo><pluto type="int">3</pluto></root>'

    res = client.post('/json2csv', json=[{"pippo": 2, "pluto": 3}])
    assert res.status_code == 200
    assert res.data == b'"pippo";"pluto"\r\n"2";"3"\r\n'

    res = client.post('/json2yaml', json={"pippo": 2, "pluto": 3})
    assert res.status_code == 200
    assert res.data == b'pippo: 2\npluto: 3\n'


def test_build_transform(client):
    res = client.get('/transform')
    assert res.status_code == 200

    data = res.get_json()[0]
    assert data['pippo'] == '2'
    assert data['pluto'] == '3'


def test_custom_mimetype(client):
    res = client.get('/custom/mimetype')
    assert res.status_code == 200
    assert 'application/custom+json' in res.headers['Content-Type']


def test_jsonp(client):
    res = client.get('/custom/jsonp?callback=pippo')
    assert res.status_code == 200
    assert 'application/javascript' in res.headers['Content-Type']

    data = res.data.decode()
    assert data.startswith('pippo(') and data.endswith(');')
package com.arun.rx.rxsensors.Type;

import android.hardware.Sensor;

/**
 * Value holder for a single accelerometer reading: the source {@link Sensor},
 * the raw axis values, the reported accuracy and the event timestamp.
 * <p>
 * Created by arunkumar on 19/11/18.
 */
public class Accelerometer implements SensorType {

    private Sensor sensor;
    private float[] data;   // raw axis values as delivered by the sensor event
    private int accuracy;
    private long timestamp;

    /** Instances are obtained via {@link #instance()}. */
    private Accelerometer() {
    }

    /** Static factory: returns a fresh, empty instance. */
    public static Accelerometer instance() {
        return new Accelerometer();
    }

    /**
     * Stores the fields of a sensor event into this holder.
     *
     * @return this instance, for fluent use
     */
    @Override
    public Accelerometer sensorEvents(Sensor sensor, float[] data, int accuracy, long timestamp) {
        this.sensor = sensor;
        // NOTE(review): the array is stored without a defensive copy, so the
        // caller's buffer is shared with this holder — confirm callers do not
        // recycle the array after handing it over.
        this.data = data;
        this.accuracy = accuracy;
        this.timestamp = timestamp;
        return this;
    }

    @Override
    public int sensorType() {
        return Sensor.TYPE_ACCELEROMETER;
    }

    public float[] data() {
        return data;
    }

    public Sensor sensor() {
        return sensor;
    }

    public int accuracy() {
        return accuracy;
    }

    public long timestamp() {
        return timestamp;
    }

    @Override
    public String toString() {
        // Fix: the original indexed data[0..2] unconditionally, throwing
        // NullPointerException when sensorEvents() was never called, or
        // ArrayIndexOutOfBoundsException when fewer than 3 axes were stored.
        if (data == null || data.length < 3) {
            return "no-data - " + sensor + " - " + accuracy + " - " + timestamp;
        }
        return data[0] + " - " + data[1] + " - " + data[2]
                + " - " + sensor + " - " + accuracy + " - " + timestamp;
    }
}
Work in progress — The design and implementation of a pre-college computer science curriculum for underrepresented high school students. In an effort to increase opportunities for underrepresented students to pursue 4-year Computer Science, Networking and Telecommunications degrees, the University of Pittsburgh initiated the Technology Leadership Institute (TLI). TLI is a six-week summer pre-college program for underrepresented high school students. Experience from the first year illustrates the need to create a culturally relevant curriculum that maps Computer Science to familiar experiences and interests. The limited classroom time with students over the summer forces the design of the curriculum to be concise and comprehensive, yet engaging. The curriculum includes basic and intermediate courses with content in Computer Programming, Web Design, Computer Systems, and Mathematics. This work in progress describes the preliminary design and implementation processes and focuses on what should be taught and, to a lesser degree, the pedagogy. TLI provides an avenue for students to learn about the vast scope of Computer Science and about educational and career opportunities, in addition to obtaining transferable knowledge and skills. The curriculum is assessed by an external evaluation team that will collect, analyze, and report student-identified competency, student perceptions and interests, as well as actual achievement levels. Results from this study will be useful in the curriculum development of Computer Science courses at the K-12 level, as well as in providing strategies for underrepresented groups to explore computer science.
# NOTE(review): this chunk arrived with its newlines collapsed and with two
# expressions redacted to the placeholder "<KEY>" (the team-key computation),
# so it is not runnable as-is. The code is preserved byte-for-byte below;
# recover the original layout and the key expressions from version control.
#
# DownloadAllTeams(league): downloads TheSportsDB's team list for `league`
# (via DownloadAllTeamsForLeague), parses the JSON, and returns a dict of
# per-team kwargs (key, abbreviation, active flag, name/fullName/city,
# SportsDBID, optional aliases and asset URLs: badge/jersey/wordmark/fanArt/
# banner). Hard-coded fixups for known TheSportsDB data errors are applied:
#   - spdb_abbreviation_corrections: MLB WAS->WSH, NFL OAK->LV; for NFL OAK an
#     extra inactive "Raiders"/Oakland record is emitted before re-keying.
#   - NBA: "Los Angeles Clippers" renamed to "LA Clippers" (old name aliased).
#   - NFL: "Washington" gets its full name from strAlternate, city split off.
#   - NHL: "TB" alias added for the redacted Tampa Bay entry; a missing
#     abbreviation with strAlternate "Kraken " is corrected to SEA/Seattle.
# Teams whose abbreviation cannot be resolved are skipped after a console
# warning (print + continue).
<gh_stars>1-10 # TheSportsDB.com # TEAMS import uuid import json from datetime import datetime, date, time from Constants import * from StringUtils import * from Data.TheSportsDBDownloader import * spdb_abbreviation_corrections = { LEAGUE_MLB: { "WAS": "WSH", }, LEAGUE_NFL: { "OAK": "LV", } } def DownloadAllTeams(league): downloadedJson = DownloadAllTeamsForLeague(league) sportsDbTeams = json.loads(downloadedJson) teams = dict() for team in sportsDbTeams["teams"]: key = <KEY> abbrev = deunicode(team.get("strTeamShort")) fullName = deunicode(team["strTeam"]) city = None name = fullName aliases = [] if spdb_abbreviation_corrections.get(league): if spdb_abbreviation_corrections[league].get(abbrev): if league == LEAGUE_NFL and abbrev == "OAK": # Don't apply this to LV Raiders aliases. Apply it to Oakland Raiders abbreviation in history kwargs = { "key": key, "abbreviation": abbrev, "active": False, "name": "Raiders", "fullName": "<NAME>", "city": "Oakland", "SportsDBID": "%s.%s" % (str(team["idTeam"]), abbrev), } teams[key] = kwargs key = <KEY>() else: aliases.append(abbrev) abbrev = spdb_abbreviation_corrections[league][abbrev] if league == LEAGUE_NBA: if name == "Los Angeles Clippers": print("Correcting known data error in TheSportsDB.com data. Incorrect team name for %s -> LA Clippers" % (team.get("strTeam"))) aliases.append(name) fullName = "LA Clippers" city = "LA" name = "Clippers" elif league == LEAGUE_NFL: if name == "Washington": print("Correcting known data error in TheSportsDB.com data. Incorrect team name for %s -> Washington Football Team" % (team.get("strTeam"))) fullName = deunicode(team["strAlternate"]) city = name name = fullName[len(city):].strip() elif league == LEAGUE_NHL: if fullName == "<NAME>": aliases.append("TB") alternate = deunicode(team["strAlternate"]) if team.get("strAlternate") != abbrev else None if alternate: aliases += splitAndTrim(alternate) if not abbrev: if league == LEAGUE_NHL and deunicode(team.get("strAlternate")) == "Kraken ": print("Correcting known data error in TheSportsDB.com data. Missing abbreviation for %s -> SEA" % (team.get("strTeam"))) abbrev = "SEA" name = deunicode(team["strAlternate"].strip()) fullName = deunicode(team["strTeam"]) city = "Seattle" else: print("No abbbreviation for %s team %s (TheSportsDb.com)" % (team["strLeague"], team["strTeam"])) continue else: abbrev = abbrev.upper() kwargs = { "key": key, "abbreviation": abbrev, "active": True, "name": name, "fullName": fullName, "city": city, "SportsDBID": str(team["idTeam"]) } if aliases: kwargs["aliases"] = aliases assets = dict() if team.get("strTeamBadge"): assets.setdefault("badge", []) assets["badge"].append({"source": "thesportsdb", "url": deunicode(team["strTeamBadge"])}) if team.get("strTeamJersey"): assets.setdefault("jersey", []) assets["jersey"].append({"source": "thesportsdb", "url": deunicode(team["strTeamJersey"])}) if team.get("strTeamLogo"): assets.setdefault("wordmark", []) assets["wordmark"].append({"source": "thesportsdb", "url": deunicode(team["strTeamLogo"])}) for i in range(1, 5): if team.get("strTeamFanart%s" % i): assets.setdefault("fanArt", []) assets["fanArt"].append({"source": "thesportsdb", "url": deunicode(team["strTeamFanart%s" % i])}) if team.get("strBanner"): assets.setdefault("banner", []) assets["banner"].append({"source": "thesportsdb", "url": deunicode(team["strBanner"])}) if assets: kwargs["assets"] = assets teams[key] = kwargs return teams
/* * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.idea.common.editor; import com.android.tools.adtui.actions.ZoomInAction; import com.android.tools.adtui.actions.ZoomLabelAction; import com.android.tools.adtui.actions.ZoomOutAction; import com.android.tools.adtui.actions.ZoomToFitAction; import com.android.tools.idea.common.surface.DesignSurface; import com.android.tools.idea.common.type.DesignerEditorFileType; import com.android.tools.idea.flags.StudioFlags; import com.intellij.openapi.Disposable; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.DefaultActionGroup; import java.util.ArrayList; import java.util.Collection; import java.util.List; import javax.swing.JComponent; import org.jetbrains.annotations.NotNull; public class ToolbarActionGroups implements Disposable { protected final DesignSurface mySurface; public ToolbarActionGroups(@NotNull DesignSurface surface) { mySurface = surface; } @NotNull protected ActionGroup getNorthGroup() { return ActionGroup.EMPTY_GROUP; } @NotNull protected ActionGroup getEastGroup() { return ActionGroup.EMPTY_GROUP; } @NotNull protected ActionGroup getNorthEastGroup() { return ActionGroup.EMPTY_GROUP; } @Override public void dispose() { } /** * Includes a trailing separator when adding a non-empty collection of {@link AnAction}s to a {@link DefaultActionGroup}. 
*/ protected static void addActionsWithSeparator(@NotNull DefaultActionGroup group, @NotNull Collection<AnAction> actions) { if (!actions.isEmpty()) { group.addAll(actions); group.addSeparator(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.backup; import static java.util.concurrent.TimeUnit.MINUTES; import static org.apache.commons.io.FileUtils.listFiles; import static org.apache.commons.io.filefilter.DirectoryFileFilter.DIRECTORY; import static org.apache.geode.cache.RegionShortcut.PARTITION_PERSISTENT; import static org.apache.geode.test.awaitility.GeodeAwaitility.await; import static org.apache.geode.test.dunit.VM.getController; import static org.apache.geode.test.dunit.VM.getVM; import static org.assertj.core.api.Assertions.assertThat; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileFilter; import java.io.FileOutputStream; import java.io.IOException; import java.io.Serializable; import java.nio.file.Files; import java.util.Collection; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.filefilter.RegexFileFilter; import org.apache.logging.log4j.Logger; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import 
org.apache.geode.admin.internal.AdminDistributedSystemImpl; import org.apache.geode.cache.DiskStore; import org.apache.geode.cache.DiskStoreFactory; import org.apache.geode.cache.PartitionAttributesFactory; import org.apache.geode.cache.Region; import org.apache.geode.cache.RegionFactory; import org.apache.geode.cache.persistence.PersistentID; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.MembershipListener; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.ClassPathLoader; import org.apache.geode.internal.DeployedJar; import org.apache.geode.internal.cache.DiskStoreImpl; import org.apache.geode.internal.lang.SystemUtils; import org.apache.geode.internal.process.ProcessStreamReader; import org.apache.geode.internal.process.ProcessStreamReader.ReadingMode; import org.apache.geode.internal.util.TransformUtils; import org.apache.geode.logging.internal.log4j.api.LogService; import org.apache.geode.management.BackupStatus; import org.apache.geode.test.compiler.ClassBuilder; import org.apache.geode.test.dunit.VM; import org.apache.geode.test.dunit.rules.CacheRule; import org.apache.geode.test.dunit.rules.DistributedDiskDirRule; import org.apache.geode.test.dunit.rules.DistributedRule; import org.apache.geode.test.junit.rules.serializable.SerializableTemporaryFolder; import org.apache.geode.test.junit.rules.serializable.SerializableTestName; /** * Distributed tests for incremental backup. 
*/ @SuppressWarnings("serial") public class IncrementalBackupDistributedTest implements Serializable { private static final Logger logger = LogService.getLogger(); private static final int DATA_INCREMENT = 10_000; private static final RegexFileFilter OPLOG_FILTER = new RegexFileFilter(".*\\.[kdc]rf$"); private static BackupMembershipListener backupMembershipListener = new BackupMembershipListener(); private int dataStart; private int dataEnd = dataStart + DATA_INCREMENT; private String diskStoreName1; private String diskStoreName2; private String regionName1; private String regionName2; private VM vm0; private VM vm1; private transient Process process; private transient ProcessStreamReader processReader; @Rule public DistributedRule distributedRule = new DistributedRule(); @Rule public CacheRule cacheRule = new CacheRule(); @Rule public DistributedDiskDirRule diskDirRule = new DistributedDiskDirRule(); @Rule public SerializableTemporaryFolder temporaryFolder = new SerializableTemporaryFolder(); @Rule public SerializableTestName testName = new SerializableTestName(); @Before public void setUp() throws Exception { vm0 = getVM(0); vm1 = getVM(1); String uniqueName = getClass().getSimpleName() + "_" + testName.getMethodName(); diskStoreName1 = uniqueName + "_diskStore-1"; diskStoreName2 = uniqueName + "_diskStore-2"; regionName1 = uniqueName + "_region-1"; regionName2 = uniqueName + "_region-2"; vm0.invoke(() -> createCache(diskDirRule.getDiskDirFor(vm0))); vm1.invoke(() -> createCache(diskDirRule.getDiskDirFor(vm1))); createCache(diskDirRule.getDiskDirFor(getController())); performPuts(); } @After public void tearDown() throws Exception { if (process != null && process.isAlive()) { process.destroyForcibly(); process.waitFor(2, MINUTES); } if (processReader != null && processReader.isRunning()) { processReader.stop(); } } /** * This tests the basic features of performBackupIncremental backup. 
This means that operation * logs that are present in both the performBackupBaseline and member's disk store should not be * copied during the performBackupIncremental backup. Additionally, the restore script should * reference and copy operation logs from the performBackupBaseline backup. */ @Test public void testIncrementalBackup() throws Exception { String memberId = vm1.invoke(() -> getModifiedMemberId()); File memberDir = diskDirRule.getDiskDirFor(vm1); // Find all of the member's oplogs in the disk directory (*.crf,*.krf,*.drf) Collection<File> memberOplogFiles = listFiles(memberDir, OPLOG_FILTER, DIRECTORY); assertThat(memberOplogFiles).isNotEmpty(); // Perform a full backup and wait for it to finish validateBackupStatus(vm1.invoke(() -> performBackup(getBaselinePath()))); vm1.invoke(() -> waitForBackup()); // Find all of the member's oplogs in the performBackupBaseline (*.crf,*.krf,*.drf) Collection<File> memberBaselineOplogs = listFiles(getBackupDirForMember(getBaselineDir(), memberId), OPLOG_FILTER, DIRECTORY); assertThat(memberBaselineOplogs).isNotEmpty(); List<String> memberBaselineOplogNames = new LinkedList<>(); TransformUtils.transform(memberBaselineOplogs, memberBaselineOplogNames, TransformUtils.fileNameTransformer); // Perform and performBackupIncremental backup and wait for it to finish performPuts(); // This preserves the new oplogs created by the performBackupBaseline backup validateBackupStatus( vm1.invoke(() -> performBackup(getIncrementalPath(), getBaselineBackupPath()))); vm1.invoke(() -> waitForBackup()); // Find all of the member's oplogs in the performBackupIncremental (*.crf,*.krf,*.drf) Collection<File> memberIncrementalOplogs = listFiles(getBackupDirForMember(getIncrementalDir(), memberId), OPLOG_FILTER, DIRECTORY); assertThat(memberIncrementalOplogs).isNotEmpty(); List<String> memberIncrementalOplogNames = new LinkedList<>(); TransformUtils.transform(memberIncrementalOplogs, memberIncrementalOplogNames, 
TransformUtils.fileNameTransformer); // Assert that the performBackupIncremental backup does not contain performBackupBaseline // operation logs that the member still has copies of. assertThat(memberIncrementalOplogNames).doesNotContainAnyElementsOf(memberBaselineOplogNames); // Perform a second performBackupIncremental and wait for it to finish. // Doing this preserves the new oplogs created by the performBackupIncremental backup performPuts(); validateBackupStatus( vm1.invoke(() -> performBackup(getIncremental2Path(), getIncrementalBackupPath()))); vm1.invoke(() -> waitForBackup()); Collection<File> memberIncremental2Oplogs = listFiles(getBackupDirForMember(getIncremental2Dir(), memberId), OPLOG_FILTER, DIRECTORY); assertThat(memberIncremental2Oplogs).isNotEmpty(); List<String> memberIncremental2OplogNames = new LinkedList<>(); TransformUtils.transform(memberIncremental2Oplogs, memberIncremental2OplogNames, TransformUtils.fileNameTransformer); // Assert that the second performBackupIncremental backup does not contain operation logs copied // into the performBackupBaseline. assertThat(memberIncremental2OplogNames).doesNotContainAnyElementsOf(memberBaselineOplogNames); // Also assert that the second performBackupIncremental backup does not contain operation logs // copied into the member's first performBackupIncremental backup. assertThat(memberIncremental2OplogNames) .doesNotContainAnyElementsOf(memberIncrementalOplogNames); // Shut down our member so we can perform a restore PersistentID id = vm1.invoke(() -> getPersistentID(diskStoreName1)); vm1.invoke(() -> cacheRule.getCache().close()); // Execute the restore performRestore(new File(id.getDirectory()), getBackupDirForMember(getIncremental2Dir(), memberId)); // Collect all of the restored operation logs. 
Collection<File> restoredOplogs = listFiles(new File(id.getDirectory()), OPLOG_FILTER, DIRECTORY); assertThat(restoredOplogs).isNotEmpty(); List<String> restoredOplogNames = new LinkedList<>(); TransformUtils.transform(restoredOplogs, restoredOplogNames, TransformUtils.fileNameTransformer); // Assert that performBackupBaseline operation logs have been copied over to the member's disk // directory. assertThat(restoredOplogNames).containsAll(memberBaselineOplogNames); // Assert that the performBackupIncremental operation logs have been copied over to the member's // disk directory. assertThat(restoredOplogNames).containsAll(memberIncrementalOplogNames); // Assert that the second performBackupIncremental operation logs have been copied over to the // member's disk directory. assertThat(restoredOplogNames).containsAll(memberIncremental2OplogNames); // Reconnect the member. vm1.invoke(() -> createCache(diskDirRule.getDiskDirFor(vm1))); } /** * Successful if a member performs a full backup when its backup data is not present in the * performBackupBaseline (for whatever reason). This also tests what happens when a member is * offline during the performBackupBaseline backup. * * <p> * The test is regarded as successful when all of the missing members oplog files are backed up * during an performBackupIncremental backup. This means that the member performed a full backup * because its oplogs were missing in the performBackupBaseline. */ @Test public void testMissingMemberInBaseline() { // Simulate the missing member by forcing a persistent member to go offline. 
PersistentID missingMember = vm0.invoke(() -> getPersistentID(diskStoreName1)); vm1.invoke(() -> installNewBackupMembershipListener()); vm0.invoke(() -> { cacheRule.getCache().close(); }); await() .until(() -> vm1.invoke(() -> getMissingPersistentMembers().contains(missingMember) && backupMembershipListener.hasMemberDeparted())); // Perform performBackupBaseline and make sure that list of offline disk stores contains our // missing member. BackupStatus baselineStatus = vm1.invoke(() -> performBackup(getBaselinePath())); validateBackupStatus(baselineStatus); assertThat(baselineStatus.getOfflineDiskStores()).isNotNull().hasSize(2); // Find all of the member's oplogs in the missing member's diskstore directory structure // (*.crf,*.krf,*.drf) Collection<File> missingMemberOplogFiles = listFiles(new File(missingMember.getDirectory()), OPLOG_FILTER, DIRECTORY); assertThat(missingMemberOplogFiles).isNotEmpty(); // Restart our missing member and make sure it is back online and part of the cluster vm0.invoke(() -> createCache(diskDirRule.getDiskDirFor(vm0))); // After reconnecting make sure the other members agree that the missing member is back online. await() .untilAsserted( () -> assertThat(getMissingPersistentMembers()).doesNotContain(missingMember)); // Perform performBackupIncremental and make sure we have no offline disk stores. 
BackupStatus incrementalStatus = vm1.invoke(() -> performBackup(getIncrementalPath(), getBaselineBackupPath())); validateBackupStatus(incrementalStatus); assertThat(incrementalStatus.getOfflineDiskStores()).isNotNull().isEmpty(); // Get the missing member's member id which is different from the PersistentID String memberId = vm0.invoke(() -> getModifiedMemberId()); // Get list of backed up oplog files in the performBackupIncremental backup for the missing // member File incrementalMemberDir = getBackupDirForMember(getIncrementalDir(), memberId); Collection<File> backupOplogFiles = listFiles(incrementalMemberDir, OPLOG_FILTER, DIRECTORY); assertThat(backupOplogFiles).isNotEmpty(); // Transform missing member oplogs to just their file names. List<String> missingMemberOplogNames = new LinkedList<>(); TransformUtils.transform(missingMemberOplogFiles, missingMemberOplogNames, TransformUtils.fileNameTransformer); // Transform missing member's performBackupIncremental backup oplogs to just their file names. List<String> backupOplogNames = new LinkedList<>(); TransformUtils.transform(backupOplogFiles, backupOplogNames, TransformUtils.fileNameTransformer); // Make sure that the performBackupIncremental backup for the missing member contains all of the // operation logs for that member. This proves that a full backup was performed for that member. assertThat(backupOplogNames).containsAll(missingMemberOplogNames); } /** * Successful if a member performs a full backup if their backup is marked as incomplete in the * performBackupBaseline. */ @Test public void testIncompleteInBaseline() { // Get the member ID for VM 1 and perform a performBackupBaseline. 
String memberId = vm1.invoke(() -> getModifiedMemberId()); validateBackupStatus(vm1.invoke(() -> performBackup(getBaselinePath()))); // Find all of the member's oplogs in the performBackupBaseline (*.crf,*.krf,*.drf) Collection<File> memberBaselineOplogs = listFiles(getBackupDirForMember(getBaselineDir(), memberId), OPLOG_FILTER, DIRECTORY); assertThat(memberBaselineOplogs).isNotEmpty(); List<String> memberBaselineOplogNames = new LinkedList<>(); TransformUtils.transform(memberBaselineOplogs, memberBaselineOplogNames, TransformUtils.fileNameTransformer); vm1.invoke(() -> { File backupDir = getBackupDirForMember(getBaselineDir(), getModifiedMemberId()); assertThat(backupDir).exists(); // Mark the performBackupBaseline as incomplete (even though it really isn't) File incomplete = new File(backupDir, BackupWriter.INCOMPLETE_BACKUP_FILE); assertThat(incomplete.createNewFile()).isTrue(); }); // Do an performBackupIncremental. It should discover that the performBackupBaseline is // incomplete and backup all of the operation logs that are in the performBackupBaseline. validateBackupStatus( vm1.invoke(() -> performBackup(getIncrementalPath(), getBaselineBackupPath()))); // Find all of the member's oplogs in the performBackupIncremental (*.crf,*.krf,*.drf) Collection<File> memberIncrementalOplogs = listFiles(getBackupDirForMember(getIncrementalDir(), memberId), OPLOG_FILTER, DIRECTORY); assertThat(memberIncrementalOplogs).isNotEmpty(); List<String> memberIncrementalOplogNames = new LinkedList<>(); TransformUtils.transform(memberIncrementalOplogs, memberIncrementalOplogNames, TransformUtils.fileNameTransformer); // Assert that all of the performBackupBaseline operation logs are in the // performBackupIncremental backup. If so, then the incomplete marker was discovered in the // performBackupBaseline by the performBackupIncremental backup process. 
assertThat(memberIncrementalOplogNames).containsAll(memberBaselineOplogNames);
}

/**
 * Successful if all members perform a full backup when they share the performBackupBaseline
 * directory and it is missing.
 */
@Test
public void testMissingBaseline() throws Exception {
  // Get the member ID for VM 1 and perform a performBackupBaseline.
  String memberId = vm1.invoke(() -> getModifiedMemberId());
  validateBackupStatus(vm1.invoke(() -> performBackup(getBaselinePath())));

  // Find all of the member's oplogs in the performBackupBaseline (*.crf,*.krf,*.drf)
  Collection<File> memberBaselineOplogs =
      listFiles(getBackupDirForMember(getBaselineDir(), memberId), OPLOG_FILTER, DIRECTORY);
  assertThat(memberBaselineOplogs).isNotEmpty();

  List<String> memberBaselineOplogNames = new LinkedList<>();
  TransformUtils.transform(memberBaselineOplogs, memberBaselineOplogNames,
      TransformUtils.fileNameTransformer);

  // Do a performBackupIncremental after deleting the performBackupBaseline. It should discover
  // that the performBackupBaseline is gone and backup all of the operation logs that are in the
  // performBackupBaseline.
  FileUtils.deleteDirectory(getBaselineDir());

  // Run the incremental backup while still pointing at the (now deleted) baseline path.
  vm1.invoke(() -> {
    new BackupOperation(cacheRule.getSystem().getDistributionManager(), cacheRule.getCache())
        .backupAllMembers(getIncrementalPath(), getBaselinePath());
  });

  // Find all of the member's oplogs in the performBackupIncremental (*.crf,*.krf,*.drf)
  Collection<File> memberIncrementalOplogs =
      listFiles(getBackupDirForMember(getIncrementalDir(), memberId), OPLOG_FILTER, DIRECTORY);
  assertThat(memberIncrementalOplogs).isNotEmpty();

  List<String> memberIncrementalOplogNames = new LinkedList<>();
  TransformUtils.transform(memberIncrementalOplogs, memberIncrementalOplogNames,
      TransformUtils.fileNameTransformer);

  // Assert that all of the performBackupBaseline operation logs are in the
  // performBackupIncremental backup. If so, then the missing performBackupBaseline was discovered
  // by the performBackupIncremental backup process.
  assertThat(memberIncrementalOplogNames).containsAll(memberBaselineOplogNames);
}

/**
 * Verifies that a user deployed jar file is included as part of the backup.
 */
@Test
public void testBackupUserDeployedJarFiles() throws Exception {
  String jarName = "BackupJarDeploymentDUnit";
  byte[] classBytes = new ClassBuilder().createJarFromName(jarName);

  File jarFile = temporaryFolder.newFile();
  IOUtils.copyLarge(new ByteArrayInputStream(classBytes), new FileOutputStream(jarFile));

  // Deploy a "dummy" jar to the VM.
  File deployedJarFile = vm0.invoke(() -> {
    DeployedJar deployedJar =
        ClassPathLoader.getLatest().getJarDeployer().deploy(jarName, jarFile);
    return deployedJar.getFile();
  });

  assertThat(deployedJarFile).exists();

  // Perform backup. Make sure it is successful.
  validateBackupStatus(vm0.invoke(() -> performBackup(getBaselinePath())));

  // Make sure the user deployed jar is part of the backup.
  Collection<File> memberDeployedJarFiles =
      listFiles(getBackupDirForMember(getBaselineDir(), vm0.invoke(() -> getModifiedMemberId())),
          new RegexFileFilter(".*" + jarName + ".*"), DIRECTORY);
  assertThat(memberDeployedJarFiles).isNotEmpty();

  // Shut down our member so we can perform a restore
  PersistentID id = vm0.invoke(() -> getPersistentID(diskStoreName1));
  vm0.invoke(() -> cacheRule.getCache().close());

  // Get the VM's user directory.
  String vmDir = vm0.invoke(() -> System.getProperty("user.dir"));

  File backupDir =
      getBackupDirForMember(getBaselineDir(), vm0.invoke(() -> getModifiedMemberId()));

  // Cleanup "dummy" jar from file system.
  deleteMatching(new File("."), Pattern.compile('^' + jarName + ".*#\\d++$"));

  // Execute the restore
  performRestore(new File(id.getDirectory()), backupDir);

  // Make sure the user deployed jar is part of the restore.
  Collection<File> restoredJars =
      listFiles(new File(vmDir), new RegexFileFilter(".*" + jarName + ".*"), DIRECTORY);
  assertThat(restoredJars).isNotEmpty();

  // NOTE(review): restoredJarNames is built from memberDeployedJarFiles (the backup listing),
  // not from restoredJars, so the loop below re-checks the backup rather than the restored
  // files. Confirm whether restoredJars was the intended transform source.
  List<String> restoredJarNames = new LinkedList<>();
  TransformUtils.transform(memberDeployedJarFiles, restoredJarNames,
      TransformUtils.fileNameTransformer);
  for (String name : restoredJarNames) {
    assertThat(name).contains(jarName);
  }

  // Restart the member
  vm0.invoke(() -> createCache(diskDirRule.getDiskDirFor(vm0)));

  // Remove the "dummy" jar from the VM.
  vm0.invoke(() -> {
    for (DeployedJar jarClassLoader : ClassPathLoader.getLatest().getJarDeployer()
        .findDeployedJars()) {
      if (jarClassLoader.getJarName().startsWith(jarName)) {
        ClassPathLoader.getLatest().getJarDeployer().undeploy(jarClassLoader.getJarName());
      }
    }
  });

  // Cleanup "dummy" jar from file system.
  deleteMatching(new File(vmDir), Pattern.compile('^' + jarName + ".*#\\d++$"));
}

/** Creates the cache, the two disk stores, and the two persistent partitioned regions. */
private void createCache(final File diskDir) {
  cacheRule.getOrCreateCache();

  createDiskStore(diskStoreName1, diskDir);
  createDiskStore(diskStoreName2, diskDir);

  createRegion(regionName1, diskStoreName1);
  createRegion(regionName2, diskStoreName2);
}

/** Creates a disk store whose single directory is {@code diskDir}. */
private void createDiskStore(final String diskStoreName, final File diskDir) {
  DiskStoreFactory diskStoreFactory = cacheRule.getCache().createDiskStoreFactory();
  diskStoreFactory.setDiskDirs(new File[] {diskDir});
  diskStoreFactory.create(diskStoreName);
}

/** Creates a persistent partitioned region (5 buckets) backed by the named disk store. */
private void createRegion(final String regionName, final String diskStoreName) {
  PartitionAttributesFactory<Integer, String> partitionAttributesFactory =
      new PartitionAttributesFactory<>();
  partitionAttributesFactory.setTotalNumBuckets(5);

  RegionFactory<Integer, String> regionFactory =
      cacheRule.getCache().createRegionFactory(PARTITION_PERSISTENT);
  regionFactory.setDiskStoreName(diskStoreName);
  regionFactory.setPartitionAttributes(partitionAttributesFactory.create());
  regionFactory.create(regionName);
}

/** Lazily creates and returns the directory receiving the baseline (full) backup. */
private File getBaselineDir() {
  File dir = new File(temporaryFolder.getRoot(), "baseline");
  if (!dir.exists()) {
    dir.mkdirs();
  }
  return dir;
}

private String getBaselinePath() {
  return getBaselineDir().getAbsolutePath();
}

/** Lazily creates and returns the directory receiving the first incremental backup. */
private File getIncrementalDir() {
  File dir = new File(temporaryFolder.getRoot(), "incremental");
  if (!dir.exists()) {
    dir.mkdirs();
  }
  return dir;
}

private String getIncrementalPath() {
  return getIncrementalDir().getAbsolutePath();
}

/** Lazily creates and returns the directory receiving the second incremental backup. */
private File getIncremental2Dir() {
  File dir = new File(temporaryFolder.getRoot(), "incremental2");
  if (!dir.exists()) {
    dir.mkdirs();
  }
  return dir;
}

private String getIncremental2Path() {
  return getIncremental2Dir().getAbsolutePath();
}

private Set<PersistentID> getMissingPersistentMembers() {
  return AdminDistributedSystemImpl
      .getMissingPersistentMembers(cacheRule.getCache().getDistributionManager());
}

private BackupStatus
performBackup(final String targetDirPath) { return performBackup(targetDirPath, null); } private BackupStatus performBackup(final String targetDirPath, final String baselineDirPath) { return new BackupOperation(cacheRule.getCache().getDistributionManager(), cacheRule.getCache()) .backupAllMembers(targetDirPath, baselineDirPath); } private String getModifiedMemberId() { return cacheRule.getCache().getDistributedSystem().getDistributedMember().toString() .replaceAll("[^\\w]+", "_"); } private PersistentID getPersistentID(final String diskStoreName) { for (DiskStore diskStore : cacheRule.getCache().listDiskStores()) { if (diskStore.getName().equals(diskStoreName)) { return ((DiskStoreImpl) diskStore).getPersistentID(); } } throw new Error("Failed to find disk store " + diskStoreName); } private void waitForBackup() { Collection<DiskStore> backupInProgress = cacheRule.getCache().listDiskStores(); List<DiskStoreImpl> backupCompleteList = new LinkedList<>(); while (backupCompleteList.size() < backupInProgress.size()) { for (DiskStore diskStore : backupInProgress) { if (((DiskStoreImpl) diskStore).getInProgressBackup() == null && !backupCompleteList.contains(diskStore)) { backupCompleteList.add((DiskStoreImpl) diskStore); } } } } private String getBaselineBackupPath() { File[] dirs = getBaselineDir().listFiles((FileFilter) DIRECTORY); assertThat(dirs).hasSize(1); return dirs[0].getAbsolutePath(); } private String getIncrementalBackupPath() { File[] dirs = getIncrementalDir().listFiles((FileFilter) DIRECTORY); assertThat(dirs).hasSize(1); return dirs[0].getAbsolutePath(); } private File getBackupDirForMember(final File rootDir, final String memberId) { File[] dateDirs = rootDir.listFiles((FileFilter) DIRECTORY); assertThat(dateDirs).hasSize(1); File[] memberDirs = dateDirs[0].listFiles(file -> file.isDirectory() && file.getName().contains(memberId)); assertThat(memberDirs).hasSize(1); return memberDirs[0]; } private ReadingMode getReadingMode() { return 
SystemUtils.isWindows() ? ReadingMode.NON_BLOCKING : ReadingMode.BLOCKING; } private void execute(final String command) throws IOException, InterruptedException { process = new ProcessBuilder(command).redirectErrorStream(true).start(); processReader = new ProcessStreamReader.Builder(process).inputStream(process.getInputStream()) .inputListener(line -> logger.info("OUTPUT: {}", line)) .readingMode(getReadingMode()).continueReadingMillis(2 * 1000).build().start(); assertThat(process.waitFor(5, MINUTES)).isTrue(); assertThat(process.exitValue()).isEqualTo(0); } private void performRestore(final File memberDir, final File backupDir) throws IOException, InterruptedException { // The restore script will not restore if there is an if file in the copy to directory. Remove // these files first. Collection<File> ifFiles = listFiles(memberDir, new RegexFileFilter(".*\\.if$"), DIRECTORY); for (File file : ifFiles) { assertThat(file.delete()).isTrue(); } // Remove all operation logs. Collection<File> oplogs = listFiles(memberDir, OPLOG_FILTER, DIRECTORY); for (File file : oplogs) { assertThat(file.delete()).isTrue(); } // Get a hold of the restore script and make sure it is there. 
File restoreScript = new File(backupDir, "restore.sh");
if (!restoreScript.exists()) {
  restoreScript = new File(backupDir, "restore.bat");
}
assertThat(restoreScript).exists();

execute(restoreScript.getAbsolutePath());
}

/** Writes a fresh window of entries into both regions, then advances the window. */
private void performPuts() {
  Region<Integer, String> region = cacheRule.getCache().getRegion(regionName1);

  // Fill our region data
  for (int i = dataStart; i < dataEnd; ++i) {
    region.put(i, Integer.toString(i));
  }

  Region<Integer, String> barRegion = cacheRule.getCache().getRegion(regionName2);

  // Fill our region data
  for (int i = dataStart; i < dataEnd; ++i) {
    barRegion.put(i, Integer.toString(i));
  }

  // Advance the window so the next call writes entirely new keys.
  dataStart += DATA_INCREMENT;
  dataEnd += DATA_INCREMENT;
}

/** Asserts the status lists at least one fully-described disk store per member. */
private void validateBackupStatus(final BackupStatus backupStatus) {
  Map<DistributedMember, Set<PersistentID>> backupMap = backupStatus.getBackedUpDiskStores();
  assertThat(backupMap).isNotEmpty();

  for (DistributedMember member : backupMap.keySet()) {
    assertThat(backupMap.get(member)).isNotEmpty();
    for (PersistentID id : backupMap.get(member)) {
      assertThat(id.getHost()).isNotNull();
      assertThat(id.getUUID()).isNotNull();
      assertThat(id.getDirectory()).isNotNull();
    }
  }
}

/** Deletes every file under {@code dir} (searched recursively) whose name matches. */
private void deleteMatching(final File dir, final Pattern pattern) throws IOException {
  Collection<File> files = listFiles(dir, new RegexFileFilter(pattern), DIRECTORY);
  for (File file : files) {
    Files.delete(file.toPath());
  }
}

/**
 * Membership listener that records whether any member has departed.
 *
 * NOTE(review): memberDeparted is written from the membership callback thread and read from
 * the test thread without volatile/synchronization — confirm this visibility is acceptable.
 */
public static class BackupMembershipListener implements MembershipListener {

  private boolean memberDeparted = false;

  @Override
  public void memberDeparted(DistributionManager distributionManager,
      InternalDistributedMember id, boolean crashed) {
    memberDeparted = true;
  }

  public boolean hasMemberDeparted() {
    return memberDeparted;
  }
}

/** Replaces any previously installed BackupMembershipListener with a fresh instance. */
public void installNewBackupMembershipListener() {
  if (backupMembershipListener != null) {
    cacheRule.getCache().getDistributionManager()
        .removeMembershipListener(backupMembershipListener);
  }
  backupMembershipListener = new BackupMembershipListener();
  cacheRule.getCache().getDistributionManager().addMembershipListener(backupMembershipListener);
}
}
<reponame>bharath412/twilio package ai.api.web; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import ai.api.AIConfiguration; import ai.api.AIDataService; import ai.api.AIServiceContext; import ai.api.AIServiceContextBuilder; import ai.api.AIServiceException; import ai.api.model.AIRequest; import ai.api.model.AIResponse; /** * Basic AI service servlet. * * Must be initialized with {@link AIServiceServlet#PARAM_API_AI_KEY} parameter. * Set your <a href="https://docs.api.ai/docs/authentication#obtaining-access-tokens"> * api.ai access token</a> as a value. */ public abstract class AIServiceServlet extends HttpServlet { private static final long serialVersionUID = 1L; /** * Api.ai access token parameter name */ public static final String PARAM_API_AI_KEY = "60f688eff6f04933ad8993839ef0b745"; private AIDataService aiDataService; /** * @see HttpServlet#init(ServletConfig config) */ @Override public void init(ServletConfig config) throws ServletException { super.init(config); AIConfiguration aiConfig = new AIConfiguration(config.getInitParameter(PARAM_API_AI_KEY)); aiDataService = new AIDataService(aiConfig); } /** * Perform request to AI data service * @param aiRequest Request object. Cannot be <code>null</code>. * @param serviceContext Service context. If <code>null</code> then default context will be used. * @return Response object * @throws AIServiceException Thrown on server access error */ protected final AIResponse request(AIRequest aiRequest, AIServiceContext serviceContext) throws AIServiceException { return aiDataService.request(aiRequest, serviceContext); } /** * Perform request to AI data service * @param query Request plain text string. Cannot be <code>null</code>. * @param serviceContext Service context. If <code>null</code> then default context will be used. 
* @return Response object * @throws AIServiceException Thrown on server access error */ protected final AIResponse request(String query, AIServiceContext serviceContext) throws AIServiceException { return request(new AIRequest(query), serviceContext); } /** * Perform request to AI data service * @param aiRequest Request object. Cannot be <code>null</code>. * @param session Session object. If <code>null</code> then default context will be used. * @return Response object * @throws AIServiceException Thrown on server access error */ protected final AIResponse request(AIRequest aiRequest, HttpSession session) throws AIServiceException { return request(aiRequest, (session != null) ? AIServiceContextBuilder.buildFromSessionId(session.getId()) : null); } /** * Perform request to AI data service * @param query Request plain text string. Cannot be <code>null</code>. * @param session Session object. If <code>null</code> then default context will be used. * @return Response object * @throws AIServiceException Thrown on server access error */ protected AIResponse request(String query, HttpSession session) throws AIServiceException { return request(new AIRequest(query), (session != null) ? AIServiceContextBuilder.buildFromSessionId(session.getId()) : null); } /** * Perform request to AI data service * @param aiRequest Request object. Cannot be <code>null</code>. * @param sessionId Session string id. If <code>null</code> then default context will be used. * @return Response object * @throws AIServiceException Thrown on server access error */ protected final AIResponse request(AIRequest aiRequest, String sessionId) throws AIServiceException { return request(aiRequest, (sessionId != null) ? AIServiceContextBuilder.buildFromSessionId(sessionId) : null); } /** * Perform request to AI data service * @param query Request plain text string. Cannot be <code>null</code>. * @param sessionId Session string id. If <code>null</code> then default context will be used. 
* @return Response object * @throws AIServiceException Thrown on server access error */ protected final AIResponse request(String query, String sessionId) throws AIServiceException { return request(new AIRequest(query), (sessionId != null) ? AIServiceContextBuilder.buildFromSessionId(sessionId) : null); } }
#include <bits/stdc++.h>
using namespace std;
typedef long long int ll;

// Returns true ("YES") unless the string contains an adjacent pair "11" that is
// followed, strictly after it, by an adjacent pair "00".
//
// This is exactly what the original flag-based scan (fl1/fl2/flg/flg3) detected:
// flg was set at the first "11" (two '1's at distance 1), and flg3 at the first
// subsequent "00" — zeros were only examined at indices after the second '1'.
static bool isGood(const string& s) {
    size_t ones = s.find("11");
    if (ones == string::npos) {
        return true; // no "11" anywhere -> always good
    }
    // The original only started scanning for zeros after index ones + 1, so the
    // earliest possible "00" starts at ones + 2.
    return s.find("00", ones + 2) == string::npos;
}

int main() {
    ios_base::sync_with_stdio(false);
    cin.tie(NULL);

    ll t;
    cin >> t;
    while (t--) {
        string s;
        cin >> s;
        // '\n' instead of endl: identical output, no per-line stream flush.
        cout << (isGood(s) ? "YES" : "NO") << '\n';
    }
    return 0;
}
Share this article: A judge Thursday sided with Orange County Sheriff Sandra Hutchens in a lawsuit accusing her of laying off top managers to bring in her colleagues from a former job in the Los Angeles County Sheriff’s Department. Hutchens testified in the trial that she did not want to lay off the managers, but budget constraints forced her hand. Orange County Superior Court Judge Frederick Aguirre wrote in his ruling that the plaintiffs, “like the other 55 department employees, were part of a mass layoff due to budgetary reasons.” The judge also ruled that the defendants did not have a right to have administrative hearings as required by the Public Safety Officers Procedural Bill of Rights Act. “As no ‘punitive’ action was taken by Hutchens and the defendants, pre-termination hearings under POBRA and/or due process principles were not required,” the judge wrote. “The court notes that Hutchens could not just simply impose, for example, an across-the-board 10 percent reduction of the department’s employees,” Aguirre wrote. The judge said Hutchens was lawfully required “to provide certain core safety services to the public 24 hours per day seven days per week.” Hutchens also could not “breach” contracts with several cities for which the sheriff provides services, the judge noted. Patricia Bates, a supervisor on the board in May 2009, wrote the sheriff a letter advising her not to reduce services to the contract cities or boost fees to the clients. Bates advised Hutchens to cut from administrative positions. Hutchens testified last July in the trial that she had no issues with the work of former Assistant Sheriffs Jack Anderson and John Davis and ex-Captains Brian Cossairt, Deana Berquist and Robert Eason, who sought millions in damages as well as their jobs back. She testified that she had to let them go to avoid laying off deputies or investigators. “I needed everybody who was there, but I had to make some difficult choices,” Hutchens testified. 
The former sheriff’s officials claimed Hutchens fired them to bring in colleagues from her prior job in the Los Angeles County Sheriff’s Department when she was appointed sheriff to replace Mike Carona, who was convicted of witness tampering. Anderson was the interim sheriff until the board of supervisors appointed Hutchens. A message to the plaintiffs’ attorney, Joel Baruch, was not immediately returned after business hours. Hutchens testified last July about asking the Orange County Board of Supervisors multiple times to restore $7.3 million in cuts to her department in 2008 when the county was struggling with deficits. The sheriff said she felt that if she could get the $7.3 million she could avoid making layoffs of lower-ranking deputies and investigators. Hutchens said she was told that any cuts she made among deputies and investigators below the rank of sergeant would have to be done by seniority. That wasn’t the case among the “command staff” from the ranks of lieutenant and higher. “My purpose was to get (the $7.3 million) reinstated so I wouldn’t have to do any layoffs,” Hutchens testified. When the supervisors refused to give her what she wanted, Hutchens turned her focus to a plan to lay off two assistant sheriffs and six captains. She combined various departments and had some lieutenants pick up supervisory roles as she herself personally oversaw the coroner’s division and the crime lab to save money. Baruch asked Hutchens during the trial if she was swayed by political pressure from unions representing deputies and non-sworn civilian employees, but the sheriff said the association for the deputies was backing another candidate for sheriff. Baruch also quizzed Hutchens on her promotion of multiple sergeants to lieutenant just as she was considering the plan to lay off the captains and assistant sheriffs. Baruch questioned why Hutchens didn’t forego the promotions and let the laid-off captains take demotions to lieutenant to stay on the payroll. 
“Those positions were in the budget and had money attached to them — that’s what you’re missing,” Hutchens replied. The sheriff said she doubted she could close her deficit by letting captains and assistant sheriffs take demotions. Hutchens also defended her hiring of former co-workers from the Los Angeles County Sheriff’s Department to senior positions. She said they were needed to fill gaps in various divisions of her department such as risk management and how to prepare for large-scale emergencies. Hutchens said the risk-management division especially needed her attention in the wake of the fatal beating of an inmate in Orange County Jail in 2006. –City News Service Judge sides with OC sheriff over layoffs decision was last modified: by >> Want to read more stories like this? Get our Free Daily Newsletters Here! Follow us:
def resNumApariciones(self, apar):
    """Subtract ``apar`` from the private appearance counter ``__numapar``.

    Spanish: "restar numero de apariciones" (subtract number of appearances).
    NOTE(review): presumably ``__numapar`` tracks how many times this item
    appears — confirm against the enclosing class definition (not visible here).
    No lower bound is enforced, so the counter can go negative.
    """
    self.__numapar -= apar
# -*- coding: utf-8 -*- """ This file contains functions to display the UI for choosing the input files """ import Tkinter as tk import tkFileDialog import tkMessageBox import sys fileCorrige = "" fileCopies = "" fileEleves = "" # dialog window to open a file def openFile(string, types=[('pdf files', '.pdf')]): filepath = tkFileDialog.askopenfilename(title=string,filetypes=types) return(filepath) # opens a dialog window to open a pdf file and saves the result in fileCorrige def openCorrige(root, strCorrige, buttonValidate): global fileCorrige temp = openFile('Pdf du corrigé') if temp != "": fileCorrige=temp strCorrige.set(fileCorrige.split('/')[-1]) root.update_idletasks() if fileCorrige != "" and fileCopies != "": buttonValidate.config(state=tk.NORMAL) else: buttonValidate.config(state=tk.DISABLED) # opens a dialog window to open a pdf file and saves the result in fileCopies def openCopies(root, strCopies, buttonValidate): global fileCopies temp = openFile('Pdf des copies') if temp != "": fileCopies=temp strCopies.set(fileCopies.split('/')[-1]) root.update_idletasks() if fileCorrige != "" and fileCopies != "": buttonValidate.config(state=tk.NORMAL) else: buttonValidate.config(state=tk.DISABLED) # opens a dialog window to open a txt file and saves the result in fileEleves def openEleves(root, strEleves): global fileEleves temp = openFile('Liste des élèves', [('text files', '.txt')]) if temp != "": fileEleves=temp strEleves.set(fileEleves.split('/')[-1]) root.update_idletasks() # start the program def validate(root): if fileCorrige != "" and fileCopies != "": root.destroy() # displays warning if the user wants to close window def on_closing(root): if tkMessageBox.askokcancel("Quitter", "Voulez-vous quitter ?"): root.destroy() sys.exit(0) # main function : displays the UI for choosing the input files def interface(): root = tk.Tk() root.title('Choix des pdf') root.minsize(300,10) root.iconbitmap("dev\logo.ico") # IMT logo imageEx = tk.PhotoImage(file = 
'Dev/IMTLD_RVB_Baseline.gif')
    panelA = tk.Label(image=imageEx)
    # Keep a reference on the widget so the PhotoImage is not garbage-collected.
    panelA.image = imageEx
    panelA.grid(row=1, column=1)

    labelTitle = tk.Label(root, width=52, height=5, font=("Arial Bold", 13),
                          text="Correction automatique de grilles de TOEIC",
                          takefocus=0, justify=tk.LEFT, bg="#FFFFFF")
    labelTitle.grid(row=1, column=2)

    # Labels show "Aucun" (none) until a file has been picked.
    strCorrige = tk.StringVar()
    strCorrige.set('Aucun')
    strCopies= tk.StringVar()
    strCopies.set('Aucun')
    strEleves= tk.StringVar()
    strEleves.set('Aucun')

    # The command lambdas capture buttonValidate, which is only assigned further
    # down; this is fine because they run on click, after interface() built the UI.
    tk.Button(root, text ='Pdf du corrigé', width=20, height=1,
              command=lambda:openCorrige(root, strCorrige, buttonValidate)).grid(
                  row=2, column=1, padx=5, pady=5)
    l1 = tk.Label(root, textvariable =strCorrige)
    l1.grid(row=2, column=2, sticky='W', padx=10)

    tk.Button(root, text ='Pdf des copies', width=20, height=1,
              command=lambda:openCopies(root, strCopies, buttonValidate)).grid(
                  row=3, column=1, padx=5, pady=5)
    l2 = tk.Label(root, textvariable =strCopies)
    l2.grid(row=3, column=2, sticky='W', padx=10)

    tk.Button(root, text ='Liste des élèves (optionnel)', width=20, height=1,
              command=lambda:openEleves(root, strEleves)).grid(
                  row=4, column=1, padx=5, pady=5)
    l3 = tk.Label(root, textvariable =strEleves)
    l3.grid(row=4, column=2, sticky='W', padx=10)

    # Disabled until both mandatory PDFs have been chosen (see openCorrige/openCopies).
    buttonValidate = tk.Button(root, text ='Valider', width=20, height=1,
                               command=lambda:validate(root))
    buttonValidate.grid(row=5, column=1, columnspan=3, padx=5, pady=5)
    buttonValidate.config(state=tk.DISABLED)

    # displays warning if the user wants to close window
    root.protocol("WM_DELETE_WINDOW", lambda:on_closing(root))
    root.mainloop()
    return(fileCorrige,fileCopies, fileEleves)


# for testing only
if __name__ == '__main__':
    a=interface()
    print(a)
Traumatic Dental Injuries: Clinical Case Presentation and a 10-Year Epidemiological Investigation in an Italian Dental Emergency Service Traumatic dental injuries (TDIs) are very common in the world population, and international literature reports several studies which helped in the definition of international guidelines. The aim of this study is to present two clinical cases of TDI and to investigate epidemiological and etiological aspects of TDIs in patients treated in Modena, Italy, between January 2010 and December 2020. The presented case reports are two explicative clinical cases of successful TDI management with a long-term follow-up. The epidemiological analysis was performed on patients who visited the Dental Emergency Service of the Dentistry and Oral-Maxillo-Facial Surgery Unit of Modena (Italy) over a period of 10 years. Data relating to age, gender, type of trauma, and place of accident were collected. Five-hundred-sixty-five TDIs that occurred to patients from 1 to 68 years old were reported, with a total of 860 injured teeth. The peak age at which TDIs are most represented varies between 2 and 3 years old, and they occurred frequently from 1 up to 7 years old. 57.5% were male, while 42.5% were female. The most common trauma resulted to be the uncomplicated crown fracture (20%), immediately followed by lateral luxation (19%), intrusive luxation (18%), avulsion (17%), and complicated crown fracture (15%). TDIs occurred at home in 44% of cases. The need for more prevention training must be highlighted, due to the fact that many TDIs occur at home and in a preschool age. Introduction Traumatic dental injuries (TDIs) are common in the worldwide population. Although the oral cavity represents a small component of the human body, TDIs represent 5% of all health injuries and up to 17% in pediatric patients. A recent study shows that more than one billion living people have had TDIs. 
Scientific literature shows a great variability regarding the incidence and prevalence of TDIs in the world population. Although the International Association of Dental Traumatology (IADT) had codified precise guidelines, this variability is related to several reasons, such as the diversity in TDI classification methods and parameter recording, and the different cultural and social contexts in which the various studies had been performed. TDIs mainly affect the pediatric population, with a high percentage of preschool children and adolescents, and hardly concern elderly patients, if not in a small percentage. The most frequent TDIs in patients with deciduous teeth are periodontal injuries and luxation, while hard tissue injuries and consequent crown fractures are more specific and related to patients with permanent dentition. Trauma such as avulsion and complicated fractures determine functional and esthetic issues that could affect social relationships; numerous studies have shown how TDIs could lead to relationship difficulties and therefore be the reason for personal, domestic, and social issues. The aim of this article is to present two explicative clinical cases of TDI management and to investigate TDI frequency, patterns, and causes in patients treated in Modena, Italy, between January 2010 and December 2020. Case Presentation Making use of a digital clinical chart archiving system, TDI cases that occurred at the Dental Emergency Service of the Dental Clinic of Modena University, between 1 January 2010 and 31 December 2020, were selected. All patients have been treated according to the protocols established by the IADT guidelines, enhanced with photographic documentation, radiographic examination, pulp status evaluation, and instrumental investigations. 
According to the principle of minimum invasiveness and with the aim of optimizing the oral health-related quality of life, individualized treatment plans for each patient were implemented in line with scientific evidence. All the anamnestic questionnaires and the TDI-related information were collected, and the following data were obtained: demographic data (gender and age); causes of trauma (sports injuries, accidents at work, collisions, violence and abuse, unintentional falls, and road accidents); location of the trauma (home, school, leisure activities, sports environment, and work); type of trauma, classified according to IADT classification ; and teeth involved (permanent or deciduous, maxillary or mandibular, and anterior or posterior). All data were registered into Microsoft Excel version 14.1.0 for Macintosh. Among all selected TDIs, the management of two explicative and challenging clinical cases is described, following CARE reporting guidelines for case reports. This tooth had been kept in an extraoral dry environment for two hours and had a mature apex, and for this reason, nonviable soft tissues were removed from the root with a gauze and endodontic therapy was carried out prior to replantation. After local anesthesia administration, the socket was irrigated with saline solution and carefully checked to exclude the presence of bony fractures. The tooth was then replanted applying slight but firm pressure. The correct position of the tooth was verified clinically and radiographically, and it was stabilized through a passive flexible splint made by a 0.4 mm diameter metal wire bonded to the tooth and to adjacent teeth ( Figure 2). Postoperative instructions included antibiotic therapy with amoxicillin, soft diet for 2 weeks, and soft-bristle toothbrush and chlorhexidine 0.12% mouth rinses, twice a day for 2 weeks. The splint was kept in place for 2 weeks, and the patient was visited after 2, 3, and 6 months and yearly. 
At the 3-year follow-up visit, the tooth appeared asymptomatic, with physiological mobility, no sensitivity to percussion, and normal percussion sound. No radiotransparency and no radiographic evidence of root resorption were detected (Figure 3). At the 8-year follow-up visit instead, the tooth presented no mobility, metallic percussion sound, and clinical infraposition. Radiographically, there was evidence of ankylosis-related resorption (Figure 4). Clinical Case #2. A 20-year-old male patient was referred due to traumatic root fracture of his left upper central incisor after a bike accident (Figure 5). A passive and flexible buccal splint with metal stainless steel wire and composite patches was immediately performed and kept for 4 weeks, aimed at stabilizing the mobile coronal segment and maintaining the vitality of the tooth. Soft diet was suggested for 1 week and oral hygiene instructions were delivered, brushing with a soft-bristle toothbrush and rinsing with chlorhexidine 0.12% mouthwash to prevent accumulation of plaque and biofilm. However, after one month, the tooth still did not respond to electrometric and thermal pulp testing, due to plausible pulp necrosis development. For this reason, an endodontic treatment of the coronal tooth segment to the fracture line had to be performed (Figure 6). After a further 4 weeks, the patient still referred pain and discomfort of the area, and the decision for a surgical approach had to be taken. An apicectomy with a retrograde canal obturation was performed in order to remove the symptomatology and obtain a complete healing of the area (Figure 7). Postsurgical instructions included prevention of further injury by avoidance of contact sports, meticulous oral hygiene, and rinsing with an antibacterial agent such as chlorhexidine gluconate 0.12%. Healing was uneventful, and the patient did not refer pain nor swelling. 
At the 4-year follow-up visit, clinical and radiographic analysis showed a healed area with a good ossification of the periradicular bone ( Figure 8). Epidemiological Results. Out of a total of 26355 patients who accessed the Dental Emergency Service unit in the period from January 2010 to December 2020, 565 TDIs were registered. Patients' age varied from 0 to 68 years, and the peak age at which TDIs were most represented varied between 2 and 3 years old; however, it was highly represented from 1 up to 7 years old. In 40% of the cases, only one tooth was involved, in 38% two teeth, in 15% three teeth, and in 7% four teeth, for a total of 860 injured teeth. The most frequent injury resulted in an uncomplicated crown fracture (20% of cases), immediately followed by lateral luxation. Successively, this was followed by 18% of intrusive luxation, 17% of avulsion, and 15% of complicated crown fracture (Figure 9). Crown-root fractures were represented in the 2% of the cases. The 57.5% of affected patients were male, while 42.5% were female. The most frequent TDI among males resulted in uncomplicated crown fracture, while the most frequent TDI among females resulted in lateral luxation. Trauma was caused by unintentional falls in 72% of cases, by road accidents in 8%, by accidents at work in 7%, by sports injuries in 7%, by consequences of collision in 5.5%, and by violence and abuse in the remaining 0.5% ( Figure 11). Discussion In the present epidemiological analysis, avulsions were found in 17% of the total population. When related to deciduous teeth, these represented 11.6%, while in permanent teeth these represented 5.4%, in agreement with other authors, but proportionally slightly higher than the percentage reported by Andreasen et al. (4%). In a recent comprehensive review, guidelines for the treatment of avulsed teeth were published. 
In these cases, the choice of treatment is related to the maturity of the root (open or closed apex) and to the condition of the periodontal ligament (PDL) cells. The survival of these cells is dependent on the time out of the mouth and on the storage liquid in which the avulsed tooth is kept, since PDL cells are nonviable after an extraalveolar dry time of 30 minutes. In Clinical Case #1, the total extraoral dry time had been more than 60 minutes, and therefore, the PDL cells were likely to be nonviable. Literature demonstrated that delayed replantation has a poor long-term prognosis and that the periodontal ligament becomes necrotic, is not expected to regenerate, and the expected outcome is ankylosis-related root resorption. The goal of replantation in Clinical Case #1 was to restore, although temporarily, esthetics and function while maintaining alveolar bone height, width, and contour. In this epidemiological analysis, luxation occurred in 41% of the total cases: 27.2% in deciduous teeth and 13.9% in permanent teeth. Lateral luxation represented 20% and intrusive luxation 18%, while extrusive luxation occurred only in 3% of the sample. These data represent a high frequency if compared with Andreasen et al.'s results who reported 9% frequency for nonspecific luxations. Treatment usually involves tooth reposition and semiflexible splinting, as documented by the IADT guidelines. Similarly to Andreasen et al.'s results, the present epidemiological analysis reported that complicated crown fractures occurred in 15% of the cases, showing a higher frequency than other studies. The most represented TDI was the uncomplicated crown fracture, observed in 20% of the cases, while other authors reported different frequencies, such as 13%, 86%, 94%, and 50%. In these cases, a therapeutic approach with a direct composite reconstruction should be preferably performed. 
In the present data collection, crown-root fractures were represented in 2% of the cases: 0.3% concerned deciduous teeth and 1.7% concerned permanent teeth. Root fractures were 3%: 0.2% concerned deciduous teeth and 2.8% concerned permanent teeth. The study by Andreasen et al. instead reports different results, such as 9% of crown-root fractures and 7% of root fractures. The treatment of crown-root fractures depends on the level of the fracture along the root, and therefore different kinds of therapies could be addressed. In accordance with Glendor et al., it was observed that in 55% of the cases, the involved teeth were permanent maxillary incisors, while in 6.5%, those involved were the permanent mandibular ones. Deciduous teeth were involved in 37% of the cases, according to Piovesan et al.'s results that reported rates between 9.4% and 41%. In 40% of the cases, only one tooth was involved; in 36%, two teeth were involved; and in 22%, more than two teeth were involved. This was different from Andreasen et al.'s study that reported a greater difference in cases involving one (80%) and two (18%) teeth. A recent comprehensive review gave useful indications for the treatment of root fractures. In detail, no endodontic treatment should be started at the emergency visit since pulp necrosis and infection could develop later and usually in the coronal fragment only. If endodontic therapy is requested, treatment of the only coronal segment is indicated, and apexification may be needed if the determination of the root canal length may be challenging due to oblique fracture lines. In Clinical Case #2, root canal therapy had to be performed after one month since the tooth did not respond to thermal pulp testing or to the electrometric test. Following IADT guidelines, only the coronal part of the fractured tooth was treated. 
However, after another month, a surgical approach had to be chosen in order to solve the patient's symptomatology that was still present. Concerning a patient's age, the results of this epidemiological investigation are only partially in agreement with previous researches that, although with a larger population sample, revealed a TDI average age incidence of about 15 years. In our case, the peak diagnosis of TDIs has been observed between 2 and 3 years old. The mobility skills of a child between 2 and 3 years of age are still to be developed since they must still acquire an adult confidence. However, the desire to overcome personal limits often pushes the child to test himself, thus risking his own safety, and these aspects could explain the TDI peak incidence in this age group. Other possible causes, such as lack of supervision or child abuse, have to be considered. In accordance with previous studies, about 46% of TDIs were however found in the age group between 1 and 7 years (preschool). At this stage of life, children are very dynamic and sports activities are increasingly practiced, leading to TDIs being consequently much more frequent. As demonstrated by several studies, a prevalence of TDI has been observed among male subjects (57.5%), rather than female (42.5%). This prevalence appears equal to the findings of Glendor et al.'s results that show a 2 : 1 ratio of TDIs between males and females. Concerning TDI reasons, it emerged that the main cause was an unintentional fall, especially in preschool age, which most frequently caused luxation and avulsion. Domestic falls and accidental impacts against objects, in particular interior furnishings such as tables, chairs, and sinks, were recorded in 72% of total cases. Road accidents, which range from falling from a bicycle or moped to, more rarely, car accidents, were reported in 8% of the cases. 
According to these data, accidents at work are also frequent along with sports injuries, causing a significant incidence of avulsions. Different types of TDIs can be observed based on the type of sport activity and therefore of the impact dynamic. High-speed sports seem to lead to bone fractures, while low-speed sports lead to dental injuries. These data are in agreement with previous studies on the etiology of trauma [22]. In fact, sports injuries have been observed in patients aged 12 to 32, road accidents have been observed in patients aged 17 to 62, and workplace accidents have been observed only in adult patients, as well as collisions. In a similar way, in a recent study, the majority of TDI patients were adolescents (27.9%), or younger than 10 years (23.2%). The main cause of TDI was cycling (43.5%), followed by sports such as football and baseball. Although there are well-codified international prevention guidelines, TDIs are still very relevant in pediatric patients. Therefore, there is a need for the development of training and prevention programs for TDI, organizing adequate emergency services, and planning awareness campaigns. Data Availability Data supporting the conclusions of the study can be accessed by asking the corresponding author. Ethical Approval Clinical cases were conducted in accordance with the Declaration of Helsinki. Disclosure The funding source has no role in conceiving and performing the study. Conflicts of Interest The authors declare that there is no conflict of interest regarding the publication of this article. Case Reports in Dentistry
Incorporating covariate information in the covariance structure of misaligned spatial data Incorporating covariates in the second-order structure of spatial processes is an effective way of building flexible nonstationary covariance models. Fitting these covariances requires covariates to already exist at locations where there is response data. However, studies in environmental statistics often involve covariate and response data that are misaligned in space. A common strategy to remedy this is to interpolate the covariate at locations with response data. This introduces a bias in parameter estimation and prediction. To overcome issues associated with spatial misalignment, this work develops a new class of covariate-dependent nonstationary covariance models using basis function expansions. Specifically, both covariate and response processes are represented in terms of basis systems, and the effect of the covariate is introduced on the covariance structure through a linear model between the random coefficients of basis vectors. A spike-and-slab prior is used to determine the structure of the association matrix between the random coefficients of the bases. The effectiveness of this prior is assessed through a simulation study. In addition, results from a real dataset show that the proposed model possesses better spatial prediction and computational advantages over other competing models.
/*
 * GICv3_gicr.c - generic driver code for GICv3 redistributor
 *
 * Copyright (c) 2014-2018 Arm Limited (or its affiliates). All rights reserved.
 * Use, modification and redistribution of this file is subject to your possession of a
 * valid End User License Agreement for the Arm Product of which these examples are part of
 * and your compliance with all applicable terms and conditions of such licence agreement.
 */
#include "GICv3.h"

/*
 * physical LPI Redistributor register map
 */
typedef struct
{
        volatile uint32_t GICR_CTLR;           // +0x0000 - RW - Redistributor Control Register
  const volatile uint32_t GICR_IIDR;           // +0x0004 - RO - Implementer Identification Register
  const volatile uint32_t GICR_TYPER[2];       // +0x0008 - RO - Redistributor Type Register
        volatile uint32_t GICR_STATUSR;        // +0x0010 - RW - Error Reporting Status Register, optional
        volatile uint32_t GICR_WAKER;          // +0x0014 - RW - Redistributor Wake Register
  const volatile uint32_t padding1[2];         // +0x0018 - RESERVED
#ifndef USE_GIC600
        volatile uint32_t IMPDEF1[8];          // +0x0020 - ?? - IMPLEMENTATION DEFINED
#else
        volatile uint32_t GICR_FCTLR;          // +0x0020 - RW - Function Control Register
        volatile uint32_t GICR_PWRR;           // +0x0024 - RW - Power Management Control Register
        volatile uint32_t GICR_CLASS;          // +0x0028 - RW - Class Register
  const volatile uint32_t padding2[5];         // +0x002C - RESERVED
#endif
        volatile uint64_t GICR_SETLPIR;        // +0x0040 - WO - Set LPI Pending Register
        volatile uint64_t GICR_CLRLPIR;        // +0x0048 - WO - Clear LPI Pending Register
  const volatile uint32_t padding3[8];         // +0x0050 - RESERVED
        volatile uint64_t GICR_PROPBASER;      // +0x0070 - RW - Redistributor Properties Base Address Register
        volatile uint64_t GICR_PENDBASER;      // +0x0078 - RW - Redistributor LPI Pending Table Base Address Register
  const volatile uint32_t padding4[8];         // +0x0080 - RESERVED
        volatile uint64_t GICR_INVLPIR;        // +0x00A0 - WO - Redistributor Invalidate LPI Register
  const volatile uint32_t padding5[2];         // +0x00A8 - RESERVED
        volatile uint64_t GICR_INVALLR;        // +0x00B0 - WO - Redistributor Invalidate All Register
  const volatile uint32_t padding6[2];         // +0x00B8 - RESERVED
        volatile uint64_t GICR_SYNCR;          // +0x00C0 - RO - Redistributor Synchronize Register
  const volatile uint32_t padding7[2];         // +0x00C8 - RESERVED
  const volatile uint32_t padding8[12];        // +0x00D0 - RESERVED
        volatile uint64_t IMPDEF2;             // +0x0100 - WO - IMPLEMENTATION DEFINED
  const volatile uint32_t padding9[2];         // +0x0108 - RESERVED
        volatile uint64_t IMPDEF3;             // +0x0110 - WO - IMPLEMENTATION DEFINED
  const volatile uint32_t padding10[2];        // +0x0118 - RESERVED
} GICv3_redistributor_RD;

/*
 * SGI and PPI Redistributor register map
 */
typedef struct
{
  const volatile uint32_t padding1[32];        // +0x0000 - RESERVED
        volatile uint32_t GICR_IGROUPR0;       // +0x0080 - RW - Interrupt Group Registers (Security Registers in GICv1)
  const volatile uint32_t padding2[31];        // +0x0084 - RESERVED
        volatile uint32_t GICR_ISENABLER;      // +0x0100 - RW - Interrupt Set-Enable Registers
  const volatile uint32_t padding3[31];        // +0x0104 - RESERVED
        volatile uint32_t GICR_ICENABLER;      // +0x0180 - RW - Interrupt Clear-Enable Registers
  const volatile uint32_t padding4[31];        // +0x0184 - RESERVED
        volatile uint32_t GICR_ISPENDR;        // +0x0200 - RW - Interrupt Set-Pending Registers
  const volatile uint32_t padding5[31];        // +0x0204 - RESERVED
        volatile uint32_t GICR_ICPENDR;        // +0x0280 - RW - Interrupt Clear-Pending Registers
  const volatile uint32_t padding6[31];        // +0x0284 - RESERVED
        volatile uint32_t GICR_ISACTIVER;      // +0x0300 - RW - Interrupt Set-Active Register
  const volatile uint32_t padding7[31];        // +0x0304 - RESERVED
        volatile uint32_t GICR_ICACTIVER;      // +0x0380 - RW - Interrupt Clear-Active Register
  const volatile uint32_t padding8[31];        // +0x0384 - RESERVED (offset comment fixed: was mislabelled +0x0184)
        volatile uint8_t  GICR_IPRIORITYR[32]; // +0x0400 - RW - Interrupt Priority Registers
  const volatile uint32_t padding9[504];       // +0x0420 - RESERVED
        volatile uint32_t GICR_ICnoFGR[2];     // +0x0C00 - RW - Interrupt Configuration Registers
  const volatile uint32_t padding10[62];       // +0x0C08 - RESERVED
        volatile uint32_t GICR_IGRPMODR0;      // +0x0D00 - RW - Interrupt Group Modifier Register (GICv3 two-Security-state)
  const volatile uint32_t padding11[63];       // +0x0D04 - RESERVED
        volatile uint32_t GICR_NSACR;          // +0x0E00 - RW - Non-Secure Access Control Register
} GICv3_redistributor_SGI;

/*
 * We have a multiplicity of GIC Redistributors; on the GIC-AEM and
 * GIC-500 they are arranged as one 128KB region per redistributor: one
 * 64KB page of GICR LPI registers, and one 64KB page of GICR Private
 * Int registers
 */
typedef struct
{
    union
    {
        GICv3_redistributor_RD RD_base;
        uint8_t padding[64 * 1024];
    } RDblock;
    union
    {
        GICv3_redistributor_SGI SGI_base;
        uint8_t padding[64 * 1024];
    } SGIblock;
} GICv3_GICR;

/*
 * use the scatter file to place GIC Redistributor base address
 *
 * although this code doesn't know how many Redistributor banks
 * a particular system will have, we declare gicrbase as an array
 * to avoid unwanted compiler optimisations when calculating the
 * base of a particular Redistributor bank
 */
static const GICv3_GICR gicrbase[2] __attribute__((section(".bss.redistributor")));

/**********************************************************************/

/*
 * utility functions to calculate base of a particular
 * Redistributor bank
 */
static inline GICv3_redistributor_RD *const getgicrRD(uint32_t gicr)
{
    /* const is cast away: the array is really placed (and written) via the scatter file */
    GICv3_GICR *const arraybase = (GICv3_GICR *const)&gicrbase;

    return &((arraybase + gicr)->RDblock.RD_base);
}

static inline GICv3_redistributor_SGI *const getgicrSGI(uint32_t gicr)
{
    GICv3_GICR *arraybase = (GICv3_GICR *)(&gicrbase);

    return &(arraybase[gicr].SGIblock.SGI_base);
}

/**********************************************************************/

/*
 * Wake up the given Redistributor: clear ProcessorSleep and wait
 * until ChildrenAsleep reads as zero.
 */
void WakeupGICR(uint32_t gicr)
{
    GICv3_redistributor_RD *const gicrRD = getgicrRD(gicr);

#ifdef USE_GIC600
    /* Power up Re-distributor for GIC-600 */
    gicrRD->GICR_PWRR = 0x2;
#endif

    /*
     * step 1 - ensure GICR_WAKER.ProcessorSleep is off
     */
    gicrRD->GICR_WAKER &= ~gicrwaker_ProcessorSleep;

    /*
     * step 2 - wait for children asleep to be cleared
     */
    while ((gicrRD->GICR_WAKER & gicrwaker_ChildrenAsleep) != 0)
        continue;

    /*
     * OK, GICR is go
     */
    return;
}

/*
 * Enable the given SGI/PPI (INTID 0-31) on the given Redistributor.
 * Unsigned shift: 1 << 31 on a signed int would be undefined behaviour.
 */
void EnablePrivateInt(uint32_t gicr, uint32_t id)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    id &= 0x1f;
    gicrSGI->GICR_ISENABLER = 1u << id;
}

/*
 * Disable the given SGI/PPI (INTID 0-31) on the given Redistributor.
 */
void DisablePrivateInt(uint32_t gicr, uint32_t id)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    id &= 0x1f;
    gicrSGI->GICR_ICENABLER = 1u << id;
}

/*
 * Set the priority of the given SGI/PPI.
 */
void SetPrivateIntPriority(uint32_t gicr, uint32_t id, uint32_t priority)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    /*
     * GICD_IPRIORITYR has one byte-wide entry per interrupt
     */
    id &= RANGE_LIMIT(gicrSGI->GICR_IPRIORITYR);
    gicrSGI->GICR_IPRIORITYR[id] = priority;
}

/*
 * Read back the priority of the given SGI/PPI.
 */
uint32_t GetPrivateIntPriority(uint32_t gicr, uint32_t id)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    /*
     * GICD_IPRIORITYR has one byte-wide entry per interrupt
     */
    id &= RANGE_LIMIT(gicrSGI->GICR_IPRIORITYR);
    return (uint32_t)(gicrSGI->GICR_IPRIORITYR[id]);
}

/*
 * Mark the given SGI/PPI as pending.
 */
void SetPrivateIntPending(uint32_t gicr, uint32_t id)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    /*
     * GICR_ISPENDR is one 32-bit register
     */
    id &= 0x1f;
    gicrSGI->GICR_ISPENDR = 1u << id;
}

/*
 * Clear the pending state of the given SGI/PPI.
 */
void ClearPrivateIntPending(uint32_t gicr, uint32_t id)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    /*
     * GICR_ICPENDR is one 32-bit register
     */
    id &= 0x1f;
    gicrSGI->GICR_ICPENDR = 1u << id;
}

/*
 * Return 1 if the given SGI/PPI is pending, 0 otherwise.
 */
uint32_t GetPrivateIntPending(uint32_t gicr, uint32_t id)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    /*
     * GICR_ISPENDR is one 32-bit register
     */
    id &= 0x1f;
    return (gicrSGI->GICR_ISPENDR >> id) & 0x01;
}

/*
 * Set the security group of a single SGI/PPI. Bit 0 of the group
 * argument is the Group bit, bit 1 the (new to GICv3) group modifier.
 */
void SetPrivateIntSecurity(uint32_t gicr, uint32_t id, GICIGROUPRBits_t group)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);
    uint32_t groupmod;

    /*
     * GICR_IGROUPR0 is one 32-bit register
     */
    id &= 0x1f;

    /*
     * the single group argument is split into two separate
     * registers, so filter out and remove the (new to gicv3)
     * group modifier bit
     */
    groupmod = (group >> 1) & 1;
    group &= 1;

    /*
     * either set or clear the Group bit for the interrupt as appropriate
     */
    if (group)
        gicrSGI->GICR_IGROUPR0 |= 1u << id;
    else
        gicrSGI->GICR_IGROUPR0 &= ~(1u << id);

    /*
     * now deal with groupmod
     */
    if (groupmod)
        gicrSGI->GICR_IGRPMODR0 |= 1u << id;
    else
        gicrSGI->GICR_IGRPMODR0 &= ~(1u << id);
}

/*
 * Set the security group of all 32 SGIs/PPIs of this Redistributor at
 * once, replicating bit 0 (Group) and bit 1 (group modifier) of the
 * group argument across every interrupt.
 */
void SetPrivateIntSecurityBlock(uint32_t gicr, GICIGROUPRBits_t group)
{
    GICv3_redistributor_SGI *const gicrSGI = getgicrSGI(gicr);

    /*
     * duplicate each config bit over all 32 bits; the previous
     * signed-shift trick shifted a bit into the sign position, which
     * is undefined behaviour in C - use a well-defined form instead
     */
    const uint32_t grp      = (group & 1)        ? 0xffffffffu : 0u;
    const uint32_t groupmod = ((group >> 1) & 1) ? 0xffffffffu : 0u;

    /*
     * set the security state for this block of SPIs
     */
    gicrSGI->GICR_IGROUPR0  = grp;
    gicrSGI->GICR_IGRPMODR0 = groupmod;
}

/* EOF GICv3_gicr.c */
<gh_stars>0 import { GlobalStore, StoreUpdate } from '../../src/stores/GlobalStore'; function subscribeAndSend(store: GlobalStore, path: string, input: { a: string; }, subscriptionPath: string): Promise<StoreUpdate> { return new Promise((resolve, reject) => { const subscription = store.subscribe(subscriptionPath, (path: StoreUpdate) => { try { subscription.unsubscribe(); resolve(path); } catch (e) { reject(e); } }); store.send(path, input); }); } describe('GlobalStore tests', () => { it('clone object properly', () => { const store = new GlobalStore("my store"); const input = { a: "b" }; store.send("a.b.c", input); const output = store.get("a.b.c"); expect(output.a).toEqual(input.a); output.a = "dirty"; const clone = store.get("a.b.c"); expect(clone.a).toEqual(input.a); }); it('is notified properly', () => { const store = new GlobalStore("my store"); const input = { a: "b" }; const event: Promise<StoreUpdate> = subscribeAndSend(store, "a.b.c", input, "a.b.c"); const expectedObject: StoreUpdate = { path: "a.b.c", value: input }; return expect(event).resolves.toMatchObject(expectedObject); }); it('is notified properly with \'.*\'', () => { const store = new GlobalStore("my store"); const input = { a: "b" }; const event: Promise<StoreUpdate> = subscribeAndSend(store, "a.b.c", input, "a.*"); const expectedObject: StoreUpdate = { path: "a.b.c", value: input }; return expect(event).resolves.toMatchObject(expectedObject); }); it('is notified properly with \'*\'', () => { const store = new GlobalStore("my store"); const input = { a: "b" }; const event: Promise<StoreUpdate> = subscribeAndSend(store, "a.b.c", input, "*"); const expectedObject: StoreUpdate = { path: "a.b.c", value: input }; return expect(event).resolves.toMatchObject(expectedObject); }); });
package com.zss.smile.controller;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;

/**
 * Page navigation controller: maps a path variable directly to the
 * view template of the same name.
 *
 * @author ZSS
 * @date 2021/9/3 11:06
 */
@Slf4j
@Controller
public class PageController {

    /** Icon path browsers request automatically; it has no matching view template. */
    private static final String FAVICON = "favicon.ico";

    /** Fallback view served for favicon requests. */
    private static final String INDEX = "index";

    /**
     * Resolves the requested page name to a view of the same name.
     *
     * @param page requested view name
     * @return the requested view, or the index view for favicon requests
     */
    @GetMapping("/{page}")
    public String helloHtml(@PathVariable String page) {
        // favicon.ico has no template of its own; serve the index page
        // instead of triggering a view-resolution error.
        return StringUtils.equals(FAVICON, page) ? INDEX : page;
    }
}
from __future__ import absolute_import

__author__ = """<NAME> (sebisebi)"""
__email__ = '<EMAIL>'
__version__ = '0.0.11'

from .collection import Collection  # re-exported as part of the package API


# Quick dataset loader methods. Each helper imports its dataset module
# lazily, so importing this package stays cheap and a dataset's optional
# dependencies are only pulled in when that dataset is actually used.

def RACE(*args, **kwargs):
    """Load the RACE reading-comprehension dataset."""
    from data_mine.nlp.RACE import RACEDataset
    return RACEDataset(*args, **kwargs)


def ALLEN_AI_DROP(*args, **kwargs):
    """Load the AllenAI DROP dataset."""
    from data_mine.nlp.allen_ai_drop import DROPDataset
    return DROPDataset(*args, **kwargs)


def CSQA(*args, **kwargs):
    """Load the CommonsenseQA dataset."""
    from data_mine.nlp.CSQA import CSQADataset
    return CSQADataset(*args, **kwargs)


def ALLEN_AI_OBQA(*args, **kwargs):
    """Load the AllenAI OpenBookQA dataset."""
    from data_mine.nlp.allen_ai_obqa import OBQADataset
    return OBQADataset(*args, **kwargs)


def HOTPOT_QA(*args, **kwargs):
    """Load the HotpotQA dataset."""
    from data_mine.nlp.hotpot_qa import HotpotQADataset
    return HotpotQADataset(*args, **kwargs)


def COSMOS_QA(*args, **kwargs):
    """Load the Cosmos QA dataset."""
    from data_mine.nlp.cosmos_qa import CosmosQADataset
    return CosmosQADataset(*args, **kwargs)


def ALLEN_AI_ARC(*args, **kwargs):
    """Load the AllenAI ARC dataset."""
    from data_mine.nlp.allen_ai_arc import ARCDataset
    return ARCDataset(*args, **kwargs)


def TRIVIA_QA(*args, **kwargs):
    """Load the TriviaQA dataset."""
    from data_mine.nlp.trivia_qa import TriviaQADataset
    return TriviaQADataset(*args, **kwargs)
/**
 * Sends a DiscardPdu over the multicast send socket.
 *
 * @param binary The data representing the DiscardPDU.
 * @param destinationAddress The destination of this discard message.
 * @param priority The ACP142 priority, mapped onto the socket's traffic class.
 * @throws IOException if we fail to send.
 */
public void sendDiscardPdu( byte[] binary, InetAddress destinationAddress, int priority ) throws IOException {
    DatagramPacket packet = new DatagramPacket(binary, binary.length, destinationAddress,
            Configuration.getAPort());
    try {
        Log.writeLine(Log.LOG_LEVEL_DEBUG,
                "Sending discardPdu to: " + destinationAddress.getHostAddress());
        setTrafficClass(priority);
        multicastSendSocket.send(packet);
    } catch (IOException e) {
        Log.writeLine(Log.LOG_LEVEL_NORMAL,
                "IOException in sendDiscardPdu in class UDPWrapper");
        throw e;
    } finally {
        // Always restore the default traffic class, even when send() throws,
        // so later datagrams are not sent with this message's priority.
        // (Previously the reset was skipped on failure.)
        multicastSendSocket.setTrafficClass(defaultTc);
    }
}
"""The ants_gui module implements a GUI for Ants vs. SomeBees.

You should not feel that you need to read and understand this file,
because all of the game logic is instead contained within ants.py. We
have provided some comments so that interested students can extend the
graphics.

=== Optional reading beyond this point ===

The GUI for this game has a fixed layout specified by a series of global
names that are constant throughout execution. From a design perspective,
global names are a fine solution for holding constants; most problems
associated with global variables arise in programs that assign to global
names.

The GUI layout itself is divided into a control panel that lists all
implemented ants and a play area populated with places. The Hive is
handled as a special case so that the player can visually inspect how
many bees remain in the beehive.
"""
import ants
import graphics
from graphics import shift_point
from ucb import *
from math import pi
import math
import os
import random

STRATEGY_SECONDS = 3

# Image file for each insect, keyed by its name attribute.
INSECT_FILES = {'Worker': 'img/ant_harvester.gif',
                'Thrower': 'img/ant_thrower.gif',
                'Long': 'img/ant_longthrower.gif',
                'Short': 'img/ant_shortthrower.gif',
                'Harvester': 'img/ant_harvester.gif',
                'Fire': 'img/ant_fire.gif',
                'Bodyguard': 'img/ant_weeds.gif',
                'Hungry': 'img/ant_hungry.gif',
                'Slow': 'img/ant_freeze.gif',
                'Scary': 'img/ant_scary.gif',
                'Ninja': 'img/ant_ninja.gif',
                'Laser': 'img/ant_laser.gif',
                'Wall': 'img/ant_wall.gif',
                'Scuba': 'img/ant_scuba.gif',
                'Queen': 'img/ant_queen.gif',
                'Remover': 'img/remover.gif',
                'Tank': 'img/ant_weeds.gif',
                'Bee': 'img/bee.gif',
                'Wasp': 'img/wasp.gif',
                'Hornet': 'img/hornet.gif',
                'NinjaBee': 'img/ninjabee.gif',
                'Boss': 'img/boss.gif',
                }
TUNNEL_FILE = 'img/tunnel.gif'
ANT_IMAGE_WIDTH = 65
ANT_IMAGE_HEIGHT = 71
BEE_IMAGE_WIDTH = 58
PANEL_PADDING = (2, 4)
PLACE_PADDING = (10, 10)
PLACE_POS = (40, 180)
PANEL_POS = (20, 40)
CRYPT = 650             # y coordinate to which expired insects slide
MESSAGE_POS = (120, 20)
HIVE_HEIGHT = 300
PLACE_MARGIN = 10
LASER_OFFSET = (60, 40)
LEAF_START_OFFSET = (30, 30)
LEAF_END_OFFSET = (50, 30)
# Leaf color per throwing ant type; ants not listed here throw nothing.
LEAF_COLORS = {'Thrower': 'ForestGreen',
               'Short': 'Green',
               'Long': 'DarkGreen',
               'Slow': 'LightBlue',
               'Scary': 'Red',
               'Scuba': 'Blue',
               'Queen': 'Purple',
               'Laser': 'Blue'}


class AntsGUI:
    """GUI-based interactive strategy that logs all colony updates."""

    def __init__(self):
        self.initialized = False

    def initialize_colony_graphics(self, colony):
        """Create canvas, control panel, places, and labels."""
        self.initialized = True
        self.canvas = graphics.Canvas()
        self.food_text = self.canvas.draw_text('Food: 1 Time: 0', (20, 20))
        self.ant_text = self.canvas.draw_text('Ant selected: None', (20, 140))
        self._click_rectangles = list()
        self._init_control_panel(colony)
        self._init_places(colony)
        start_text = self.canvas.draw_text('CLICK TO START', MESSAGE_POS)
        self.canvas.wait_for_click()
        self.canvas.clear(start_text)

    def _init_control_panel(self, colony):
        """Construct the control panel of available ant types."""
        self.ant_type_selected = None
        self.ant_type_frames = []  # rectangle ids of frames.
        panel_pos = PANEL_POS
        for name, ant_type in colony.ant_types.items():
            width = ANT_IMAGE_WIDTH + 2 * PANEL_PADDING[0]
            height = ANT_IMAGE_HEIGHT + 6 + 2 * PANEL_PADDING[1]

            # name=name binds the loop variable per-iteration for the closure.
            def on_click(colony, frame, name=name):
                self.ant_type_selected = name
                self._update_control_panel(colony)

            frame = self.add_click_rect(panel_pos, width, height, on_click)
            self.ant_type_frames.append((name, frame))
            img_pos = shift_point(panel_pos, PANEL_PADDING)
            self.canvas.draw_image(img_pos, INSECT_FILES[name])
            cost_pos = shift_point(panel_pos, (width / 2,
                                               ANT_IMAGE_HEIGHT + 4 + PANEL_PADDING[1]))
            food_str = str(ant_type.food_cost)
            self.canvas.draw_text(food_str, cost_pos, anchor="center")
            panel_pos = shift_point(panel_pos, (width + 2, 0))

    def _init_places(self, colony):
        """Construct places in the play area."""
        self.place_points = dict()
        # self.images: place_name -> insect instance -> image id
        self.images = {'AntQueen': dict()}
        place_pos = PLACE_POS
        width = BEE_IMAGE_WIDTH + 2 * PLACE_PADDING[0]
        height = ANT_IMAGE_HEIGHT + 2 * PLACE_PADDING[1]
        rows = 0
        for name, place in colony.places.items():
            if place.name == 'Hive':
                continue  # Handled as a special case later
            if place.exit.name == 'AntQueen':
                # Start of a new tunnel: begin a fresh row.
                row_offset = (0, rows * (height + PLACE_MARGIN))
                place_pos = shift_point(PLACE_POS, row_offset)
                rows += 1

            def on_click(colony, frame, name=name):
                ant_type = self.ant_type_selected
                existing_ant = colony.places[name].ant
                # NOTE: string comparison must use ==, not identity ('is'),
                # which only appeared to work via CPython string interning.
                if ant_type == 'Remover':
                    if existing_ant is not None:
                        print("colony.remove_ant('{0}')".format(name))
                        colony.remove_ant(name)
                        self._update_places(colony)
                elif ant_type is not None:
                    try:
                        print("colony.deploy_ant('{0}', '{1}')".format(name, ant_type))
                        colony.deploy_ant(name, ant_type)
                        self._update_places(colony)
                    except Exception as e:
                        print(e)

            color = 'Blue' if place.name.startswith('water') else 'White'
            frame = self.add_click_rect(place_pos, width, height, on_click,
                                        color=color)
            self.canvas.draw_image(place_pos, TUNNEL_FILE)
            self.place_points[name] = place_pos
            self.images[name] = dict()
            place_pos = shift_point(place_pos, (width + PLACE_MARGIN, 0))

        # Hive
        self.images[colony.beehive.name] = dict()
        self.place_points[colony.beehive.name] = (place_pos[0] + width, HIVE_HEIGHT)
        self.laser_end = (BEE_IMAGE_WIDTH + 2 * PLACE_PADDING[0]) * len(colony.places)
        for bee in colony.beehive.bees:
            self._draw_insect(bee, colony.beehive.name, True)

    def add_click_rect(self, pos, width, height, on_click, color='White'):
        """Construct a rectangle that can be clicked."""
        frame_points = graphics.rectangle_points(pos, width, height)
        frame = self.canvas.draw_polygon(frame_points, fill_color=color)
        self._click_rectangles.append((pos, width, height, frame, on_click))
        return frame

    def strategy(self, colony):
        """The strategy function is called by the ants.AntColony each turn."""
        if not self.initialized:
            self.initialize_colony_graphics(colony)
        elapsed = 0  # Physical time elapsed this turn
        while elapsed < STRATEGY_SECONDS:
            self._update_control_panel(colony)
            self._update_places(colony)
            msg = 'Food: {0} Time: {1}'.format(colony.food, colony.time)
            self.canvas.edit_text(self.food_text, text=msg)
            pos, el = self.canvas.wait_for_click(STRATEGY_SECONDS - elapsed)
            elapsed += el
            if pos is not None:
                self._interpret_click(pos, colony)
        # Throw leaves at the end of the turn
        has_ant = lambda a: hasattr(a, 'ant') and a.ant
        for ant in colony.ants + [a.ant for a in colony.ants if has_ant(a)]:
            if ant.name in LEAF_COLORS:
                self._throw(ant, colony)

    def _interpret_click(self, pos, colony):
        """Interpret a click position by finding its click rectangle."""
        x, y = pos
        for corner, width, height, frame, on_click in self._click_rectangles:
            cx, cy = corner
            if cx <= x <= cx + width and cy <= y <= cy + height:
                on_click(colony, frame)

    def _update_control_panel(self, colony):
        """Reflect the game state in the control panel."""
        for name, frame in self.ant_type_frames:
            cost = colony.ant_types[name].food_cost
            color = 'White'
            if cost > colony.food:
                color = 'Gray'
            elif name == self.ant_type_selected:
                color = 'Blue'
                msg = 'Ant selected: {0}'.format(name)
                self.canvas.edit_text(self.ant_text, text=msg)
            self.canvas._canvas.itemconfigure(frame, fill=color)

    def _update_places(self, colony):
        """Reflect the game state in the play area.

        This function handles several aspects of the game:
        - Adding Ant images for newly placed ants
        - Moving Bee images for bees that have advanced
        - Moving insects out of play when they have expired
        """
        for name, place in colony.places.items():
            if place.name == 'Hive':
                continue
            current = self.images[name].keys()
            # Add/move missing insects
            if place.ant is not None:
                if hasattr(place.ant, 'is_container') and place.ant.is_container \
                        and place.ant.contained_ant and place.ant.contained_ant not in current:
                    container = self.images[name][place.ant]
                    self._draw_insect(place.ant.contained_ant, name, behind=container)
                if place.ant not in current:
                    self._draw_insect(place.ant, name)
            for bee in place.bees:
                if bee not in current:
                    # The bee advanced: find its old place and slide its image here.
                    for other_place, images in self.images.items():
                        if bee in images:
                            break
                    image = self.images[other_place].pop(bee)
                    pos = shift_point(self.place_points[name], PLACE_PADDING)
                    self.canvas.slide_shape(image, pos, STRATEGY_SECONDS)
                    self.images[name][bee] = image
            # Remove expired insects
            valid_insects = set(place.bees + [place.ant])
            if place.ant is not None and hasattr(place.ant, 'is_container') and \
                    place.ant.is_container:
                valid_insects.add(place.ant.contained_ant)
            for insect in current - valid_insects:
                if not place.exit or insect not in self.images[place.exit.name]:
                    image = self.images[name].pop(insect)
                    pos = (self.place_points[name][0], CRYPT)
                    self.canvas.slide_shape(image, pos, STRATEGY_SECONDS)

    def _draw_insect(self, insect, place_name, random_offset=False, behind=0):
        """Draw an insect and store the ID of its image."""
        image_file = INSECT_FILES[insect.name]
        pos = shift_point(self.place_points[place_name], PLACE_PADDING)
        if random_offset:
            pos = shift_point(pos, (random.randint(-10, 10),
                                    random.randint(-50, 50)))
        image = self.canvas.draw_image(pos, image_file, behind=behind)
        self.images[place_name][insect] = image

    def _throw(self, ant, colony):
        """Animate a leaf thrown at a Bee."""
        bee = ant.nearest_bee(colony.beehive)  # nearest_bee logic from ants.py
        if bee:
            start = shift_point(self.place_points[ant.place.name], LEAF_START_OFFSET)
            end = shift_point(self.place_points[bee.place.name], LEAF_END_OFFSET)
            animate_leaf(self.canvas, start, end, color=LEAF_COLORS[ant.name])


def leaf_coords(pos, angle, length):
    """Return the coordinates of a leaf polygon."""
    angles = [angle - pi, angle - pi/2, angle, angle + pi/2]
    distances = [length/3, length/2, length, length/2]
    return [graphics.translate_point(pos, a, d) for a, d in zip(angles, distances)]


def animate_laser(canvas, start, length, duration=0.6, color='cyan'):
    """Draw a horizontal laser line and clear it after duration seconds."""
    laser = canvas.draw_line(start, (length, start[1]), color, width=3)
    canvas._canvas.after(int(1000*duration) + 1, lambda: canvas.clear(laser))


def animate_leaf(canvas, start, end, duration=0.3, color='ForestGreen'):
    """Define the animation frames for a thrown leaf."""
    length = 40
    leaf = canvas.draw_polygon(leaf_coords(start, 0, length),
                               color='DarkGreen', fill_color=color, smooth=1)
    num_frames = duration / graphics.FRAME_TIME
    increment = tuple([(e-s) / num_frames for s, e in zip(start, end)])

    def points_fn(frame_count):
        nonlocal start
        angle = pi / 8 * frame_count
        cs = leaf_coords(start, angle, length)
        start = shift_point(start, increment)
        return cs

    canvas.animate_shape(leaf, duration, points_fn)
    canvas._canvas.after(int(1000*duration) + 1, lambda: canvas.clear(leaf))


from utils import *

@main
def run(*args):
    ants.Insect.reduce_armor = class_method_wrapper(ants.Insect.reduce_armor,
                                                    pre=print_expired_insects)
    ants.start_with_strategy(args, AntsGUI().strategy)
by Editors’ Note: In honor of Rick Santorum’s sudden emergence in the Iowa caucuses as the anti-Romney du jour, CounterPunch is reprinting this 2003 profile of the Pennsylvania zealot about his career in the United States senate, where he was almost universally reviled as both stupid and mean by his colleagues and staff. –AC / JSC Rick Santorum had only been in the senate for a few weeks when Bob Kerrey, then Senator from Nebraska, pegged him. “Santorum, that’s Latin for asshole.” It was probably the funniest line the grim Kerrey ever uttered and it was on the mark, too. Such a stew of sleazy self-righteousness and audacious stupidity has not been seen in the senate since the days of Steve Symms, the celebrated moron from Idaho. In 1998, investigative reporter Ken Silverstein fingered Santorum as the dumbest member of congress in a story for The Progressive. Considering the competition, that’s an achievement of considerable distinction. Even Santorum’s staff knows the senator is a vacuous boob prone to outrageous gaffes and crude outbursts of unvarnished bigotry. For years, they kept him firmly leashed, rarely permitting him to attend a press interview without a senior staffer by his side. They learned the hard way. While serving in the House, Santorum was asked by a reporter to explain why his record on environmental policy was so dreadful. Santorum replied by observing that the environment was of little consequence in God’s grand plan. “Nowhere in the Bible does it say that America will be here 100 years from now.” The reference was to the Rapture, which apparently is impending. Santorum is the self-anointed prophet of family values on the Hill, who issues frequent jeremiads on the threats Hollywood fare poses to the “fabric of American culture.” Of course, these sermons are hard to swallow from a man with Santorum’s resume. After all, before entering Congress Santorum worked as a lobbyist. His top client? The World Wrestling Federation. 
But now the Republican leadership, apparently cruising along in self-destruct mode, has elevated Santorum to the number three spot in the senate and his staff can’t run interference for him anymore. The results have been comically predictable. Six months ago, Santorum penned an op-ed for a Christian paper blaming the sexual molestation scandals in the Catholic Church on “the culture of liberalism.” Surely, an omen that the senator from Pennsylvania wasn’t quite ready for prime time. So it came to pass that on April 7, Santorum sat down for an interview with AP reporter Lara Jordan. He should have been on his guard. After all, Jordan is married to Jim Jordan, who oversees John Kerry’s presidential campaign. Kerry’s wife, Teresa Heinz, despises Santorum. He inherited the senate seat left open when her previous husband, John Heinz, perished in a plane crash. “Santorum is critical of everything, indifferent to nuance, and incapable of compromise,” Heinz said. This should have been a warning signal to Santorum that the interview with Jordan might be hostile terrain, but his intellectual radar seems to function about as well as Baghdad’s air defense system. Post-war, that is. After a brisk discussion of the degeneracy of American culture, the interview turned to the subject of the pending Supreme Court case on sodomy laws. Like most religious zealots, Santorum is obsessed not just with homosexuals but with visualizing the postures and physical mechanics of homosexual love. He seized on her question with an enthusiasm many Republicans reserve for discussions of the tax code. “I have no problem with homosexuality,” Santorum pronounced. “I have a problem with homosexual acts. As I would with acts of other, what I would consider to be, acts outside of traditional heterosexual relationships. And that includes a variety of different acts, not just homosexual. I have nothing, absolutely nothing against anyone who’s homosexual. If that’s their orientation, then I accept that. 
And I have no problem with someone who has other orientations. The question is, do you act upon those orientations? So it’s not the person, it’s the person’s actions. And you have to separate the person from their actions.” In the past, one of Santorum’s staffers would have found some way to interrupt the interview and deftly muzzle the senator. But he was flying solo and evidently trying to impress Ms. Jordan with his encyclopedic knowledge of the work of Krafft-Ebbing. Note the senator’s excited and flirtatious tone. AP: OK, without being too gory or graphic, so if somebody is homosexual, you would argue that they should not have sex? SANTORUM: We have laws in states, like the one at the Supreme Court right now, that [have] sodomy laws and they were there for a purpose. Because, again, I would argue, they undermine the basic tenets of our society and the family. And if the Supreme Court says that you have the right to consensual sex within your home, then you have the right to bigamy, you have the right to polygamy, you have the right to incest, you have the right to adultery. You have the right to anything. Does that undermine the fabric of our society? I would argue yes, it does. It all comes from, I would argue, this right to privacy that doesn’t exist in my opinion in the United States Constitution, this right that was created, it was created in Griswold – Griswold was the contraceptive case – and abortion. And now we’re just extending it out. And the further you extend it out, the more you – this freedom actually intervenes and affects the family. You say, well, it’s my individual freedom. Yes, but it destroys the basic unit of our society because it condones behavior that’s antithetical to strong, healthy families. Whether it’s polygamy, whether it’s adultery, where it’s sodomy, all of those things, are antithetical to a healthy, stable, traditional family. 
“Every society in the history of man has upheld the institution of marriage as a bond between a man and a woman. Why? Because society is based on one thing: that society is based on the future of the society. And that’s what? Children. Monogamous relationships. In every society, the definition of marriage has not ever to my knowledge included homosexuality. That’s not to pick on homosexuality. It’s not, you know, man on child, man on dog, or whatever the case may be. It is one thing. And when you destroy that you have a dramatic impact on the quality At this point, even the unnerved reporter tried to rein in Santorum. “I’m sorry,” Jordan interjected. “I didn’t think I was going to talk about ‘man on dog’ with a United States senator, it’s sort of freaking me out.” But the man was on a roll and there was no stopping him. “And that’s sort of where we are in today’s world, unfortunately,” Santorum said. “The idea is that the state doesn’t have rights to limit individuals’ wants and passions. I disagree with that. I think we absolutely have rights because there are consequences to letting people live out whatever wants or passions they desire. And we’re seeing it in our society.” There you have it. A case study in the politics of pathological homophobia. Despite outcries from gay Republicans, Bush stood by Santorum in his hour of media martyrdom: “The president believes the senator is an inclusive man,” Ari Fleishcer informed the press. “And that’s what he believes.” Santorum’s pal Tom Delay, the pest exterminator-turned-Republican House Majority Leader, was ebullient. He called Santorum’s remarks “courageous.” Trent Lott must be snickering in the senate cloakroom. Santorum, the Mullah Omar of Pennsylvania, is a ridiculous spectacle but he can’t be taken lightly. He is the slick-haired darling of the neo-cons, an obedient automaton that feverishly promotes their wildest fantasies without hesitation. 
Undeterred by the First Amendment, Santorum says he is planning to introduce legislation that will limit criticism of Israel in colleges and universities that receive federal money. And his passion for Israel is so profound that it obviates even his rancid homophobia. When it comes to the Middle East, liberal Democrats race to co-sponsor legislation with him. Most recently, Santorum and Barbara Boxer teamed up to introduce the Syria Accountability Act, which would inflict trade sanctions on Syria like those which gripped Iraq for 12 years, killing nearly one million children. Talk about family values. Sure, Santorum is an asshole. But he’s not one of a kind. Jeffrey St. Clair’s latest book is Born Under a Bad Sky. He is the co-editor of Hopeless: Barack Obama and the Politics of Illusion, forthcoming from AK Press. He can be reached at: [email protected].
Don't look now, but Hayden Cantrelle is on fire. The Cajuns' shortstop hit for the cycle Tuesday night to lead Louisiana past Northwestern State, 8-3 before 4,718 at M. L. "Tigue" Moore Field at Russo Park. Cantrelle, who was 11-21 last week with ten runs scored, reached base in all five plate appearances Tuesday night, scoring four times. He drew a walk in the first inning, tripled in the third, singled in the fifth, hit a solo homer in the sixth and doubled in the eighth in becoming the fourth Cajun to ever hit for the cycle, the first since Stefan Trosclair did it in 2016. Cantrelle has now reached in 21 of his last 31 plate appearances. Freshman Connor Cooke (2-0) allowed just one run on three hits over four innings to get the win. Jack Burk allowed just one hit over the final three innings to earn his first save of the season. Kole McKinnin drove in the first run with a sacrifice fly. Alex Hannie then followed with his first collegiate hit, a triple to right-center to bring home the second run of the inning. After the Demons touched Cooke for a run in the third, the Cajuns tied it again when Cantrelle tripled and scored on Brennan Breaux's single. Louisiana (10-11) took the lead for good in the fifth when Cantrelle singled, went to second on a ground ball and stole third. After Handsome Monica walked, Hunter Kasuls ripped a triple into the right field corner, scoring both runners. Cantrelle belted his fourth homer of the year, a solo shot in the sixth to make the score 6-3 and in the eighth after Johnathan Windham led off with an infield hit, Cantrelle doubled to complete the rare feat. Windham scored on Monica's sacrifice fly and Cantrelle came around on Kasuls' double to end the scoring. In addition to Cantrelle, Kasuls and Hannie added two hits each, while Breaux, Lott, Veillon and Windham rounded out the Cajuns 12-hit attack. 
Robert Burke (0-1), the third Northwestern State pitcher, was charged with the loss, allowing two runs on two hits in just a third of an inning. J. P. LaGreco, Jakob Nunez and Larson Fontenot each had two hits for the Demons. Louisiana travels to New Orleans to take on Tulane Wednesday night at 6:30 at Greer Field at Turchin Stadium. Pregame show on ESPN 1420 will begin at 6:00 pm.
The Significance of Perfusion-Weighted Magnetic Resonance Imaging in Evaluating the Pathological Biological Activity of Cerebral Alveolar Echinococcosis Objectives This study aimed to evaluate the value of perfusion-weighted magnetic resonance imaging (MR-PWI) in assessing cerebral alveolar echinococcosis (CAE) biological activity. Methods Totally, 15 cases of CAE patients who underwent surgery were enrolled. The MR-PWI perfusion parameters were measured and compared. Results The MR-PWI perfusion parameters cerebral blood flow (CBF), cerebral blood volume (CBV), and mean transit time were different among different areas. Their values were in the descending order of lesion marginal area > contralateral normal brain area > lesion center area. However, time-to-peak value was in the ascending order of lesion marginal area < contralateral normal brain area < lesion center area. Spearman correlation analysis showed that CBF and CBV at the edge of the lesion were significantly positively correlated with microvessel density. Moreover, CBF and CBV at the edge of the lesion were also significantly positively correlated with maximum standardized uptake value. Conclusions Perfusion-weighted magnetic resonance imaging can be used to dynamically reflect the neovascularization of CAE lesions and may have a good application prospect in evaluating the biological activity of CAE.
/*
 * Copyright 2006 National Institute of Advanced Industrial Science
 * and Technology (AIST), and contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ow.tool.util.vizframework;

import java.awt.BasicStroke;
import java.awt.Image;
import java.awt.Point;
import java.awt.Stroke;
import java.awt.geom.Point2D;
import java.net.URL;

import javax.swing.ImageIcon;

import ow.id.ID;

/**
 * Loads the node and mark images used by the visualizer, keeps scaled
 * copies of them (plus the line strokes) in sync with the current zoom
 * scale, and answers location queries for drawing them centered on a node.
 */
class ImageManager {
	public final static int NUM_NODE_IMAGES = 3;
	public final static String[] NODE_IMAGE_NAMES = {
		"resources/node0.gif",
		"resources/node1.gif",
		"resources/node2.gif"
	};
	public final static int NUM_MARK_IMAGES = 1;
	public final static String[] MARK_IMAGE_NAMES = {
		"resources/mark0.gif"
	};

	private double scale, imageScale;
	GeometryManager geometryManager;

	// original images, kept unscaled so repeated rescaling does not degrade them
	private final Image[] origNodeImage;
	private final int origNodeWidth[], origNodeHeight[];
	private final Image[] origMarkImage;
	private final int origMarkWidth[], origMarkHeight[];

	// scaled images, regenerated by setImageScale()
	private Image[] nodeImage;
	private int nodeWidth[], nodeHeight[];
	private Image[] markImage;
	private int markWidth[], markHeight[];

	// strokes for lines, widths scaled along with the images
	private Stroke strokeForMessaging;
	private Stroke strokeForConnection;

	/**
	 * Loads all node and mark images from the classpath and prepares the
	 * initially-scaled copies. Calls VisualizerUtil.fatal() if a resource
	 * is missing.
	 */
	protected ImageManager() {
		// load images
		URL iconURL;
		ClassLoader cl = ImageManager.class.getClassLoader();
		ImageIcon icon = null;

		// load node images
		this.origNodeImage = new Image[NUM_NODE_IMAGES];
		this.origNodeWidth = new int[NUM_NODE_IMAGES];
		this.origNodeHeight = new int[NUM_NODE_IMAGES];
		this.nodeImage = new Image[NUM_NODE_IMAGES];
		this.nodeWidth = new int[NUM_NODE_IMAGES];
		this.nodeHeight = new int[NUM_NODE_IMAGES];
		for (int i = 0; i < NUM_NODE_IMAGES; i++) {
			iconURL = cl.getResource(NODE_IMAGE_NAMES[i]);
			if (iconURL == null) VisualizerUtil.fatal(NODE_IMAGE_NAMES[i] + " not found.");
			icon = new ImageIcon(iconURL);
			this.origNodeImage[i] = icon.getImage();
			this.origNodeWidth[i] = icon.getIconWidth();
			this.origNodeHeight[i] = icon.getIconHeight();
		}

		// load mark images
		this.origMarkImage = new Image[NUM_MARK_IMAGES];
		this.origMarkWidth = new int[NUM_MARK_IMAGES];
		this.origMarkHeight = new int[NUM_MARK_IMAGES];
		this.markImage = new Image[NUM_MARK_IMAGES];
		this.markWidth = new int[NUM_MARK_IMAGES];
		this.markHeight = new int[NUM_MARK_IMAGES];
		for (int i = 0; i < NUM_MARK_IMAGES; i++) {
			iconURL = cl.getResource(MARK_IMAGE_NAMES[i]);
			if (iconURL == null) VisualizerUtil.fatal(MARK_IMAGE_NAMES[i] + " not found.");
			icon = new ImageIcon(iconURL);
			this.origMarkImage[i] = icon.getImage();
			this.origMarkWidth[i] = icon.getIconWidth();
			this.origMarkHeight[i] = icon.getIconHeight();
		}

		// prepare 1.0 scaled images
		this.setScale(Visualizer.DEFAULT_SCALE);
	}

	public GeometryManager getGeometryManager() { return this.geometryManager; }

	/** Replaces the geometry manager and returns the previous one. */
	public synchronized GeometryManager setGeometryManager(GeometryManager gm) {
		GeometryManager old = this.geometryManager;
		this.geometryManager = gm;
		return old;
	}

	/**
	 * Sets the overall scale: rescales the images and propagates the new
	 * scale to the geometry manager. Non-positive or unchanged values are
	 * ignored.
	 */
	public synchronized void setScale(double scale) {
		this.setImageScale(scale);

		if (scale <= 0.0 || scale == this.scale) return;
		this.scale = scale;

		if (this.geometryManager != null) {
			this.geometryManager.setScale(this.scale);
		}
	}

	/**
	 * Regenerates the scaled image and stroke caches for the given scale.
	 * Non-positive or unchanged values are ignored.
	 */
	public synchronized void setImageScale(double scale) {
		if (scale <= 0.0 || scale == this.imageScale) return;
		this.imageScale = scale;

		for (int i = 0; i < origNodeImage.length; i++) {
			nodeWidth[i] = (int)(origNodeWidth[i] * scale);
			nodeHeight[i] = (int)(origNodeHeight[i] * scale);
			nodeImage[i] = origNodeImage[i].getScaledInstance(
					nodeWidth[i], nodeHeight[i], Visualizer.IMAGE_SCALING_ALGORITHM);
		}

		for (int i = 0; i < origMarkImage.length; i++) {
			markWidth[i] = (int)(origMarkWidth[i] * scale);
			markHeight[i] = (int)(origMarkHeight[i] * scale);
			markImage[i] = origMarkImage[i].getScaledInstance(
					markWidth[i], markHeight[i], Visualizer.IMAGE_SCALING_ALGORITHM);
		}

		// keep line widths proportional to the scale, but never thinner than 1px
		float lineWidth;
		lineWidth = (float)(Visualizer.MESSAGING_LINE_WIDTH * scale);
		if (lineWidth < 1.0f) lineWidth = 1.0f;
		strokeForMessaging = new BasicStroke(lineWidth);
		lineWidth = (float)(Visualizer.CONNECTION_LINE_WIDTH * scale);
		if (lineWidth < 1.0f) lineWidth = 1.0f;
		strokeForConnection = new BasicStroke(lineWidth);
	}

	/**
	 * Returns the top-left corner at which the node image of the given type
	 * should be drawn so that it is centered on the node's point.
	 */
	public Point getNodeLocation(ID id, int type) {
		Point2D p = this.geometryManager.getNodePoint2D(id);
		double x = p.getX();
		double y = p.getY();
		// integer division is intentional here: image sizes are int pixels
		x -= nodeWidth[type] / 2;
		y -= nodeHeight[type] / 2;

		return new Point((int)x, (int)y);
	}

	/**
	 * Returns the top-left corner at which the mark image of the given type
	 * should be drawn so that it is centered on the node's point.
	 */
	public Point getMarkLocation(ID id, int type) {
		Point2D p = this.geometryManager.getNodePoint2D(id);
		double x = p.getX();
		double y = p.getY();
		x -= markWidth[type] / 2;
		y -= markHeight[type] / 2;

		return new Point((int)x, (int)y);
	}

	//
	// Accessors
	//

	/**
	 * Returns the scaled node image for the given type, falling back to
	 * type 0 for out-of-range types. (Previously this relied on catching
	 * ArrayIndexOutOfBoundsException; an explicit bounds check avoids
	 * exception-driven control flow with identical behavior.)
	 */
	public Image getNodeImage(int type) {
		if (type < 0 || type >= this.nodeImage.length) {
			type = 0;
		}
		return this.nodeImage[type];
	}

	public int getNodeWidth(int type) { return this.nodeWidth[type]; }
	public int getNodeHeight(int type) { return this.nodeHeight[type]; }

	/**
	 * Returns the scaled mark image for the given type, falling back to
	 * type 0 for out-of-range types.
	 */
	public Image getMarkImage(int type) {
		if (type < 0 || type >= this.markImage.length) {
			type = 0;
		}
		return this.markImage[type];
	}

	public int getMarkWidth(int type) { return this.markWidth[type]; }
	public int getMarkHeight(int type) { return this.markHeight[type]; }

	public Stroke getStrokeForMessaging() { return this.strokeForMessaging; }
	public Stroke getStrokeForConnection() { return this.strokeForConnection; }
}
<filename>System/Library/PrivateFrameworks/UIKitCore.framework/UIStatusBar.h /* * This header is generated by classdump-dyld 1.5 * on Tuesday, November 10, 2020 at 10:22:38 PM Mountain Standard Time * Operating System: Version 14.2 (Build 18K57) * Image Source: /System/Library/PrivateFrameworks/UIKitCore.framework/UIKitCore * classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>. */ #import <UIKitCore/UIKitCore-Structs.h> #import <UIKitCore/UIStatusBar_Base.h> #import <UIKit/UIStatusBarServerClient.h> @class UIStatusBarBackgroundView, UIStatusBarForegroundView, UILabel, UIView, NSString, NSMutableArray, UIStatusBarStyleAttributes, UIColor, UIStatusBarStyleAnimationParameters, NSNumber, NSMutableSet, NSMutableDictionary; @interface UIStatusBar : UIStatusBar_Base <UIStatusBarServerClient> { BOOL _showsForeground; BOOL _observingDefaults; UIStatusBarBackgroundView* _backgroundView; UIStatusBarForegroundView* _foregroundView; UILabel* _doubleHeightLabel; UIView* _doubleHeightLabelContainer; NSString* _currentDoubleHeightText; SCD_Struct_UI115 _currentRawData; NSMutableArray* _interruptedAnimationCompositeViews; UIStatusBarBackgroundView* _newStyleBackgroundView; UIStatusBarForegroundView* _newStyleForegroundView; UIStatusBar* _slidingStatusBar; UIStatusBarStyleAttributes* _styleAttributes; BOOL _waitingOnCallbackAfterChangingStyleOverridesLocally; BOOL _showOnlyCenterItems; BOOL _foregroundViewShouldIgnoreStatusBarDataDuringAnimation; UIColor* _tintColor; UIColor* _lastUsedBackgroundColor; UIStatusBarStyleAnimationParameters* _nextTintTransition; NSNumber* _overrideHeight; NSMutableSet* _disableRasterizationReasons; NSMutableDictionary* _actions; BOOL _disablesRasterization; BOOL _timeHidden; } @property (assign,getter=isTimeHidden,nonatomic) BOOL timeHidden; //@synthesize timeHidden=_timeHidden - In the implementation block +(long long)lowBatteryLevel; +(double)_viewControllerAdjustmentForOrientation:(long long)arg1 
inWindow:(id)arg2 ; +(long long)_deviceUserInterfaceLayoutDirection; +(NSDirectionalEdgeInsets)_roundedPadEdgeInsets; +(NSDirectionalEdgeInsets)_regularPadEdgeInsets; +(double)_roundedPadBaselineOffset; +(double)_regularPadBaselineOffset; +(void)getData:(SCD_Struct_UI115*)arg1 forRequestedData:(const SCD_Struct_UI115*)arg2 withOverrides:(const SCD_Struct_UI118*)arg3 ; +(long long)defaultStyleForRequestedStyle:(long long)arg1 styleOverrides:(int)arg2 ; +(id)_styleAttributesForStatusBarStyle:(long long)arg1 ; +(void)_setDeviceUserInterfaceLayoutDirection:(long long)arg1 ; +(BOOL)_shouldForwardToImplementationClassForStyle:(long long)arg1 ; +(double)_heightForStyle:(long long)arg1 orientation:(long long)arg2 forStatusBarFrame:(BOOL)arg3 inWindow:(id)arg4 ; +(id)_styleAttributesForRequest:(id)arg1 ; +(BOOL)_isLightContentStyle:(long long)arg1 ; +(id)_newStyleAttributesForRequest:(id)arg1 ; +(void)enumerateStatusBarStyleOverridesWithBlock:(/*^block*/id)arg1 ; -(void)dealloc; -(void)observeValueForKeyPath:(id)arg1 ofObject:(id)arg2 change:(id)arg3 context:(void*)arg4 ; -(void)setOrientation:(long long)arg1 ; -(void)setRegistered:(BOOL)arg1 ; -(long long)currentStyle; -(void)setTintColor:(id)arg1 ; -(void)layoutSubviews; -(BOOL)_shouldReverseLayoutDirection; -(void)touchesEnded:(id)arg1 withEvent:(id)arg2 ; -(double)defaultHeight; -(id)_backgroundView; -(BOOL)pointInside:(CGPoint)arg1 withEvent:(id)arg2 ; -(void)_setStyle:(id)arg1 ; -(BOOL)_isTransparent; -(BOOL)_shouldSeekHigherPriorityTouchTarget; -(void)_setDoubleHeightStatusString:(id)arg1 ; -(void)_adjustDoubleHeightTextVisibility; -(void)_finishedSettingStyleWithOldHeight:(double)arg1 newHeight:(double)arg2 animation:(int)arg3 ; -(void)forceUpdate:(BOOL)arg1 ; -(void)_didChangeFromIdiom:(long long)arg1 onScreen:(id)arg2 traverseHierarchy:(BOOL)arg3 ; -(BOOL)isTranslucent; -(void)_setVisualAltitude:(double)arg1 ; -(void)_setVisualAltitudeBias:(CGSize)arg1 ; -(id)statusBarWindow; -(void)setForegroundAlpha:(double)arg1 
animationParameters:(id)arg2 ; -(void)setEnabledPartIdentifiers:(id)arg1 ; -(double)defaultDoubleHeight; -(void)forceUpdateDoubleHeightStatus; -(void)_performBlockWhileIgnoringForegroundViewChanges:(/*^block*/id)arg1 ; -(double)heightForOrientation:(long long)arg1 ; -(void)setPersistentAnimationsEnabled:(BOOL)arg1 ; -(void)_dateTimePreferencesUpdated; -(void)setAction:(id)arg1 forPartWithIdentifier:(id)arg2 ; -(id)actionForPartWithIdentifier:(id)arg1 ; -(CGRect)frameForPartWithIdentifier:(id)arg1 ; -(id)enabledPartIdentifiers; -(id)_doubleHeightStatusStringForStyle:(long long)arg1 ; -(void)statusBarServer:(id)arg1 didReceiveStatusBarData:(const SCD_Struct_UI115*)arg2 withActions:(int)arg3 ; -(void)statusBarServer:(id)arg1 didReceiveStyleOverrides:(int)arg2 ; -(void)statusBarServer:(id)arg1 didReceiveGlowAnimationState:(BOOL)arg2 forStyle:(long long)arg3 ; -(void)statusBarServer:(id)arg1 didReceiveDoubleHeightStatusString:(id)arg2 forStyle:(long long)arg3 ; -(BOOL)isDoubleHeight; -(id)_initWithFrame:(CGRect)arg1 showForegroundView:(BOOL)arg2 wantsServer:(BOOL)arg3 inProcessStateProvider:(id)arg4 ; -(void)_itemViewPerformButtonAction:(id)arg1 ; -(void)_itemViewShouldBeginDisablingRasterization:(id)arg1 ; -(void)_itemViewShouldEndDisablingRasterization:(id)arg1 ; -(void)_updateShouldRasterize; -(void)forceUpdateToData:(const SCD_Struct_UI115*)arg1 animated:(BOOL)arg2 ; -(id)_currentComposedData; -(CGRect)frameForOrientation:(long long)arg1 ; -(id)_currentStyleAttributes; -(id)styleRequest; -(id)_styleAttributesForRequest:(id)arg1 ; -(id)currentStyleRequestForStyle:(long long)arg1 ; -(void)_updateUIWithStyleAttributes:(id)arg1 animationParameters:(id)arg2 forced:(BOOL)arg3 ; -(id)_prepareToSetStyle:(id)arg1 animation:(int)arg2 forced:(BOOL)arg3 ; -(void)_endDisablingRasterizationForReason:(id)arg1 ; -(void)_beginDisablingRasterizationForReason:(id)arg1 ; -(void)_setStyle:(id)arg1 animation:(int)arg2 ; -(BOOL)_shouldUseInProcessProviderDoubleHeightStatusString; 
-(void)_updateBackgroundFrame; -(CGRect)_backgroundFrameForAttributes:(id)arg1 ; -(id)_prepareInterruptedAnimationCompositeViewIncludingForeground:(BOOL)arg1 ; -(void)_updatePersistentAnimationsEnabledForForegroundView:(id)arg1 ; -(id)_currentComposedDataForStyle:(id)arg1 ; -(CGAffineTransform)_slideTransform; -(void)_setFrameForStyle:(id)arg1 ; -(void)_swapToNewBackgroundView; -(void)_crossfadeToNewForegroundViewWithAlpha:(double)arg1 ; -(void)_swapToNewForegroundView; -(void)_crossfadeToNewBackgroundView; -(void)setSuppressesHiddenSideEffects:(BOOL)arg1 ; -(BOOL)_touchShouldProduceReturnEvent; -(void)animateUnlock; -(void)jiggleLockIcon; -(void)setForegroundColor:(id)arg1 animationParameters:(id)arg2 ; -(void)_updateUIWithStyleAttributes:(id)arg1 animationParameters:(id)arg2 ; -(void)setLegibilityStyle:(long long)arg1 animationParameters:(id)arg2 ; -(void)setStyleRequest:(id)arg1 animationParameters:(id)arg2 ; -(void)setEnabledCenterItems:(id)arg1 duration:(double)arg2 ; -(void)forceUpdateData:(BOOL)arg1 ; -(void)_noteStyleOverridesChangedLocally; -(void)forgetEitherSideHistory; -(void)setShowsOnlyCenterItems:(BOOL)arg1 ; -(void)requestStyle:(long long)arg1 animation:(int)arg2 startTime:(double)arg3 duration:(double)arg4 curve:(long long)arg5 ; -(void)_requestStyle:(long long)arg1 partStyles:(id)arg2 animationParameters:(id)arg3 forced:(BOOL)arg4 ; -(void)setTintColor:(id)arg1 withDuration:(double)arg2 ; -(void)_statusBarWillAnimateRotation; -(void)_statusBarDidAnimateRotation; -(void)_setDisablesRasterization:(BOOL)arg1 ; -(void)statusBarStateProvider:(id)arg1 didPostStatusBarData:(const SCD_Struct_UI115*)arg2 withActions:(int)arg3 ; -(void)statusBarStateProvider:(id)arg1 didChangeDoubleHeightStatusStringForStyle:(long long)arg2 ; -(void)_setOverrideHeight:(double)arg1 ; -(void)_clearOverrideHeight; -(BOOL)_rectIntersectsTimeItem:(CGRect)arg1 ; -(BOOL)disablesRasterization; -(BOOL)isTimeHidden; -(void)setTimeHidden:(BOOL)arg1 ; @end
Oxytocin Revisited: Its Role in Cardiovascular Regulation Traditionally associated with female reproduction, oxytocin (OT) was revisited recently and was revealed to have several new roles in the cardiovascular system. Functional OT receptors have been discovered in the rat and human heart, as well as in vascular beds. The cardiovascular activities of OT include: (i) lowering blood pressure; (ii) negative cardiac inotropy and chronotropy; (iii) parasympathetic neuromodulation; (iv) vasodilatation; (v) anti-inflammatory; (vi) anti-oxidative; and (vii) metabolic effects. These outcomes are mediated, at least in part, by stimulating cardioprotective mediators, such as nitric oxide and atrial natriuretic peptide. OT and its extended form OT-Gly-Lys-Arg have been shown to be abundant in the foetal mouse heart. OT has the capacity to generate cardiomyocytes from various types of stem cells, including the cardiac side population. Mesenchymal cells transfected with OT-Gly-Lys-Arg, or preconditioned with OT, are resistant to apoptosis and express endothelial cell markers. OT increases glucose uptake in cultured cardiomyocytes from newborn and adult rats, in normal, hypoxic and even insulin resistance conditions. In rats with experimentally-induced myocardial infarction, continuous in vivo OT delivery improves the cardiac healing process, as well as cardiac work, reduces inflammation and stimulates angiogenesis. Therefore, in pathological conditions, OT exerts anti-inflammatory and cardioprotective properties, and improves vascular and metabolic functions. Thus, OT has potential for therapeutic use.
<reponame>smurfix/micropython-lib import sys from functools import wraps class aclosing: def __init__(self, aiter): self._aiter = aiter async def __aenter__(self): return self._aiter async def __aexit__(self, *args): await self._aiter.aclose() import inspect import collections.abc class YieldWrapper: def __init__(self, payload): self.payload = payload def _wrap(value): return YieldWrapper(value) def _is_wrapped(box): return isinstance(box, YieldWrapper) def _unwrap(box): return box.payload # This is the magic code that lets you use yield_ and yield_from_ with native # generators. # # The old version worked great on Linux and MacOS, but not on Windows, because # it depended on _PyAsyncGenValueWrapperNew. The new version segfaults # everywhere, and I'm not sure why -- probably my lack of understanding # of ctypes and refcounts. # # There are also some commented out tests that should be re-enabled if this is # fixed: # # if sys.version_info >= (3, 6): # # Use the same box type that the interpreter uses internally. This allows # # yield_ and (more importantly!) yield_from_ to work in built-in # # generators. # import ctypes # mua ha ha. # # # We used to call _PyAsyncGenValueWrapperNew to create and set up new # # wrapper objects, but that symbol isn't available on Windows: # # # # https://github.com/python-trio/async_generator/issues/5 # # # # Fortunately, the type object is available, but it means we have to do # # this the hard way. # # # We don't actually need to access this, but we need to make a ctypes # # structure so we can call addressof. 
# class _ctypes_PyTypeObject(ctypes.Structure): # pass # _PyAsyncGenWrappedValue_Type_ptr = ctypes.addressof( # _ctypes_PyTypeObject.in_dll( # ctypes.pythonapi, "_PyAsyncGenWrappedValue_Type")) # _PyObject_GC_New = ctypes.pythonapi._PyObject_GC_New # _PyObject_GC_New.restype = ctypes.py_object # _PyObject_GC_New.argtypes = (ctypes.c_void_p,) # # _Py_IncRef = ctypes.pythonapi.Py_IncRef # _Py_IncRef.restype = None # _Py_IncRef.argtypes = (ctypes.py_object,) # # class _ctypes_PyAsyncGenWrappedValue(ctypes.Structure): # _fields_ = [ # ('PyObject_HEAD', ctypes.c_byte * object().__sizeof__()), # ('agw_val', ctypes.py_object), # ] # def _wrap(value): # box = _PyObject_GC_New(_PyAsyncGenWrappedValue_Type_ptr) # raw = ctypes.cast(ctypes.c_void_p(id(box)), # ctypes.POINTER(_ctypes_PyAsyncGenWrappedValue)) # raw.contents.agw_val = value # _Py_IncRef(value) # return box # # def _unwrap(box): # assert _is_wrapped(box) # raw = ctypes.cast(ctypes.c_void_p(id(box)), # ctypes.POINTER(_ctypes_PyAsyncGenWrappedValue)) # value = raw.contents.agw_val # _Py_IncRef(value) # return value # # _PyAsyncGenWrappedValue_Type = type(_wrap(1)) # def _is_wrapped(box): # return isinstance(box, _PyAsyncGenWrappedValue_Type) # The magic @coroutine decorator is how you write the bottom level of # coroutine stacks -- 'async def' can only use 'await' = yield from; but # eventually we must bottom out in a @coroutine that calls plain 'yield'. #@coroutine def _yield_(value): return (yield _wrap(value)) # But we wrap the bare @coroutine version in an async def, because async def # has the magic feature that users can get warnings messages if they forget to # use 'await'. 
async def yield_(value=None):
    """Yield *value* out of the enclosing @async_generator function."""
    return await _yield_(value)


async def yield_from_(delegate):
    """Delegate to another async iterable, PEP 380 style.

    Forwards values, asend()s, athrow()s and aclose() between the calling
    async generator and *delegate*, and returns the delegate's final value.
    """
    # Transcribed with adaptations from:
    #
    #   https://www.python.org/dev/peps/pep-0380/#formal-semantics
    #
    # This takes advantage of a sneaky trick: if an @async_generator-wrapped
    # function calls another async function (like yield_from_), and that
    # second async function calls yield_, then because of the hack we use to
    # implement yield_, the yield_ will actually propagate through yield_from_
    # back to the @async_generator wrapper. So even though we're a regular
    # function, we can directly yield values out of the calling async
    # generator.
    def unpack_StopAsyncIteration(e):
        # StopAsyncIteration(value) carries the delegate's return value.
        if e.args:
            return e.args[0]
        else:
            return None

    _i = type(delegate).__aiter__(delegate)
    # Pre-3.5.2 __aiter__ was itself awaitable; support both protocols.
    if hasattr(_i, "__await__"):
        _i = await _i
    try:
        _y = await type(_i).__anext__(_i)
    except StopAsyncIteration as _e:
        _r = unpack_StopAsyncIteration(_e)
    else:
        while 1:
            try:
                # Yield the delegate's value out and receive any asend()
                # value pushed back in by our caller.
                _s = await yield_(_y)
            except GeneratorExit as _e:
                # Our caller is being closed: close the delegate (if it
                # supports aclose) and re-raise.
                try:
                    _m = _i.aclose
                except AttributeError:
                    pass
                else:
                    await _m()
                raise _e
            except BaseException as _e:
                # athrow() on our caller: forward to the delegate's athrow
                # if it has one, else re-raise into our caller.
                _x = sys.exc_info()
                try:
                    _m = _i.athrow
                except AttributeError:
                    raise _e
                else:
                    try:
                        _y = await _m(*_x)
                    except StopAsyncIteration as _e:
                        _r = unpack_StopAsyncIteration(_e)
                        break
            else:
                # Normal path: advance the delegate, via asend() when our
                # caller pushed a value in.
                try:
                    if _s is None:
                        _y = await type(_i).__anext__(_i)
                    else:
                        _y = await _i.asend(_s)
                except StopAsyncIteration as _e:
                    _r = unpack_StopAsyncIteration(_e)
                    break
    return _r


# This is the awaitable / iterator that implements asynciter.__anext__() and
# friends.
#
# Note: we can be sloppy about the distinction between
#
#   type(self._it).__next__(self._it)
#
# and
#
#   self._it.__next__()
#
# because we happen to know that self._it is not a general iterator object,
# but specifically a coroutine iterator object where these are equivalent.
class ANextIter:
    """Awaitable/iterator returned by AsyncGenerator.__anext__/asend/athrow.

    Drives the underlying coroutine iterator one step at a time, translating
    between the coroutine protocol and the async-iterator protocol.
    """

    def __init__(self, it, first_fn, *first_args):
        self._it = it
        # The first resumption may be a send() or throw() rather than
        # __next__; stash it until the first __next__ call.
        self._first_fn = first_fn
        self._first_args = first_args

    def __await__(self):
        return self

    def __next__(self):
        if self._first_fn is not None:
            first_fn = self._first_fn
            first_args = self._first_args
            self._first_fn = self._first_args = None
            return self._invoke(first_fn, *first_args)
        else:
            return self._invoke(self._it.__next__)

    def send(self, value):
        return self._invoke(self._it.send, value)

    def throw(self, type, value=None, traceback=None):
        return self._invoke(self._it.throw, type, value, traceback)

    def _invoke(self, fn, *args):
        try:
            result = fn(*args)
        except StopIteration as e:
            # The underlying generator returned, so we should signal the end
            # of iteration.
            raise StopAsyncIteration(e.value)
        except StopAsyncIteration as e:
            # PEP 479 says: if a generator raises Stop(Async)Iteration, then
            # it should be wrapped into a RuntimeError. Python automatically
            # enforces this for StopIteration; for StopAsyncIteration we need
            # to do it ourselves.
            raise RuntimeError(
                "async_generator raise StopAsyncIteration"
            ) from e
        if _is_wrapped(result):
            # A value yielded via yield_(): unwrap it and end this step.
            raise StopIteration(_unwrap(result))
        else:
            # A value being passed through to some other awaitable.
            return result


class AsyncGenerator:
    """The async-generator object returned by an @async_generator function."""

    def __init__(self, coroutine):
        self._coroutine = coroutine
        self._it = coroutine.__await__()
        self.ag_running = False
        self._finalizer = None
        self._closed = False
        self._hooks_inited = False

    # Yecchh: 3.5.0/3.5.1 require __aiter__ to be awaitable.
    if sys.version_info < (3, 5, 2):
        async def __aiter__(self):
            return self
    else:
        def __aiter__(self):
            return self

    ################################################################
    # Introspection attributes
    ################################################################

    @property
    def ag_code(self):
        return self._coroutine.cr_code

    @property
    def ag_frame(self):
        return self._coroutine.cr_frame

    ################################################################
    # Core functionality
    ################################################################

    # These need to return awaitables, rather than being async functions,
    # to match the native behavior where the firstiter hook is called
    # immediately on asend()/etc, even if the coroutine that asend()
    # produces isn't awaited for a bit.

    def __anext__(self):
        return self._do_it(self._it.__next__)

    def asend(self, value):
        return self._do_it(self._it.send, value)

    def athrow(self, type, value=None, traceback=None):
        return self._do_it(self._it.throw, type, value, traceback)

    def _do_it(self, start_fn, *args):
        # Initialize the GC hooks lazily, on the first iteration attempt.
        if not self._hooks_inited:
            self._hooks_inited = True
            (firstiter, self._finalizer) = get_asyncgen_hooks()
            if firstiter is not None:
                firstiter(self)

        async def step():
            if self.ag_running:
                raise ValueError("async generator already executing")
            try:
                self.ag_running = True
                return await ANextIter(self._it, start_fn, *args)
            except StopAsyncIteration:
                raise
            finally:
                self.ag_running = False

        return step()

    ################################################################
    # Cleanup
    ################################################################

    async def aclose(self):
        # NOTE(review): `state` was an undefined name in the original
        # (guaranteed NameError on every aclose() call); reconstructed from
        # the coroutine state, which is what the CORO_CREATED check below
        # requires -- confirm against upstream async_generator.
        state = inspect.getcoroutinestate(self._coroutine)
        if state == inspect.CORO_CLOSED:
            # Already finished/closed; nothing to do (and athrow() into a
            # closed coroutine would raise).
            return
        # Make sure that even if we raise "async_generator ignored
        # GeneratorExit", and thus fail to exhaust the coroutine,
        # __del__ doesn't complain again.
        self._closed = True
        if state == inspect.CORO_CREATED:
            # Make sure that aclose() on an unstarted generator returns
            # successfully and prevents future iteration.
            self._it.close()
            return
        try:
            await self.athrow(GeneratorExit)
        except (GeneratorExit, StopAsyncIteration):
            pass
        else:
            raise RuntimeError("async_generator ignored GeneratorExit")

    def __del__(self):
        # aclose() sets _closed precisely so that __del__ "doesn't complain
        # again" (see comment there). The original gated the body on
        # `if True:`, which defeated that flag; check it for real.
        if self._closed:
            return
        if self._finalizer is not None:
            self._finalizer(self)
        else:
            # Mimic the behavior of native generators on GC with no finalizer:
            # throw in GeneratorExit, run for one turn, and complain if it
            # didn't finish.
            thrower = self.athrow(GeneratorExit)
            try:
                thrower.send(None)
            except (GeneratorExit, StopAsyncIteration):
                pass
            except StopIteration:
                raise RuntimeError("async_generator ignored GeneratorExit")
            else:
                raise RuntimeError(
                    "async_generator {!r} awaited during finalization; install "
                    "a finalization hook to support this, or wrap it in "
                    "'async with aclosing(...):'"
                    .format(self.ag_code.co_name)
                )
            finally:
                thrower.close()


def async_generator(coroutine_maker):
    """Decorator turning an async function into an async-generator factory."""
    @wraps(coroutine_maker)
    def async_generator_maker(*args, **kwargs):
        return AsyncGenerator(coroutine_maker(*args, **kwargs))
    # Marker checked by isasyncgenfunction(); id() ties it to this wrapper.
    async_generator_maker._async_gen_function = id(async_generator_maker)
    return async_generator_maker


def isasyncgen(obj):
    """True for native async generators (3.6+) and AsyncGenerator objects."""
    if hasattr(inspect, "isasyncgen"):
        if inspect.isasyncgen(obj):
            return True
    return isinstance(obj, AsyncGenerator)


def isasyncgenfunction(obj):
    """True for native async-gen functions and @async_generator wrappers."""
    if hasattr(inspect, "isasyncgenfunction"):
        if inspect.isasyncgenfunction(obj):
            return True
    return getattr(obj, "_async_gen_function", -1) == id(obj)


# Very much derived from the one in contextlib, by copy/pasting and then
# asyncifying everything. (Also I dropped the obscure support for using
# context managers as function decorators. It could be re-added; I just
# couldn't be bothered.)
# So this is a derivative work licensed under the PSF License, which requires
# the following notice:
#
# Copyright © 2001-2017 Python Software Foundation; All Rights Reserved


class _AsyncGeneratorContextManager:
    """Async context manager backed by a one-yield async generator."""

    def __init__(self, func, args, kwds):
        self._func_name = func.__name__
        self._agen = func(*args, **kwds).__aiter__()

    async def __aenter__(self):
        if sys.version_info < (3, 5, 2):
            # On 3.5.0/3.5.1 __aiter__ is itself awaitable.
            self._agen = await self._agen
        try:
            # Run up to the generator's (single) yield; its value is the
            # 'as' target.
            return await self._agen.asend(None)
        except StopAsyncIteration:
            raise RuntimeError("async generator didn't yield") from None

    async def __aexit__(self, type, value, traceback):
        async with aclosing(self._agen):
            if type is None:
                # Normal exit: resume the generator and expect it to finish.
                try:
                    await self._agen.asend(None)
                except StopAsyncIteration:
                    return False
                else:
                    raise RuntimeError("async generator didn't stop")
            else:
                # It used to be possible to have type != None, value == None:
                #   https://bugs.python.org/issue1705170
                # but AFAICT this can't happen anymore.
                assert value is not None
                try:
                    await self._agen.athrow(type, value, traceback)
                    raise RuntimeError(
                        "async generator didn't stop after athrow()"
                    )
                except StopAsyncIteration as exc:
                    # Suppress StopIteration *unless* it's the same exception
                    # that was passed to throw(). This prevents a
                    # StopIteration raised inside the "with" statement from
                    # being suppressed.
                    return (exc is not value)
                except RuntimeError as exc:
                    # Don't re-raise the passed in exception. (issue27112)
                    if exc is value:
                        return False
                    # Likewise, avoid suppressing if a StopIteration exception
                    # was passed to throw() and later wrapped into a
                    # RuntimeError (see PEP 479).
                    if (isinstance(value, (StopIteration, StopAsyncIteration))
                            and exc.__cause__ is value):
                        return False
                    raise
                except:
                    # only re-raise if it's *not* the exception that was
                    # passed to throw(), because __exit__() must not raise an
                    # exception unless __exit__() itself failed. But throw()
                    # has to raise the exception to signal propagation, so
                    # this fixes the impedance mismatch between the throw()
                    # protocol and the __exit__() protocol.
                    if sys.exc_info()[1] is value:
                        return False
                    raise

    def __enter__(self):
        # Guard against 'with' being used where 'async with' is required.
        raise RuntimeError(
            "use 'async with {func_name}(...)', not 'with {func_name}(...)'".
            format(func_name=self._func_name)
        )

    def __exit__(self):  # pragma: no cover
        assert False, """Never called, but should be defined"""


def asynccontextmanager(func):
    """Like @contextmanager, but async."""
    if not isasyncgenfunction(func):
        raise TypeError(
            "must be an async generator (native or from async_generator; "
            "if using @async_generator then @acontextmanager must be on top."
        )
    @wraps(func)
    def helper(*args, **kwds):
        return _AsyncGeneratorContextManager(func, args, kwds)
    # A hint for sphinxcontrib-trio:
    helper.__returns_acontextmanager__ = True
    return helper


__all__ = [
    "async_generator",
    "yield_",
    "yield_from_",
    "aclosing",
    "isasyncgen",
    "isasyncgenfunction",
    "asynccontextmanager",
    "get_asyncgen_hooks",
    "set_asyncgen_hooks",
]
One of the most popular topics raised by searchers (not SEOs) in Google help groups is removing content from Google's search results. As you can imagine, Google is not eager to remove content in any case, so they have set up specific rules under which they will remove content. * The site owner has modified a page so that it no longer contains the information or image that concerns you. * The site owner has removed a page/image or blocked it from being indexed by using robots.txt or meta tags. * You've been unable to work with the site owner, but the information appearing in the search results is one of the following: - Your social security or government ID number - Your bank account or credit card number - An image of your handwritten signature - Your full name or the name of your business appearing on an adult content site that's spamming Google's search results. Google has a step by step walk through on removing content from Google. You can start that process over here.
import cv2

# Open the default webcam and request 640x480 frames.
# (The stray '<gh_stars>0' scraper artifact that preceded the import was
# removed -- it is not valid Python and broke the script.)
cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480)
print(cap.get(cv2.CAP_PROP_FPS))

while True:
    success, img = cap.read()
    if not success:
        # Frame grab failed (camera busy/unplugged). The original ignored
        # this flag and would crash inside cvtColor on a None frame.
        break
    img = cv2.flip(img, 1)  # mirror horizontally for a selfie-style view
    img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    # Sobel Edges Detection (first derivatives along x, y, and both)
    sobelX = cv2.Sobel(src=img_gray, ddepth=cv2.CV_64F, dx=1, dy=0, ksize=5)
    sobelY = cv2.Sobel(src=img_gray, ddepth=cv2.CV_64F, dx=0, dy=1, ksize=5)
    sobelXY = cv2.Sobel(src=img_gray, ddepth=cv2.CV_64F, dx=1, dy=1, ksize=5)

    # Laplacian Edges Detection (second derivative)
    laplacian = cv2.Laplacian(img, cv2.CV_64F)

    # Canny Edges Detection (blur first to suppress noise)
    img_blur = cv2.GaussianBlur(img_gray, (3, 3), 0)
    canny = cv2.Canny(image=img_blur, threshold1=100, threshold2=200)

    cv2.imshow("SobelX", sobelX)
    cv2.imshow("SobelY", sobelY)
    cv2.imshow("SobelXY", sobelXY)
    cv2.imshow("Laplacian", laplacian)
    cv2.imshow("Canny", canny)
    cv2.imshow("Original", img)

    # 27 == Esc key
    if cv2.waitKey(5) & 0xFF == 27:
        break

cap.release()
cv2.destroyAllWindows()
#pragma once
#include <mutex>
#include "dx_helpers.h"
#include "CommandAllocatorPool.h"

namespace bdr
{
    // Wraps an ID3D12CommandQueue together with the fence used to track GPU
    // progress on that queue, plus a pool of command allocators recycled
    // based on fence completion.
    //
    // Based off the MiniEngine Example
    // And <NAME>'s Example: http://www.alextardif.com/D3D11To12P1.html
    class CommandQueue
    {
    public:
        CommandQueue(D3D12_COMMAND_LIST_TYPE type);
        ~CommandQueue();

        void init(ID3D12Device* pDevice);
        void shutdown();

        // True once init() has created the underlying D3D12 queue.
        inline bool isReady()
        {
            return m_pQueue != nullptr;
        }

        inline ID3D12CommandQueue* get() { return m_pQueue; }

        // Signals the fence with the next value and returns that value.
        // Locked so concurrent callers get distinct, monotonically
        // increasing fence values.
        uint64_t incrementFence()
        {
            std::lock_guard<std::mutex> lockGuard(m_fenceMutex);
            m_pQueue->Signal(m_pFence, m_nextFenceValue);
            return m_nextFenceValue++;
        }

        bool isFenceComplete(const uint64_t fenceValue);

        // Note that these functions are NOT CPU blocking! They enqueue a
        // GPU-side wait on this queue.
        void insertWait(const uint64_t fenceValue);
        void insertWaitOnOtherQueueFence(const CommandQueue* otherQueue, const uint64_t fenceValue);
        void insertWaitOnOtherQueue(const CommandQueue* otherQueue);

        // These functions are CPU blocking
        void waitForFence(const uint64_t fenceValue);

        // Blocks until the most recently signaled value
        // (m_nextFenceValue - 1) has completed.
        inline void waitForIdle() { waitForFence(m_nextFenceValue - 1); };

        uint64_t executeCommandList(ID3D12CommandList* commandList);

        // Hands out an allocator whose previous work the GPU has finished,
        // per the fence's completed value.
        inline ID3D12CommandAllocator* requestAllocator()
        {
            uint64_t completedValue = m_pFence->GetCompletedValue();
            return m_allocatorPool.requestAllocator(completedValue);
        }

        // Returns the allocator to the pool for reuse once fenceValue has
        // completed on the GPU.
        inline void returnAllocator(const uint64_t fenceValue, ID3D12CommandAllocator* allocator)
        {
            m_allocatorPool.returnAllocator(fenceValue, allocator);
        }

        inline uint64_t getNextFenceValue() { return m_nextFenceValue; }

        // Max of the cached completed value and the fence's current
        // completed value (the cached value can run ahead of a stale query).
        inline uint64_t getCompletedFenceValue() const
        {
            uint64_t completedValue = m_pFence->GetCompletedValue();
            return m_lastCompletedValue > completedValue ?
                m_lastCompletedValue : completedValue;
        }

        // NOTE(review): these members are public (declared before the
        // private: section) -- presumably other code pokes at them directly;
        // confirm before narrowing access.
        ID3D12CommandQueue* m_pQueue;
        const D3D12_COMMAND_LIST_TYPE m_type;
        ID3D12Fence* m_pFence;

    private:
        CommandAllocatorPool m_allocatorPool;

        std::mutex m_fenceMutex;
        std::mutex m_eventMutex;

        uint64_t m_nextFenceValue;
        uint64_t m_lastCompletedValue;
        HANDLE m_fenceEventHandle;
    };

    // Owns the three standard queues (graphics/compute/copy) and creates
    // command lists against them.
    class CommandQueueManager
    {
    public:
        CommandQueueManager();

        void init(ID3D12Device* pDevice);

        void createNewCommandList(
            D3D12_COMMAND_LIST_TYPE type,
            ID3D12GraphicsCommandList** list,
            ID3D12CommandAllocator** allocator);

        // CPU-blocks until all three queues have drained.
        void waitForIdle()
        {
            m_graphicsQueue.waitForIdle();
            m_computeQueue.waitForIdle();
            m_copyQueue.waitForIdle();
        }

        CommandQueue m_graphicsQueue;
        CommandQueue m_computeQueue;
        CommandQueue m_copyQueue;

    private:
        ID3D12Device* m_pDevice;
    };
}
1. Related Field The present invention relates generally to data transmission wherein digital signals are multiplexed and demultiplexed. More particularly the invention relates to a data transmission system according to the preamble of claim 1 and a method according to the preamble of claim 14. The invention also relates to a computer program according to claim 24 and a computer readable medium according to claim 25. 2. Description of Related Art In telecommunication systems of Synchronous Digital Hierarchy (SDH) type, the synchronization signals may be carried by so-called E1 signals (2.048 Mbit/s). Similarly, in Synchronous Optical Networking (SONET) systems, the synchronization signals may be carried by so-called T1 signals (1.544 Mbit/s). In both cases it is the inherent clock frequency of the signals E1/T1 (i.e. 2.048 MHz and 1.544 MHz respectively) that carries the synchronization information through the systems. A telecommunication system normally uses atomic clocks to create the synchronization signals. These signals (e.g. of E1 type) are then transported through the network, and far out in the network the signals may be fed to a base station having a radio interface. In such a case, the synchronization signals will control the radio frequencies transmitted by the base station for communication with for example cell phones. Hence, even very small fluctuations of the clock frequencies, may cause substantial performance problems. To avoid this kind of problems, the wander of the synchronization signals must be lower than predefined limits, e.g. as specified in the standards ITU-T G.823 and ITU-T G.813. Typically, each mobile operator distributes a separate clock signal in its network. A so-called backhaul operator may provide network resources for two or more mobile operators. This means that in a given physical network signals originating from different clock sources may have to co-exist. 
Furthermore, multiple mobile operators may sometimes share a specific base station site. Such a site is a location, e.g. a tower, where a plurality of base stations may be installed. A site sharing situation may arise when one backhaul operator serves a number of mobile operators via one base station (or cell) site. Here, the technical problem for the backhaul operator is to send the different synchronization signals, together with the traffic signals, to the respective mobile operator at the cell site as efficiently as possible. Let us assume that E1 signals are used. Then, each mobile operator uses his own atomic clock to generate all the E1 signals in his system. Since these E1 signals all originate from the same source (in most cases an atomic clock), a group of E1 signals from a given mobile operator may be referred to as a particular synchronization group. For example, an STM-1 signal of 155.52 Mbps may carry up to 63 E1 signals and all these E1 signals would then belong to the same synchronization group. The atomic clock for this synchronization group controls the frequency corresponding to the bit rate, namely 155.52 MHz. The 155.52 MHz frequency will therefore be extremely exact because it originates from an exact clock. The inherent frequency of the E1 signals will be 2.048 MHz and this frequency will also be extremely exact due to that 2.048 MHz is exactly 16×155.52/1215=2.048 MHz. This means that if for example the 155.52 MHz is multiplied with 16 (e.g. with a phase locked loop, PLL) and divided by 1215, an exact clock frequency of 2.048 MHz would be generated. Alternatively, the frequency 2.048 MHz can be generated directly from 155.52 MHz by using so called fractional division. Nevertheless, it has proven to be very difficult to transmit two or more data or clock signals on a multiplex format with good synchronization quality/phase accuracy via a common medium. 
This is especially true if the signals have the same nominal frequencies, however where the signals show slight frequency deviations relative to one another. In particular, difficulties are here encountered in the demultiplexing process, where the wander must be sufficiently low, for instance to meet the requirements of a given telecom standard, such as ITU-T G.813. The term “wander” is defined as low frequency jitter, normally up to 10 Hz. When transferring a single signal, so-called low-factor oversampling may be employed to preserve phase information. By low-factor oversampling is here understood a factor higher than one, however typically lower than two. U.S. Pat. No. 3,819,853, U.S. Pat. No. 4,920,545 and U.S. Pat. No. 6,009,109 show different solutions of this type. Unfortunately, none of these approaches as such can be used to tackle the above-mentioned problem. In the prior art, the problem has instead been avoided by embedding the necessary synchronization signals in the packets of Ethernet streams. Thereby, it has been possible to reconstruct an original signal on a receiver side, and thus through the use of packets, emulate a so-called circuit connection. This strategy is often referred to as “Pseudo Wire” or the high precision time synchronization protocol IEEE 1588 V.2. U.S. Pat. No. 4,873,684 shows a system for multiplexing, transmitting and demultiplexing signals having different frequencies. Here, a reference sample signal is used, which is obtained by multiplying a frequency equal to, or higher than, the frequency of the maximum frequency among the transmitted signals with the number of transmitted signals. Each signal to be transmitted is sampled based on the reference sample signal before being multiplexed into the time division format. Any empty time slots are filled with a dummy signal. Consequently, the frequency requirements may become extreme, and substantial bandwidth resources risk being wasted.
US 2008/0025346 describes a solution for synchronizing and multiplexing asynchronous signals. Here, so-called frame phase absorption is carried out with respect to the incoming asynchronous signals. As a result, synchronous signals are generated to which pointer values are assigned that describe the asynchronous properties. The synchronous signals are then multiplexed through processing of changing pointer values by a pointer transmission section. US 2002/0018493 reveals a digital data transmission system, wherein a plurality of data signals are embedded in a carrier signal using a time division multiplex (TDM) operation. Rate matching is here undertaken between the data signals and the carrier signal by means of stuff locations. Data to be stuffed and the management information for the reassignment are embedded in a path layer overhead of the carrier signal superframe. U.S. Pat. No. 6,888,826 discloses a solution, which enables multiple clock signals to share processing resources. Here, pointers are stored in FIFO buffers, and this in turn, renders it possible to compensate for timing differences between a system clock and the respective outgoing line clock signals, so that the clock signals can be regenerated on the receiver side. US 2005/0078683 describes a data communication system for transferring one or more payload streamed data signals and an auxiliary data signal via a common medium. The auxiliary data signal is organized as data packets, and a transmission data formatter formats these packets into a streamed data signal format. Then, the signal is multiplexed with the payload streamed data signals into a bit stream for transmission. 
Despite the various TDM-based approaches described above, there is no prior solution enabling the transmission of two or more data or clock signals on a multiplex format via a common medium with a sufficiently high phase accuracy to meet the wander requirements of today's most important telecom standards, if said signals are based on different synchronization sources.
Evaluation of amputees During 1978 the Queen Alia Fund registered all disabled persons in the East Bank of Jordan. This paper is intended to investigate the actual condition of a sample selected from those persons registered as amputees. The registration showed a total of 696 amputees and a sample of 100 patients was randomly selected from this group. This study explored the disability, its nature and location, physical and vocational rehabilitation received, and the functional capability of the amputee. The two most important causes which led to amputation were mine explosions and road accidents. 32 persons received physical rehabilitation and a prosthesis was recommended for 56 persons. Only 9 patients received vocational rehabilitation. Final analysis of the data showed a steady increase in amputations over the past two decades, with a doubling of the number from the 1960s to the 1970s. Road accidents played a major role in this increase. It is interesting to note that males and educated persons tend to use their prostheses more than females and illiterate persons. In conclusion, vocational rehabilitation remains an important element missing from the management of amputees. More emphasis and effort should be directed toward this element in order to complete the picture of an effective rehabilitation of this group of disabled persons.
Property Rules, Liability Rules and Inalienability: One View of the Cathedral Property Rules, Liability Rules and Inalienability: One View of the Cathedral is an article in the scholarly legal literature (Harvard Law Review, Vol.85, p. 1089, April 1972), authored by Judge Guido Calabresi (of the United States Court of Appeals for the Second Circuit) and A. Douglas Melamed, currently a professor at Stanford Law School. The article is a seminal contribution to the field of law and economics, offering an ambitious attempt to treat various areas of the law through a uniform approach. It is grounded in the fact that the various interests created by the law enjoy various degrees and methods of protection. Certain interests are deemed human rights and inalienable as such. Other interests are protected by the criminal law; meaning that the state will bear the cost of initiating legal action if violations of such interests are brought to its attention; here begins the criminal law. The burden of proof required for the state to prevail in such cases is higher; thus the beginning of criminal procedure. Other interests give an injured party merely the option of petitioning for injunctive relief. There are yet other interests whose violation gives the injured party no more than the right to seek monetary damages, and only if the victim is willing to bear the costs of initiating legal action; such interests make up the essence of civil law. The burden of proof in such cases is less than in actions initiated under criminal law; thus the beginning of civil procedure. Thesis The title of the article references artist Claude Monet's series of paintings of Rouen Cathedral, modestly implying that the authors' ensuing academic analysis is but one look at a subject that can be considered from various points of view. The primary thesis of the article focuses on the notion of "entitlements," or rights, which can be protected by either property, liability, or inalienability rules. 
The authors' main goal, as noted in the Introduction, is to provide a conceptual framework within which the separate legal subject areas of Property and Torts can be approached from a unified perspective. The article then undertakes an analysis of the classic Law and Economics "Pollution Problem" and an analysis of various criminal sanctions, all within the new framework the authors have put forth. The Concept of Entitlements The article begins by discussing the crucial concept of "entitlements," which are defined as the rights established and protected by law, the absence of which would result in a "might makes right" world where either the strongest or shrewdest emerge victorious in any conflict. Thus, as the authors point out, the fundamental thing that law (the setting of entitlements) does is to decide which of the conflicting parties will be entitled to prevail. The authors also point out, however, that successfully enforcing these entitlements (the law) is as important as establishing them in order to avoid the "might makes right" world. From this notion, the authors consider three types of entitlements: entitlements protected by property rules, entitlements protected by liability rules, and inalienable entitlements. According to Calabresi and Melamed, an entitlement protected by a property rule is one that must be bought in a voluntary transaction in which the value of the entitlement is agreed upon by the buyer and seller. With an entitlement protected by property rule, a collective decision is made as to who is to be given the initial entitlement, but not as to the value of the entitlement itself. An entitlement protected by a liability rule, however, involves a collective decision as to the value of the entitlement without the need for a voluntary transaction. A destroyer of an initial entitlement protected by a liability rule must pay an objectively valued sum to the holder of the entitlement. 
The final entitlement the authors consider in the article is an entitlement protected by an inalienability rule. As the authors define it, an entitlement is inalienable to the extent that its transfer is not permitted between a willing buyer and a willing seller. Under this rule, the state intervenes to determine the initial entitlement, to forbid its sale or purchase, and to determine compensation to be paid if it is sold or destroyed. To conceptualize these three entitlements, the authors use the example of an individual homeowner whose house may be protected by a property rule where another individual wishes to purchase it, a liability rule where the government seizes the home by eminent domain, or an inalienability rule where the homeowner is drunk or incompetent. Having defined the concept of entitlements and laid out the three rules that they will focus on, the article proceeds to explore the following two questions: (1) In what circumstances should a particular entitlement be granted? And, (2) In what circumstances should a property, liability or inalienability rule be used to protect an entitlement? Rule 5: An Expansion of the Cathedral With the opportunity to use either liability or property-based rules to protect entitlements, the academic community soon concluded that the key to figuring out which rule to use turned on the transaction costs. Therefore, if there were low transaction costs, then property rules should be used. If the transaction costs were high, then liability rules should be used. A. Mitchell Polinsky, Josephine Scott Crocker Professor of Law and Economics at Stanford Law School, argued that transactions costs are not the only impediments to bargaining, there is also the problem of assessment costs. Assessment costs are the costs associated in obtaining and processing information. There is a long understanding in the academic community that when transaction costs are high a liability rule will help substitute for bargaining. 
Judges, however, may have a difficult time in obtaining and processing information in order to assess damages. There are two ways to assess damages. An objective method uses the market price. The market price, however, can underestimate the truth of the matter. A subjective method of calculation is the alternative. Under the subjective method, the amount of damages is the minimum price the entitlement holder would accept in a bilateral exchange. Unfortunately, the objective method does not capture the correct amount of damages, and the subjective method would be difficult for a judge to implement. Rule 5, as advocated by James E. Krier, Earl Warren DeLano Professor of Law at the University of Michigan Law School, and Stewart Schwab, Professor of Law at Cornell Law School, in Property Rules and Liability Rules: the Cathedral in another Light, provides a solution for the shortfalls of Rule 4. Under Rule 5, the court would use a best-chooser principle: All other things being equal, when liability rules are used the party who is the best chooser should be confronted with the decision whether or not to force a sale upon the other party. Krier and Schwab argue that the best-chooser principle is "faithful to the approach of Calabresi & Melamed [because] other things being equal, costs of activity 'should be put on the party or activity which can with the lowest transaction costs act … to correct an error in entitlements by inducing the party who can avoid social costs most cheaply to do so.'" The first step in implementing Rule 5 begins with picking the best chooser. Usually, the best chooser is the smallest number party. The best chooser will then have to "compare its own opportunity cost to the opportunity cost (figured by the judge) of the other party or parties." The second step is the implementation of the double-reverse twist proposed by Krier and Schwab. First, the judge estimates the other party's social costs (damages caused by the best chooser).
Then, the judge enters an order requiring the best chooser to decide whether to continue causing damage to the other party without receiving any compensation or stop causing damage and receive compensation. Therefore, if the best chooser decides to stop causing harm to the other party, then the other party will have to pay the best chooser damages calculated by the judge. Should the best chooser decide to stay, then the best chooser continues to cause harm and receives no payment.
STOCHASTIC SPATIOTEMPORAL DIFFUSIVE PREDATOR-PREY SYSTEMS. In this paper, a spatiotemporal diffusive predator-prey system with Holling type-III is considered. By using a Lyapunov-like function, it is proved that the unique local solution of the system must be a a global one if the interaction intensity is small enough. A comparison theorem is used to show that the system can be extinction or stability in mean square under some additional conditions. Finally, an unique invariant measure for the system is obtained. 1. Introduction. In the ecosystem, none of the species survives alone. The relation of two species can be described by competition, predator-prey, auspiciousness and so on. Among them, the predator-prey interaction is a significant one, which was introduced by Lotka and Volterra( ) and has been developing rapidly in the last decades( ). In this paper, we consider a stochastic homogeneous spatiotemporal diffusive predator-prey system with functional response. In 2016, J.F. Wang() studied the dynamics of a deterministic homogeneous diffusive predator-prey system with Holling type-III, it takes the form: unit outer normal, we suppose the system is a closed one so that there is no flux boundary condition. But as we all know, in reality, random disturbance of various forms is everywhere. The disturbance which is described by White noise often plays an important role in the behaviour of the solution, even in the existence of the solutions. Almost all the statistical data showed that biological process has marked random fluctuation( ). But the deterministic systems always assume that the parameters of the models have nothing to do with the environmental disturbance. Hence the description and prediction of the deterministic systems are always less than satisfactory. 
Motivated by the issues, a great number of scholars introduced stochastic mathematical models instead of deterministic ones to exposit the population dynamics affected by environmental fluctuations in an ecosystem( ). Even so, as far as we know, studies about the predator-prey system with Holling type-III and the effect of a randomly fluctuating environment are relatively rare. We introduce the space-time stochastic perturbations by White noises into the equation directly, and obtain the following corresponding stochastic reaction-diffusion model: where W 1 (x, t), W 2 (x, t) are independent spatially dependent Wiener fields, (x, t) means the formal time derivative ∂ ∂t W (x, t) of W (x, t), and 2 1 > 0, 2 2 > 0 represent the intensities of the White fields. Suppose the initial values u 0, v 0 satisfy 0 < u 0 ≤ u, 0 < v 0 ≤ v, here u and v are both positive constants. In the corresponding deterministic system, the existence and boundedness of the non-negative global solution is shown in by a general result. Meanwhile, there are three nonnegative constant equilibrium solutions:, and (, v ) of system, where and the positive equilibrium solution (, v ) exists if and only if 0 < < 1, i.e. m > max{d, (a 2 −1)d}. gives a stability result regarding the equilibrium (, v ) and is under some conditions respectively. But for the stochastic case, because we formulate system by stochastic perturbations 1 u 1 and 2 v 2 directly, there is no positive equilibrium point as a solution. Hence, the solution of will not tend to a point. Things are quite different from the deterministic model, even the existence of the solution. So, we begin with discussing the existence of the global solution. Comparing with the condition for the existence of the positive equilibrium solution of, we find if the interaction intensity m satisfies m < min{2a 2, d − 2 2 2 } and d > 2 2 2, has a unique global positive mild solution. 
The difficulty here is f only satisfy local Lipschitz condition so the solution is a local one. Consequently, we apply a Lyapunov-like function and a stop time to overcome the troubles. It is generally known that permanent is the most important property of a system, it means every species in this system can survive with other species together continuously. And closely related to that is extinction. A lot of literatures talked about permanent, extinction and stability of a ecosystem, like. In Section 4, by Comparison theorem, we prove that if the strength of the White noise is large, 2 1 > 2, 2 2 > 2m, the stochastic system will not be permanent. And if where − ∆ be the principle eigenvalue of ∆, the system will be stability in mean square. Finally, we prove that the global mild solution is a Markov process. Under stronger conditions 2 of course under the conditions for the existence too, the stable system has a unique invariant measure which is a more delicate description of the L 2 exponential stability. Throughout this paper, we assume (, F, {F t } t≥0, P) is a complete probability space, {F t } t≥0 is a right continuous filtration and F 0 contains all P-null sets. Preliminary. We consider a nonlinear diffusion-reaction equation problem with White noise: where, d i > 0(i = 1,, m) and > 0. We recall some basic well known results(see, e.g. ). Let {W(x, t)} 0≤t≤T be a R−Wiener process in H = L 2 (D, R m ), satisfying We recall the eigenvalues and eigenvectors of the operator −A in H = L 2 (D, R m ). By the compact operator theory, the corresponding eigenvalues { k } and the eigen- and {e k } ⊂ H are the complete orthogonal basis functions of H. GUANQI LIU AND YUWEN WANG Define the Green function of linearized equation corresponding for the operator A is hence the equivalent integral equation of the equation is can be rewriten as here the adjoint Green operation semigroup { t } t≥0 is dW t = W(, dt). 
To prove there is a local solution of, we introduce the following three conditions: (A3) {g(, t)} 0≤t≤T is a H-valued process which is predictable, and Proof. The proof is similar to that of Theorem 6.5 in. Since the It formula is no longer valid for a mild solution, we should introduce a strong solution approximating system to which It's formula can be applied. An approximating system of is Lemma 2.4. Assume for any U 0 ∈ H is an given stochastic variable, E∥U 0 ∥ p < ∞(p > 2 is an integer). If f (,, ) + g(, ), (, ) in satisfy not only the local Lipschitz condition (A2) but also conditions (A1), (A3), then Proof. Since AR(l) = AlR(l, A) = l − l 2 R(l, A) are bounded operators. By Lemma 2.2, has a local mild solution, and Proposition 1.3.5 in indicates the local mild solution is also a local strong solution. The remainder of the proof is similar to that of Proposition 1.3.6 in. The difference is that we can deduce that Existence of the global positive solution. In this section, we consider the existence of the global solution for. The first step towards the existence is to obtain a positive local solution of. For a function V (U ) ∈ C 2 (R n ; R), we define a differential operator L with by Since there exists a unique local positive solution U (x, t) = (u(x, t), v(x, t)) T of for t ∈ , we need to consider the following approximation system of strong solution: From Lemma 2.4, we can see that has a unique local strong solution U n t and lim n→∞ U n t = U (x, t), a.s. uniformly for t ∈ , where U (x, t) is the unique local mild solution of. We need to proof M = ∞, then the solution is global. In fact, Lemma 4.1 remains true if the uniformly Lipschitz condition was replaced by local Lipschitz condition. Indeed, if that would not be the case, there must exist R, T > 0, such that By Comparison Theorem Lemma 4.1, we obtain For the mild solution u 1 (x, t) of, where x ∈ D, u is the semigroup generated by d 1 ∆ in C(D). 
Since u is a contraction semigroup and in addition, W1 t → 0 and 2 1 > 2, we have ∥u 1 (x, t)∥ C(D) → 0 as t → ∞. Therefore, The interpretation of the above result is: if the intensities 2 1, 2 2 of White noise are large, for example, 2 1 > 2, 2 2 > 2m, the prey and the predator will die out. Markov property, uniqueness of invariant measure and ergodic. In this section, we prove the Markov property for the solution of and follow the methods in and to seek for an unique invariant measure for. For a H-valued random variable X, and a probability measure P on, then by L (X) we denote the law of X: Following, let P s,t and P (s, x, t; ), t ≥ 0, u ∈ H, ∈ B(H) be the corresponding transition semigroup and transition function to U (t, s, u), here B(H) is the smallest −field containing all closed (or open) subsets of H. Thus where is the characteristic function of the set and B b (H) is the Banach space of all real bounded Borel functions, endowed with the sup norm. First, we have the Markov property of U (t, s, U 0 ), t ≥ s. Proof. By a series of simple but cumbersome calculations, for, we have Also, by Theorem 4.3, there is a positive constant C 1, such that where C 1 is a positive constant. In this theorem, U (t, s, U 0 ) will denote the unique mild solution of        dU = AU dt + f (U, t)dt + (U, t)dW, t ≥ s ∂U ∂n | ∂D = 0, the corresponding approximation system of strong solution is        dU n = AU n dt + R(n)f (U n, t)dt + R(n)(U n, t)dW, t ≥ s ∂U ∂n | ∂D = 0, U n (s) = R(n)U 0. Remark 1. If is the unique invariant measure for P s,t, then it is ergodic.
Concerned by the hateful rhetoric that has accompanied President-elect Donald Trump’s transition to the White House, a group of 1,200 historians and other scholars have put out a powerful statement urging Americans to stand guard against civil rights abuses. The statement was first created by three associate professors at Northwestern University, Oberlin College, and the University of Kansas who were alarmed about parallels between the current political climate and instances throughout history when Americans’ rights have been suspended, like during World War II. They originally planned to collect signatures from a small group of scholars and then publish a letter or an op-ed, says Shana Bernstein of Northwestern, one of the organizers, but interest spread quickly as they reached out to their networks. Historians from a range of institutions signed on, including those from Harvard, Yale, Stanford, and many other elite universities, as well as independent scholars. Among the signatories were six Pulitzer Prize winners, a MacArthur “Genius” award recipient, five Bancroft Prize winners, and at least 12 Guggenheim Fellows. “I continue to receive inquiries about signing the letter, from people both inside and outside academia,” Bernstein says, noting that they only included scholars of US history and related fields. And Now We’re Talking About Internment Camps?
Sites of the Unconscious: Hypnosis and The Emergence of the Psychoanalytic Setting. by Andreas Mayer; Translated by Christopher Barber Much of Sherick's book could serve equally well in helping parents understand their own experience in relation to their child's. For more seasoned clinicians, his discussion is a helpful refresher and a good model for translating our abstract theoretical formulations into language that is accessible to families and parents. In short, Ivan Sherick has done our field an important service by writing a book about a highly complex subject that clearly demonstrates the value of a consistent, coherent theoretical perspective and the depth of meaning psychoanalysis offers to understanding a human life. DEBRA ROELKE (MORRISTOWN, NJ)
NGC 7531 Characteristics The inner region of NGC 7531 is characterised by a high surface brightness ring. The ring is a place of active star formation and a number of star clusters and HII regions have been identified in it. The star formation rate of the inner ring is estimated to be 0.41 ± 0.12 M☉ per year based on H-alpha emission. A weak bar is observed in the near infrared inside the ring, along with dust lanes. Yet, the motions at the inner ring are predominantly circular. The inner ring may lie at the location of the inner Lindblad resonance. The galaxy has two patchy spiral arms that are relatively well defined but are of low surface brightness. HII regions have also been observed at the spiral arms of the galaxy, where there is also active star formation, which is more intense at the southwest region. A supermassive black hole is believed to lie in the centre of the galaxy whose mass is estimated to be 3–48 million (10^(7.07±0.61)) M☉, based on the spiral arm pitch angle. The galaxy is seen with an inclination of 66°. One supernova has been observed in NGC 7531, SN 2012dj. It was a type Ib/c supernova with a peak magnitude of 15.3. Nearby galaxies In long exposure photographic plates of the galaxy, a large low surface brightness region structure was observed 2.6 arcminutes west from the nucleus of the galaxy, that has more than half the apparent diameter of NGC 7531. It has been identified as a stellar cloud that may be a dwarf satellite of the galaxy or a shell created by the tidal disruption of NGC 7531. NGC 7531 is a member of the NGC 7582 galaxy group. Other members of the group include NGC 7552, NGC 7582, NGC 7590, and NGC 7599, which are also known as the Grus Quartet, NGC 7496, NGC 7632, and IC 5325. This group, along with the group centred around IC 1459, forms the Grus cloud, a region of elevated galaxy density. The Grus cloud, along with the nearby Pavo-Indus cloud, lies between the Local Supercluster and Pavo-Indus Supercluster.
North Carolina's soon-to-be Gov. Pat McCrory is off to a good start if his Monday interview on cable TV is an indication. Appearing on Fox News Channel's morning show, the governor-elect offered some sensible — and encouraging — answers to questions about his plans to improve the state's job situation. To those who followed the campaign, it comes as no surprise that McCrory is looking to the energy sector to create new jobs. The Charlotte area includes a wealth of energy jobs and McCrory has a strong knowledge of the business from his long tenure with Duke Energy. For starters, McCrory has his eye on tapping into North Carolina's rich supply of natural gas, the same idea advanced in the Legislature last year by Rep. Kelly Hastings. New jobs would come from exploration as well as from processing and delivery to consumers, and tapping into natural gas supplies would expand an already strong North Carolina industry that employs thousands. Hastings, representing Gaston and Cleveland counties, is co-chairman of the House committee on energy independence and alternative fuel. As such, he is likely to have a higher-profile role in the governor's jobs/energy plan. The new governor is sure to encounter opposition from rabid environmentalists with strong ties to the Obama administration and from the administration itself, but McCrory made it clear he wants to work with the president to advance the country's independence from foreign energy. The electorate is sure to appreciate the idea of politicians in opposite parties working together for the good of the country. McCrory, in the TV interview, offered a preview of how he intends to work collaboratively with governors of neighboring states on energy and on the complex and burdensome regulations coming down with Obamacare. Thumbs up to the Governor-elect for voicing the idea that state government can provide solutions and that leadership does not rest with the federal government alone.
A coalition among states with common concerns can lead to better solutions than a one-size-fits-all federal approach. And state legislatures can be more nimble than Congress, leading to government that is more responsive to the public. These ideas, along with the Herculean task of revamping the state's tax code to be more favorable to new and existing business and industry, demonstrate the leadership and vision the new governor will bring to Raleigh in his quest to improve the economy of North Carolina.
import os

import pytest


class ServerMixin:
    """Mixin providing connection arguments for Fastmail-backed storage tests.

    Expects the test class to define ``storage_class`` (a vdirsyncer storage
    type whose ``fileext`` selects the CalDAV vs. CardDAV endpoint).
    """

    @pytest.fixture
    def get_storage_args(self, item_type, slow_create_collection):
        if item_type == "VTODO":
            # Fastmail has non-standard support for TODOs.
            # See https://github.com/pimutils/vdirsyncer/issues/824
            pytest.skip("Fastmail has non-standard VTODO support.")

        def make_args(collection="test"):
            # Credentials are supplied via the environment (CI secrets);
            # a missing variable raises KeyError, failing the test early.
            args = {
                "username": os.environ["FASTMAIL_USERNAME"],
                "password": os.environ["FASTMAIL_PASSWORD"],
            }

            # Pick the endpoint matching the storage's item file extension.
            ext = self.storage_class.fileext
            if ext == ".ics":
                args["url"] = "https://caldav.fastmail.com/"
            elif ext == ".vcf":
                args["url"] = "https://carddav.fastmail.com/"
            else:
                raise RuntimeError()

            if collection is not None:
                args = slow_create_collection(self.storage_class, args, collection)
            return args

        return make_args
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;

const ll oo = 1e18;

// Plan i sells packages of a_i MB at t_i s/MB for price p_i.
ll a1, t1, p1, a2, t2, p2;
// f MB must be downloaded within T seconds; default speed is t0 s/MB.
ll f, T, t0;

inline void chkmin(ll& a, ll b){ if(a > b) a = b; }

// Enumerate how many packages of plan 1 to buy; for the leftover data,
// either the default speed suffices or we buy the minimal number of
// plan-2 packages that makes the deadline. Reads the globals above and
// returns the cheapest cost found, or oo if no combination works.
// (Previously this loop was duplicated verbatim before/after the swap.)
ll bestWithFirstPlan() {
    ll ans = oo;
    for (ll i = 0; i <= f / a1 + 1; i++) {
        ll leftT = T - i * a1 * t1;  // time remaining after plan-1 data
        ll leftf = f - i * a1;       // data remaining
        if (leftT < 0) break;        // more packages only cost more time
        if (leftf <= 0) {            // plan 1 alone covers everything
            chkmin(ans, p1 * i);
            continue;
        }
        if (leftf * t0 <= leftT) {   // default speed finishes in time
            chkmin(ans, p1 * i);
            continue;
        }
        if (leftf * t2 <= leftT) {
            // Here leftf*t2 <= leftT < leftf*t0 with leftf > 0, so t2 < t0
            // and the divisor below is strictly positive. (The original
            // computed this quotient before the guard, dividing by zero
            // when t0 == t2.)
            ll need = (t0 * leftf - leftT + t0 - t2 - 1) / (t0 - t2);
            ll pkgs = (need + a2 - 1) / a2;
            chkmin(ans, p1 * i + pkgs * p2);
        }
    }
    return ans;
}

int main(){
    scanf("%lld%lld%lld", &f, &T, &t0);
    scanf("%lld%lld%lld", &a1, &t1, &p1);
    scanf("%lld%lld%lld", &a2, &t2, &p2);
    // No purchase needed if the default speed already meets the deadline.
    if (t0 * f <= T) {
        puts("0");
        return 0;
    }
    ll ans = bestWithFirstPlan();
    // Swap the two plans' parameters and search again with their roles
    // reversed, covering combinations dominated by plan 2.
    swap(a1, a2); swap(t1, t2); swap(p1, p2);
    chkmin(ans, bestWithFirstPlan());
    if (ans == oo) ans = -1;  // impossible
    printf("%lld\n", ans);
    return 0;
}
Phenomenological aspects of new gravitational forces. II. Static planetary potentials. Theories of quantum gravity generically predict that there are vector and scalar partners of the graviton. These bosons produce new gravitational effects which, in general, tend to cancel in matter because the vector boson produces a repulsive force, whereas a scalar boson produces an attractive force. In this paper we derive the effects which these new interactions will produce in planets. In particular, static Yukawa potential theory is elucidated for massive (nonpointlike) objects.
A Brand Theory of Trademark Law Trademark law is incoherent, and it fails to manage all the interests at stake in the modern business environment. This failure flows from a core misunderstanding. Trademark law has not grasped that is managing brands, not trademarks. In this article, Professor Deven Desai develops a new theory of trademarks which provides trademark law with a way out of its current confusion. Professor Desai argues that trademark law really protects brands and must fully embrace this fact. He demonstrates how his brand theory of trademark law will avoid the incoherence and problems from which trademark currently suffers and offers a framework to understand the purpose, function, and scope of trademark law.Published version, titled 'From Trademarks to Brands', Florida Law Review, Vol. 64, No. 4, pp. 981-1044, 2012.
Effect of micro-strain stress on in vitro proliferation and functional expression of human osteoarthritic chondrocytes Background This study aimed to analyze the in vitro effect of micro-strain stress on the proliferation and functional marker expression in chondrocytes isolated from human osteoarthritis cartilage samples. Methods Chondrocytes isolated from human osteoarthritis cartilage samples were subjected to loading with different types of micro-strain stress. The proliferation activity was assessed by flow cytometry, and the functional expression of chondrocyte markers was detected by qRT-PCR and western blot. Results Flow cytometry results showed stimulation of proliferation of human osteoarthritic chondrocytes when an adequate micro-strain stress was applied. qRT-PCR and western blot results showed that micro-strain stress promotes human osteoarthritic chondrocyte functional marker expression. These features coincide with the upregulation of multiple proteins and genes affecting cell proliferation and functional chondrocyte marker expression, including cyclin D1, collagen II, and Rock. Conclusion Adequate micro-strain stress could activate the Rho/Rock signaling pathway in osteoarthritic chondrocytes, thus transmitting mechanical signals to the cytoskeleton. This process leads to cytoskeleton reorganization, and transmission of the mechanical signals to the downstream effectors to promote proliferation and functional marker expression of osteoarthritic chondrocytes. Introduction Osteoarthritis is the most common type of joint disease, and its pathology is characterized by chondrocyte apoptosis and cartilage matrix destruction. Cartilage tissue, which covers the joint surface, is a specialized connective tissue without blood vessels. It is stimulated continuously by endogenous and exogenous mechanical stimuli. Mechanical stimulation in a specific range plays a critical role in the maintenance of the structural integrity of articular cartilage. 
Some in vitro studies have shown that loading cultured chondrocytes with different types of mechanical stimulation could produce varying degrees of cell biological changes. The cellular mechanism of how osteoarthritic chondrocytes sense mechanical stress stimulation signals and transfer them into the cells to regulate bone remodeling is not thoroughly understood. The cytoskeleton is a critical component involved in the maintenance of cell morphology and various cellular functions. After mechanical stimulation, the cytoskeletal structures undergo dynamic rearrangement, polymerization, and depolymerisation. The cytoskeleton plays a vital role in the maintenance of the intracellular structures and could rapidly transmit stress from the cytoskeleton to effectors in cells, thus causing various biological effects Open Access *Correspondence: [email protected]; [email protected] Bin Zhao and Jianxiong Ma have contributed equally to this work. Institute of Orthopedics, Tianjin Hospital, Tianjin, China. In studies on vascular endothelial cells, osteoblasts, and bone marrow mesenchymal stem cells, it has been shown that cytoskeletal-mediated mechanical signaling plays a crucial role in the reactivity of various cell lines to mechanical stimulation. At present, the biological effects of stress stimulation on osteoarthritic chondrocytes and the underlying role of the cytoskeleton are still unclear, and further experimental studies are needed urgently. In this study, the varying extent of micro-strain stress was applied to cultured human osteoarthritic chondrocytes and their effects on cell proliferation and functional expression of markers were evaluated. The regulatory effect of differing micro-strain stress on the cytoskeleton of osteoarthritic chondrocytes was studied by confocal fluorescence microscopy to explore the underlying role of the cytoskeleton in mediating the mechanical stress signals to regulate the physiological functions of osteoarthritic chondrocytes. 
The role of the Rho/Rock signaling pathway in the above process was studied through the use of the Rho/Rock signaling pathway inhibitor Y-27632. Study objects After obtaining the informed consent from patients with osteoarthritis of the knee, a small sample of cartilage tissue was removed during surgery. Tissues were collected aseptically, and sections prepared for Hematoxylin and Eosin (H&E) staining. Chondrocytes were isolated from the remaining tissues and cultured within two hours. The Ethical Committee of Tianjin Hospital approved this study. Following the Declaration of Helsinki guidelines, consents were obtained from either the patient or family member before enrollment in the study. Isolation, culture, and identification of chondrocytes The tissue was soaked in D-Hank's solution containing antibiotics for 5 min and rinsed twice with D-Hank's solution to remove the blood and fat from the cartilage tissue. The tissue was cut into 1 1 1 mm 3 sized tissue blocks and digested with 0.02% EDTA and 0.25% trypsin at 37℃ for 30 min. The trypsin-EDTA solution was removed, after which 0.2% collagen II enzyme was added, and the tissue digested overnight at 4℃. The tissue was digested almost completely after incubation at 37℃ for 2 h, and the addition of 1 ml FBS stopped the digestion. The resulting sample was filtered using a 200-mesh filter, and the filtrate transferred to a tube and centrifuged at 1500 r/min for 7 min, and the supernatant discarded. The cells were suspended in an H-DMEM medium containing 10% FBS, and cells counted. A 25 cm 2 cell culture flask was inoculated with 5 10 4 /ml of cells. After 24 h, half the volume was replaced with fresh media changes every two days. When the cells reached 80-90% confluence, the cells were trypsinized and passaged. The cells derived were identified by toluidine blue staining, Safranin O staining, and collagen II immunofluorescence staining. The experimental methods were carried out following the kit instructions. 
Cells at passage three were used for subsequent cell mechanical loading tests. Cell mechanical loading We performed the mechanical loading of cells following the methods described earlier by our group. The conditions used were: mechanical loading frequency, 0.25 HZ; time, 2 h/day; duration, three days. The study was divided into five groups: group A, 0% micro-strain; group B, 5% micro-strain; group C, 10% micro-strain; group D, 15% micro-strain; group E, 10% micro-strain plus Rock inhibitor Y-27632. Immunofluorescence detection Cells of each group were washed with phosphate-buffered saline (PBS) and fixed in 4% paraformaldehyde. Following fixation, the cells were washed with PBS and permeabilized with Triton X-100. After washing with PBS, the cells were blocked with 1% goat serum and incubated at room temperature with FITC labeled phalloidin. After PBS wash, the cells were stained with DAPI, and the immunofluorescent cytoskeletal proteins were observed under confocal microscopy. Cell cycle analysis The cell cycle analysis was performed as described previously. After mechanical loading, cells from each group were cultured in the incubator for another 24 h, allowing adequate recovery time. The cells were digested enzymatically, centrifuged at 2,000 r/min for 5 min, and the supernatant discarded. The cells were rinsed twice with sterile D-Hank's solution and fixed in 75% cold ethanol. The cells were centrifuged at 2,000 r/min for 5 min, and 500 l RNAase was added to the cell pellet and incubated at 37℃ for 30 min. The cells were centrifuged at 2,000 r/min for 5 min, and 500 l propidium iodide (PI) was added and incubated at room temperature in the dark for 30 min. The cell cycle analysis was carried out by flow cytometry. The data were calculated by Flow Plus software. 
The proliferative index, the proportion (%) of cells in the S phase were calculated based on the G0/G1 phase and G2/M phase, using the formula: Quantitative real-time (qRT) PCR We extracted total RNA from each group of cells using Trizol following the manufacturer's instructions. The Western blot analysis We extracted total proteins from each group of cells using RIPA lysis method. The proteins were quantified by BCA protein quantitation kit. An equal amount of protein from each group was separated on SDS-PAGE and transferred to nitrocellulose (NC) membranes. After blocking with 5% milk, the NC membranes were incubated with primary antibodies against -actin, cyclin D1, collagen II, and Rock, followed by appropriate HRP-conjugated secondary antibodies. The membranes were treated with ECL, and bands were visualized. Statistical analysis SPSS 18.0 software was used for analysis. Data were expressed as mean ± standard deviation (x ± s), one-way ANOVA was used for multi-group comparison, and the LSD-t method was used for multi-group comparison. P < 0.05 was considered statistically significant. Characterization and identification of cartilage and chondrocytes Gross observation: the articular cartilage lost its original luster, and its color darkened markedly. Articular cartilage surface defects of the lateral femoral condyle were noted, and subchondral bone was exposed, as shown in Fig. 1A. H&E staining showed the formation of longitudinal cracks in the cartilage, showing a fibrillation-like change and disordered cell arrangement. In the deep layer of the cartilage, cell aggregation, as shown in Fig. 1B was observed. The results, when combined with the clinicopathological data of the patient, showed moderate and severe degeneration of the articular cartilage. Toluidine blue staining results showed blue cytoplasm, confirming that cells secrete glycosaminoglycan (Fig. 1C). 
Safranin O staining results showed red cytoplasm, thus identifying than cells secrete proteoglycan (Fig. 1D). Collagen II immunofluorescence staining results showed green fluorescent cytoplasm suggesting secretion of collagen II (Fig. 1E). The above results confirmed that the cultured cells were chondrocytes. Micro-strain stress causes reorganization of osteoarthritic chondrocyte cytoskeleton The expression and distribution of cytoskeletal proteins in chondrocytes under micro-strain stress were evaluated by immunofluorescence, and the images captured by confocal microscopy: cells in group A showed uniform fluorescence staining, scattered and distributed in the cytoplasm without any directionality ( Fig. 2A). The cells in group B showed a fine fiber staining pattern, distributed in a specific direction (Fig. 2B). The cells in group C showed intense fluorescence staining, and the actin fibers were relatively thick, evenly distributed and arranged in bundles in parallel, and distributed in the direction of stress (Fig. 2C). The cells in group D were composed mainly of fine fibers, which were distributed in a specific direction (Fig. 2D). The above results suggested that the application of adequate micro-strain stress could lead to the reorganization of the cytoskeleton. Micro-strain stress promotes the proliferation of osteoarthritic chondrocytes Cell cycle analysis by flow cytometry was used to examine the micro-strain stress effect on the proliferation of osteoarthritic chondrocytes. PI staining shows the ability of cells to proliferate. With increased micro-strain stress, PI staining increased gradually, reaching peak levels in group C, and decreased gradually (Fig. 3A). Based on the qRT-PCR results, changes in Cyclin D1 gene expression were similar to changes in PI staining. With increasing micro-strain stress, the Cyclin D1 gene expression increased gradually, and reached a peak in group C, then progressively decreased (Fig. 3B). 
Western blot results showed that with an increase in micro-strain stress, Cyclin D1 protein expression increased gradually, and peaked in group C, then decreased gradually (Fig. 3C). These results suggested that adequate micro-strain stress could promote the proliferation of osteoarthritic chondrocytes. Micro-strain stress promotes the functional expression of osteoarthritic chondrocyte markers The effects of micro-strain stress on functional expression of osteoarthritic chondrocyte markers were examined by detecting the collagen II gene and protein expression in each group. qRT-PCR results showed that with increasing micro-strain stress, collagen II gene expression increased gradually, and reached a peak in group C, then steadily decreased (Fig. 4A). Western blot results showed a gradual increase in collagen II protein expression with increased micro-strain stress, which reached a peak in group C, then decreased gradually (Fig. 4B). These results showed the functional expression of osteoarthritic chondrocyte markers after the application of adequate micro-strain stress. Expected mechanism Phalloidin immunofluorescence results showed that compared to group C, the crude fibers in group E cells disappeared and were replaced by tiny fibers, which were distributed diffusely in the cytoplasm with no directionality (Fig. 2E). The cell cycle results showed a lower proliferative index value in group E compared to group C (P < 0.05, Fig. 3A). The cyclin D1 and collagen II gene and protein expression in group E were lower compared to group C as shown by qRT-PCR and western blot results (P < 0.05, Fig. 3B, C) and (P < 0.05, Fig. 4A, B), respectively. Western blot results showed a high expression of Rock in groups C and D, moderate expression in group B, and low expression in groups A and C (Fig. 4B). These results revealed the involvement of the Rho/Rock signaling pathway in the above biological changes (Fig. 4C). 
Discussion The present study used a novel micro-strain stress loading system designed by our research group. The osteoarthritic chondrocyte proliferation and functional marker expression were detected after subjecting cells to varying micro-strain stress during culture. The results showed that the application of adequate micro-strain stress could promote osteoarthritic chondrocyte proliferation and functional marker expression. The cyclin D1 and collagen II mRNA and protein expression increased significantly under micro-strain stress and peaked in the 10% micro-strain group. With increasing micro-strain stress, the cytoskeleton exhibited rearrangement. However, the above-described changes were suppressed when the Rock inhibitor Y-27632 was used before the application of micro-strain stress. Articular cartilage is in a complex physiological and mechanical environment in the body. The mechanical stimuli are important factors in maintaining the normal structure and function of articular cartilage. Xu et al. applied intermittent cyclic mechanical tension (0.5 Hz, 10% deformation, 4 h/d, 6d/ week) to rat endplate chondrocytes, and showed that tension stimulation promotes the proliferation of endplate chondrocytes. Thomopoulos et al. applied cyclic tensile loading tension (1 Hz, 10% deformation, 7d) to bone marrow stromal cells in a 3D in vitro model, showing that the cyclic tensile strain could promote spindle cell formation, increase collagen I and glycosaminoglycan synthesis. Therefore, we believe that an appropriate micro-strain stress could promote the proliferation and matrix anabolism of chondrocytes, and also maintain the normal structure and function of chondrocytes. However, micro-strain stress beyond the chondrocytes bearing range might inhibit cell proliferation, damage structural function and integrity of cells, and further damage the articular cartilage. 
Such changes weaken the ability of damaged cartilage tissue to withstand mechanical stimulation and may further aggravate the effects of mechanical stimulation leading to a vicious cycle of complete loss of cartilage tissue structure and function. The experimental results showed that proliferative index values, mRNA, and cyclin D1 and collagen II protein expression increased gradually with increasing micro-strain stress, and peaked at 10% micro-strain stress and then decreased gradually. However, after pretreatment of cells with the Rock inhibitor Y-27632, the proliferative index values, mRNA, and cyclin D1 and collagen II protein expression decreased. The above results indicate that micro-strain stress affects the proliferation and functional marker expression of osteoarthritic chondrocytes, and 10% micro-strain stress might offer the best mechanical stimulation. Rock inhibitor Y-27632 inhibits this process. After experiencing mechanical stimulation, the cells convert mechanical signals to chemical signals through specific signal transduction mechanisms resulting in changes in the biological function. In this series of signal transduction processes, the cytoskeleton plays a crucial role as the hub across the cell. Cytoskeleton, a critical component of cells, is composed of a large number of actin filaments and is the internal framework of cells. It consists of microtubules, microfilaments, and intermediate filaments, which are interlinked with protein-lipid molecules of the cytoplasmic side of the cell membrane to form the structural basis for cell movement, cell morphology, and transmembrane information transmission. It was shown earlier that cytoskeletal rearrangements occur during periodic mechanical stress and space microgravity. Phalloidin specifically binds to actin fibers in the cytoskeleton. 
The results of this experiment confirmed that under appropriate microstrain stress, the osteoarthritic chondrocyte cytoskeletal microfilaments changed in structure and arrangement. However, too much mechanical stimulation can suppress the above changes. Studies have shown that Rho GTPases can regulate the structure and function of the cytoskeleton in several ways under biomechanical stimulation, thus playing a critical role in biomechanical signal transduction. Rock is the downstream signaling molecule of the Rho GTP family and plays a crucial role in the Rho signaling pathway. Y-27632 has been widely used as an inhibitor of the Rho/ROCK signaling pathway. Therefore, Rock specific inhibitor Y-27632 was selected in this study. Whether the Rho/ Rock signaling pathway is involved in the regulation of osteoarthritic chondrocyte proliferation and functional expression of markers induced by micro-strain stress is still unclear. Western blot results showed that with increasing micro-strain stress, the Rock level increased gradually, reached a peak at 10% micro-strain, and decreased later. These results showed activation of the Rho/Rock signaling pathway in osteoarthritic chondrocytes by micro-strain stress, leading to cytoskeletal reorganization, and promoting the proliferation and functional expression of osteoarthritic chondrocyte markers. In summary, adequate micro-strain stress can activate the Rho/Rock signaling pathway in osteoarthritic chondrocytes, which leads to the transmission of mechanical signals to the cytoskeleton. The above processes cause the cytoskeletal reorganization and transmit the mechanical signals, leading to the promotion of proliferation and functional expression of osteoarthritic chondrocyte markers.
Heme bound amylin self-assembled monolayers on an Au electrode: an efficient bio-electrode for O2 reduction to H2O. Self-assembled monolayers of the water soluble hydrophilic part of naturally occurring amylin and its Arg11 mutant have been assembled on an Au surface, which are found to efficiently catalyze selective 4e(-)/4H(+) O2 reduction reaction (ORR) upon binding heme with a kcat of ∼10 M(-1) s(-1) under ambient conditions, where the Arg11 residue plays the key role of proton transfer in determining the rate of ORR.
// OnFailOrCancel is part of the jobs.Resumer interface. func (b *backupResumer) OnFailOrCancel(ctx context.Context, phs interface{}) error { defer b.maybeNotifyScheduledJobCompletion( ctx, jobs.StatusFailed, phs.(sql.PlanHookState).ExecCfg(), ) telemetry.Count("backup.total.failed") telemetry.CountBucketed("backup.duration-sec.failed", int64(timeutil.Since(timeutil.FromUnixMicros(b.job.Payload().StartedMicros)).Seconds())) p := phs.(sql.PlanHookState) cfg := p.ExecCfg() if err := b.clearStats(ctx, p.ExecCfg().DB); err != nil { log.Warningf(ctx, "unable to clear stats from job payload: %+v", err) } b.deleteCheckpoint(ctx, cfg, p.User()) return cfg.DB.Txn(ctx, func(ctx context.Context, txn *kv.Txn) error { return b.releaseProtectedTimestamp(ctx, txn, cfg.ProtectedTimestampProvider) }) }
package loon.action.sprite;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;

import loon.core.LObject;
import loon.core.LSystem;
import loon.core.geom.RectBox;
import loon.core.graphics.LColor;
import loon.core.graphics.opengl.GL;
import loon.core.graphics.opengl.GLEx;
import loon.core.graphics.opengl.LTexture;
import loon.core.timer.LTimer;

/**
 * Copyright 2008 - 2011
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 * @project loon
 * @author cping
 * @email:<EMAIL>
 * @version 0.1
 */
/**
 * A "please wait" loading-indicator sprite.
 *
 * Styles 0 and 1 are drawn by the built-in {@link DrawWait} painter (0 = a
 * ring of fading dots, 1 = a sweeping arc); styles >= 2 delegate rendering
 * entirely to a {@link Cycle} sample (style - 2 selects the sample type).
 */
public class WaitSprite extends LObject implements ISprite {

	/**
	 *
	 */
	private static final long serialVersionUID = 1L;

	/** Internal painter used for styles 0 and 1. */
	private final class DrawWait {

		// Top-left offset applied to the dot rectangles in style 0.
		private final float sx = 1.0f, sy = 1.0f;

		// Degrees the style-1 arc advances per tick.
		private final int ANGLE_STEP = 15;

		// Arc inset used to size the style-1 sweep rectangle.
		private final int ARCRADIUS = 120;

		private LColor color;

		// Dot radius unit for style 0.
		private double r;

		// Style 0: the eight dot rectangles; rotated by moving the head to
		// the tail each tick, which shifts the alpha fade around the ring.
		private ArrayList<Object> list;

		int width, height;

		// Style 1: current sweep angle in degrees (unbounded; reduced mod 360
		// at draw time).
		private int angle;

		private int style;

		private int paintX, paintY, paintWidth, paintHeight;

		private LColor fill;

		public DrawWait(int s, int width, int height) {
			this.style = s;
			this.width = width;
			this.height = height;
			this.color = new LColor(0.0f, 0.0f, 0.0f);
			switch (style) {
			case 0:
				int r1 = width / 8, r2 = height / 8;
				this.r = (r1 < r2 ? r1 : r2) / 2;
				this.list = new ArrayList<Object>(Arrays.asList(new Object[] {
						new RectBox(sx + 3 * r, sy + 0 * r, 2 * r, 2 * r),
						new RectBox(sx + 5 * r, sy + 1 * r, 2 * r, 2 * r),
						new RectBox(sx + 6 * r, sy + 3 * r, 2 * r, 2 * r),
						new RectBox(sx + 5 * r, sy + 5 * r, 2 * r, 2 * r),
						new RectBox(sx + 3 * r, sy + 6 * r, 2 * r, 2 * r),
						new RectBox(sx + 1 * r, sy + 5 * r, 2 * r, 2 * r),
						new RectBox(sx + 0 * r, sy + 3 * r, 2 * r, 2 * r),
						new RectBox(sx + 1 * r, sy + 1 * r, 2 * r, 2 * r) }));
				break;
			case 1:
				this.fill = new LColor(165, 0, 0, 255);
				this.paintX = (width - ARCRADIUS);
				this.paintY = (height - ARCRADIUS);
				this.paintWidth = paintX + ARCRADIUS;
				this.paintHeight = paintY + ARCRADIUS;
				break;
			}
		}

		/** Advances the animation by one tick. */
		public void next() {
			switch (style) {
			case 0:
				list.add(list.remove(0));
				break;
			case 1:
				angle += ANGLE_STEP;
				break;
			}
		}

		/** Renders the current animation frame at (x, y). */
		public void draw(GLEx g, int x, int y) {
			LColor oldColor = g.getColor();
			g.setColor(color);
			switch (style) {
			case 0:
				float alpha = 0.0f;
				int nx = x + width / 2 - (int) r * 4,
					ny = y + height / 2 - (int) r * 4;
				g.translate(nx, ny);
				for (Iterator<Object> it = list.iterator(); it.hasNext();) {
					RectBox s = (RectBox) it.next();
					// Each successive dot is 0.1 more opaque, producing the
					// rotating "fade tail" as the list is cycled by next().
					alpha = alpha + 0.1f;
					g.setAlpha(alpha);
					g.fillOval(s.x, s.y, s.width, s.height);
				}
				g.setAlpha(1.0F);
				g.translate(-nx, -ny);
				break;
			case 1:
				int old = g.getBlendMode();
				g.setBlendMode(GL.MODE_SPEED);
				g.setLineWidth(10);
				g.translate(x, y);
				g.setColor(fill);
				g.drawOval(0, 0, width, height);
				int sa = angle % 360;
				// NOTE(review): x/y are added again here even though the
				// context was already translated by (x, y) above — looks like
				// a double offset, preserved as-is; confirm against callers.
				g.fillArc(x + (width - paintWidth) / 2,
						y + (height - paintHeight) / 2, paintWidth,
						paintHeight, sa, sa + ANGLE_STEP);
				g.translate(-x, -y);
				g.resetLineWidth();
				g.setBlendMode(old);
				break;
			}
			g.setColor(oldColor);
		}
	}

	// Tick timer driving DrawWait animation (120 ms per frame).
	private LTimer delay;

	private boolean visible;

	private DrawWait wait;

	private int style;

	// Non-null only for styles >= 2; then all rendering/geometry delegates
	// to this Cycle instance.
	private Cycle cycle;

	/** Creates a full-screen wait sprite of the given style. */
	public WaitSprite(int s) {
		this(s, LSystem.screenRect.width, LSystem.screenRect.height);
	}

	/**
	 * Creates a wait sprite of the given style within a w x h area. For
	 * styles >= 2 the Cycle sample is centered in that area.
	 */
	public WaitSprite(int s, int w, int h) {
		this.style = s;
		this.wait = new DrawWait(s, w, h);
		this.delay = new LTimer(120);
		this.alpha = 1.0F;
		this.visible = true;
		if (s > 1) {
			int width = w / 2;
			int height = h / 2;
			cycle = newSample(s - 2, width, height);
			RectBox limit = cycle.getCollisionBox();
			// Center the sample; a zero-sized collision box falls back to 20.
			setLocation(
					(w - (limit.getWidth() == 0 ? 20 : limit.getWidth())) / 2,
					(h - (limit.getHeight() == 0 ? 20 : limit.getHeight())) / 2);
		}
		update(0);
	}

	/**
	 * Builds a preconfigured Cycle effect. The per-type width/height/offset/
	 * padding values are hand-tuned presets; portrait (srcWidth < srcHeight)
	 * and landscape layouts get different tweaks.
	 */
	private final static Cycle newSample(int type, float srcWidth,
			float srcHeight) {
		float width = 1;
		float height = 1;
		float offset = 0;
		int padding = 0;
		switch (type) {
		case 0:
			offset = 12;
			if (srcWidth < srcHeight) {
				width = 60;
				height = 60;
				padding = -35;
			} else {
				width = 100;
				height = 100;
				padding = -35;
			}
			break;
		case 1:
			width = 100;
			height = 40;
			if (srcWidth < srcHeight) {
				offset = 0;
			} else {
				offset = 8;
			}
			break;
		case 2:
			width = 30;
			height = 30;
			if (srcWidth < srcHeight) {
				offset = 0;
			} else {
				offset = 6;
			}
			break;
		case 3:
			width = 100;
			height = 100;
			padding = -30;
			break;
		case 4:
			width = 80;
			height = 80;
			offset = 14;
			padding = -15;
			break;
		case 5:
			width = 100;
			height = 100;
			if (srcWidth < srcHeight) {
				offset = -4;
			}
			break;
		case 6:
			width = 60;
			height = 60;
			offset = 12;
			if (srcWidth < srcHeight) {
				padding = -60;
			} else {
				padding = -80;
			}
			break;
		case 7:
			width = 60;
			height = 60;
			offset = 12;
			if (srcWidth < srcHeight) {
				padding = -80;
			} else {
				padding = -120;
			}
			break;
		case 8:
			width = 60;
			height = 60;
			offset = 12;
			if (srcWidth < srcHeight) {
				padding = -60;
			} else {
				padding = -80;
			}
			break;
		case 9:
			width = 80;
			height = 80;
			if (srcWidth < srcHeight) {
				offset = -2;
				padding = -20;
			} else {
				padding = -30;
			}
			break;
		}
		return Cycle.getSample(type, srcWidth, srcHeight, width, height,
				offset, padding);
	}

	/** Renders the sprite; styles < 2 honor this sprite's alpha directly. */
	public void createUI(GLEx g) {
		if (!visible) {
			return;
		}
		if (style < 2) {
			if (alpha > 0.1 && alpha < 1.0) {
				g.setAlpha(alpha);
				wait.draw(g, x(), y());
				g.setAlpha(1.0F);
			} else {
				wait.draw(g, x(), y());
			}
		} else {
			if (cycle != null) {
				cycle.createUI(g);
			}
		}
	}

	public int getHeight() {
		if (cycle != null) {
			return cycle.getCollisionBox().height;
		} else {
			return wait.height;
		}
	}

	public int getWidth() {
		if (cycle != null) {
			return cycle.getCollisionBox().width;
		} else {
			return wait.width;
		}
	}

	/**
	 * Advances the animation: delegates to the Cycle when present (keeping
	 * its position synced to this sprite), otherwise steps DrawWait on the
	 * 120 ms timer.
	 */
	public void update(long elapsedTime) {
		if (!visible) {
			return;
		}
		if (cycle != null) {
			if (cycle.x() != x() || cycle.y() != y()) {
				cycle.setLocation(x(), y());
			}
			cycle.update(elapsedTime);
		} else {
			if (delay.action(elapsedTime)) {
				wait.next();
			}
		}
	}

	public void setAlpha(float alpha) {
		if (cycle != null) {
			cycle.setAlpha(alpha);
		} else {
			this.alpha = alpha;
		}
	}

	public float getAlpha() {
		if (cycle != null) {
			return cycle.getAlpha();
		} else {
			return alpha;
		}
	}

	public RectBox getCollisionBox() {
		if (cycle != null) {
			return cycle.getCollisionBox();
		} else {
			return getRect(x(), y(), getWidth(), getHeight());
		}
	}

	public boolean isVisible() {
		return cycle != null ? cycle.isVisible() : visible;
	}

	public void setVisible(boolean visible) {
		if (cycle != null) {
			cycle.setVisible(visible);
		} else {
			this.visible = visible;
		}
	}

	/** This sprite has no backing texture. */
	public LTexture getBitmap() {
		return null;
	}

	public void dispose() {
	}
}
Lumped element band pass filter design on 130nm CMOS using delta-star transformation This paper presents the design of a 10GHz lumped element band pass filter on standard 130nm CMOS technology. A series coupled resonator topology is selected due to its advantages over classical low-pass to band-pass filter mapping. A delta-star transformation technique is used in the network synthesis to minimise the impact of stray capacitances, and to avoid the problem of fabricating excessively small coupling capacitors.
King's Road (Florida) History When the first governor of British East Florida, Col. James Grant, arrived in the capital, St. Augustine, on August 29, 1764, almost the entire Spanish population of the town had emigrated to Cuba and elsewhere in New Spain. More than 3,700 people had left St. Augustine and its outposts when Florida was ceded to the British in 1763. Grant was immediately concerned about the poor state of the few roads in the province, knowing that new settlers from the other colonies would require passable roads for the wagons that carried their families and belongings. With inadequate funds available for constructing a road to reach those lands granted by the Crown for the establishment of plantations on the waterways near the coast, Grant raised a public subscription to finance the project. He secured five hundred guineas by July 1765 to build the road from New Smyrna to Colerain, Georgia, and a promise by the Georgia colonial assembly to build a road from Fort Barrington to the St. Marys River. In 1765, Jonathon Bryan, a wealthy Georgia planter who was also a skilled surveyor, rode on horseback from St. Augustine to the Cowford (now Jacksonville) in search of the most suitable course for that section of the proposed road. From the cattle crossing at this narrows of the St. Johns River he rode north through cypress swamps and across the south branch of the Nassau River, observing that this route would require several bridges and a causeway over the lowlands. He offered to build such a roadway with slave labor for £1,100 British Sterling, but because of the lack of funds in his allotted budget, Governor Grant was forced to decline. The northern portion of what would become the King's Road in Florida, stretching from the St. Johns to the St. Marys rivers, was finally constructed by the East Florida rice planters Charles and Jermyn Wright, brothers of Sir James Wright, the Royal Governor of Georgia. 
The works were begun in 1764 under the supervision of Capt. John Fairlamb and his nephew, Joshua Yallowby, and completed in 1775. Construction of the highway south of St. Augustine was under the supervision of John Moultrie, now Lieutenant Governor of East Florida, who had recently come from South Carolina with his family and slaves. Moultrie wrote Governor Grant on March 23, 1765, "You may depend on my utmost endeavors to forward the road, but believe no one will undertake till fall...". A serviceable road was not completed until late 1767, however. It was built mainly for the benefit of Dr. Andrew Turnbull, a Scottish entrepreneur who with Sir William Duncan organized a company in England to establish a large plantation south of Mosquito Inlet called New Smyrna. Fifteen hundred Greeks, Italians, and Minorcans were indentured to work for the company and settle there; those who survived the harsh conditions and treatment they endured fled to St. Augustine on the King's Road in 1777. Travelers on the King's Road crossed the narrowest part of the lower St. Johns River at the Cowford (now Jacksonville) by ferry, and continued south. During the American Revolution, American troops used this route to attack British forces. The most notable of these skirmishes was fought at Alligator Creek Bridge, near present-day Callahan on June 30, 1778. The King's Road had mostly reverted to wilderness by 1821, when the United States took possession of the former Spanish colony. The United States Army rebuilt the Kings Road between 1828 and 1831.
<reponame>stv0g/k66 #[doc = r" Value read from the register"] pub struct R { bits: u8, } #[doc = r" Value to write to the register"] pub struct W { bits: u8, } impl super::PE6 { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `WUPE20`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum WUPE20R { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE20R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { WUPE20R::_00 => 0, WUPE20R::_01 => 1, WUPE20R::_10 => 2, WUPE20R::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> WUPE20R { match value { 0 => WUPE20R::_00, 1 => WUPE20R::_01, 2 => WUPE20R::_10, 3 => WUPE20R::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == WUPE20R::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == WUPE20R::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == WUPE20R::_10 } #[doc = 
"Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == WUPE20R::_11 } } #[doc = "Possible values of the field `WUPE21`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum WUPE21R { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE21R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { WUPE21R::_00 => 0, WUPE21R::_01 => 1, WUPE21R::_10 => 2, WUPE21R::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> WUPE21R { match value { 0 => WUPE21R::_00, 1 => WUPE21R::_01, 2 => WUPE21R::_10, 3 => WUPE21R::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == WUPE21R::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == WUPE21R::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == WUPE21R::_10 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == WUPE21R::_11 } } #[doc = "Possible values of the field `WUPE22`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum WUPE22R { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE22R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { WUPE22R::_00 => 0, WUPE22R::_01 => 1, WUPE22R::_10 => 2, WUPE22R::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> 
WUPE22R { match value { 0 => WUPE22R::_00, 1 => WUPE22R::_01, 2 => WUPE22R::_10, 3 => WUPE22R::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == WUPE22R::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == WUPE22R::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == WUPE22R::_10 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == WUPE22R::_11 } } #[doc = "Possible values of the field `WUPE23`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum WUPE23R { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE23R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { WUPE23R::_00 => 0, WUPE23R::_01 => 1, WUPE23R::_10 => 2, WUPE23R::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> WUPE23R { match value { 0 => WUPE23R::_00, 1 => WUPE23R::_01, 2 => WUPE23R::_10, 3 => WUPE23R::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == WUPE23R::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == WUPE23R::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == WUPE23R::_10 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == WUPE23R::_11 } } #[doc = "Values that can be written to the field `WUPE20`"] pub enum WUPE20W { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input 
pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE20W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { WUPE20W::_00 => 0, WUPE20W::_01 => 1, WUPE20W::_10 => 2, WUPE20W::_11 => 3, } } } #[doc = r" Proxy"] pub struct _WUPE20W<'a> { w: &'a mut W, } impl<'a> _WUPE20W<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: WUPE20W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "External input pin disabled as wakeup input"] #[inline] pub fn _00(self) -> &'a mut W { self.variant(WUPE20W::_00) } #[doc = "External input pin enabled with rising edge detection"] #[inline] pub fn _01(self) -> &'a mut W { self.variant(WUPE20W::_01) } #[doc = "External input pin enabled with falling edge detection"] #[inline] pub fn _10(self) -> &'a mut W { self.variant(WUPE20W::_10) } #[doc = "External input pin enabled with any change detection"] #[inline] pub fn _11(self) -> &'a mut W { self.variant(WUPE20W::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `WUPE21`"] pub enum WUPE21W { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE21W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { WUPE21W::_00 => 0, WUPE21W::_01 => 1, WUPE21W::_10 => 2, WUPE21W::_11 => 3, } } } #[doc = r" Proxy"] pub struct _WUPE21W<'a> { w: &'a mut W, } impl<'a> _WUPE21W<'a> { 
#[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: WUPE21W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "External input pin disabled as wakeup input"] #[inline] pub fn _00(self) -> &'a mut W { self.variant(WUPE21W::_00) } #[doc = "External input pin enabled with rising edge detection"] #[inline] pub fn _01(self) -> &'a mut W { self.variant(WUPE21W::_01) } #[doc = "External input pin enabled with falling edge detection"] #[inline] pub fn _10(self) -> &'a mut W { self.variant(WUPE21W::_10) } #[doc = "External input pin enabled with any change detection"] #[inline] pub fn _11(self) -> &'a mut W { self.variant(WUPE21W::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `WUPE22`"] pub enum WUPE22W { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE22W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { WUPE22W::_00 => 0, WUPE22W::_01 => 1, WUPE22W::_10 => 2, WUPE22W::_11 => 3, } } } #[doc = r" Proxy"] pub struct _WUPE22W<'a> { w: &'a mut W, } impl<'a> _WUPE22W<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: WUPE22W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "External input pin disabled as wakeup input"] #[inline] pub fn _00(self) -> &'a mut W { self.variant(WUPE22W::_00) } #[doc = "External input pin enabled with rising edge detection"] #[inline] pub fn _01(self) -> &'a mut W { self.variant(WUPE22W::_01) } #[doc = "External input pin enabled with falling edge 
detection"] #[inline] pub fn _10(self) -> &'a mut W { self.variant(WUPE22W::_10) } #[doc = "External input pin enabled with any change detection"] #[inline] pub fn _11(self) -> &'a mut W { self.variant(WUPE22W::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } #[doc = "Values that can be written to the field `WUPE23`"] pub enum WUPE23W { #[doc = "External input pin disabled as wakeup input"] _00, #[doc = "External input pin enabled with rising edge detection"] _01, #[doc = "External input pin enabled with falling edge detection"] _10, #[doc = "External input pin enabled with any change detection"] _11, } impl WUPE23W { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { WUPE23W::_00 => 0, WUPE23W::_01 => 1, WUPE23W::_10 => 2, WUPE23W::_11 => 3, } } } #[doc = r" Proxy"] pub struct _WUPE23W<'a> { w: &'a mut W, } impl<'a> _WUPE23W<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: WUPE23W) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "External input pin disabled as wakeup input"] #[inline] pub fn _00(self) -> &'a mut W { self.variant(WUPE23W::_00) } #[doc = "External input pin enabled with rising edge detection"] #[inline] pub fn _01(self) -> &'a mut W { self.variant(WUPE23W::_01) } #[doc = "External input pin enabled with falling edge detection"] #[inline] pub fn _10(self) -> &'a mut W { self.variant(WUPE23W::_10) } #[doc = "External input pin enabled with any change detection"] #[inline] pub fn _11(self) -> &'a mut W { self.variant(WUPE23W::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u8) << OFFSET); self.w.bits |= ((value & MASK) as u8) << OFFSET; self.w } } impl 
R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u8 { self.bits } #[doc = "Bits 0:1 - Wakeup Pin Enable For LLWU_P20"] #[inline] pub fn wupe20(&self) -> WUPE20R { WUPE20R::_from({ const MASK: u8 = 3; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u8) as u8 }) } #[doc = "Bits 2:3 - Wakeup Pin Enable For LLWU_P21"] #[inline] pub fn wupe21(&self) -> WUPE21R { WUPE21R::_from({ const MASK: u8 = 3; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u8) as u8 }) } #[doc = "Bits 4:5 - Wakeup Pin Enable For LLWU_P22"] #[inline] pub fn wupe22(&self) -> WUPE22R { WUPE22R::_from({ const MASK: u8 = 3; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u8) as u8 }) } #[doc = "Bits 6:7 - Wakeup Pin Enable For LLWU_P23"] #[inline] pub fn wupe23(&self) -> WUPE23R { WUPE23R::_from({ const MASK: u8 = 3; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u8) as u8 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u8) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:1 - Wakeup Pin Enable For LLWU_P20"] #[inline] pub fn wupe20(&mut self) -> _WUPE20W { _WUPE20W { w: self } } #[doc = "Bits 2:3 - Wakeup Pin Enable For LLWU_P21"] #[inline] pub fn wupe21(&mut self) -> _WUPE21W { _WUPE21W { w: self } } #[doc = "Bits 4:5 - Wakeup Pin Enable For LLWU_P22"] #[inline] pub fn wupe22(&mut self) -> _WUPE22W { _WUPE22W { w: self } } #[doc = "Bits 6:7 - Wakeup Pin Enable For LLWU_P23"] #[inline] pub fn wupe23(&mut self) -> _WUPE23W { _WUPE23W { w: self } } }
'''IOSXR specific recovery functions'''

# Python
import re
import time
import logging

# Unicon
from unicon.eal.expect import Spawn
from unicon.eal.dialogs import Dialog, Statement
from unicon.plugins.iosxr.patterns import IOSXRPatterns

# Genie
from genie.libs.clean.recovery.iosxr.dialogs import (BreakBootDialog,
                                                     RommonDialog,
                                                     TftpRommonDialog)

# Logger
log = logging.getLogger()


# Power Cycler handlers
def sendbrk_handler(spawn, break_count):
    ''' Send break while rebooting device

        Args:
            spawn ('obj'): Spawn connection object
            break_count ('int'): Number of sending break times
        Returns:
            None
    '''
    count = 1
    xr_patterns = IOSXRPatterns()
    while count <= break_count:
        # '\035' (Ctrl-]) drops the session to the telnet prompt, from which
        # the break signal is sent to the device console.
        spawn.send("\035")
        spawn.expect([xr_patterns.telnet_prompt])
        spawn.send("send brk\r\r")
        time.sleep(1)
        count += 1


def recovery_worker(*args, **kwargs):
    ''' Dispatch to the golden-image or tftp-boot recovery flow, based on
        which keyword was supplied by the caller.
    '''
    if kwargs.get('golden_image'):
        return device_recovery(*args, **kwargs)
    elif kwargs.get('tftp_boot'):
        return tftp_recovery_worker(*args, **kwargs)


def device_recovery(start, device, console_activity_pattern,
                    golden_image=None, break_count=10, timeout=600,
                    recovery_password=None, tftp_boot=None, item=None,
                    **kwargs):
    ''' A method for starting Spawns and handling the device statements
        during golden-image recovery

        Args:
            device ('obj'): Device object
            start ('obj'): Start method under device object
            console_activity_pattern ('str'): Pattern to send the break at
            golden_image ('dict'): Information to load golden image on the device
            break_count ('int'): Number of sending break times
            timeout ('int'): Recovery process timeout
            recovery_password ('str'): Device password after recovery
        Returns:
            None
    '''
    # Set a target for each recovery session
    # so it's easier to distinguish expect debug logs on the console.
    device.instantiate(connection_timeout=timeout)

    # Get device console port information (last word of the start command).
    # Raw string avoids the invalid-escape-sequence warning of '\s'.
    last_word_in_start_match = re.match(r'.*\s(\S+)$', start)
    last_word_in_start = last_word_in_start_match.group(1) \
        if last_word_in_start_match else ""

    # Set target
    target = "{}_{}".format(device.hostname, last_word_in_start)

    logfile = log.handlers[1].logfile if len(log.handlers) >= 2 else None
    spawn = Spawn(spawn_command=start,
                  settings=device.cli.settings,
                  target=target,
                  log=log,
                  logfile=logfile)

    # Interrupt the boot sequence into rommon by sending breaks when console
    # activity is seen.
    break_dialog = BreakBootDialog()
    break_dialog.add_statement(Statement(pattern=console_activity_pattern,
                                         action=sendbrk_handler,
                                         args={'break_count': break_count},
                                         loop_continue=True,
                                         continue_timer=False), pos=0)
    break_dialog.dialog.process(spawn, timeout=timeout)

    # Boot the golden image from rommon.
    dialog = RommonDialog()
    dialog.dialog.process(spawn, timeout=timeout,
                          context={
                              'boot_image': golden_image[0],
                              'break_count': break_count,
                              # restored from redacted source; the parameter
                              # exists exactly for this context key
                              'password': recovery_password
                          })

    spawn.close()


def tftp_recovery_worker(start, device, console_activity_pattern,
                         tftp_boot=None, break_count=10, timeout=600,
                         recovery_username=None, recovery_password=None,
                         golden_image=None, item=None, **kwargs):
    ''' A method for starting Spawns and handling the device statements
        during tftp-boot recovery

        Args:
            device ('obj'): Device object
            start ('obj'): Start method under device object
            console_activity_pattern ('str'): Pattern to send the break at
            tftp_boot ('dict'): Tftp boot information
            break_count ('int'): Number of sending break times
            timeout ('int'): Recovery process timeout
            recovery_username ('str'): Device username after recovery
            recovery_password ('str'): Device password after recovery
        Returns:
            None
    '''
    log.info('Set the device in rommon and load the device with tftp boot')
    break_dialog = BreakBootDialog()
    break_dialog.add_statement(Statement(pattern=console_activity_pattern,
                                         action=sendbrk_handler,
                                         args={'break_count': break_count},
                                         loop_continue=True,
                                         continue_timer=False), pos=0)

    # Set a target for each recovery session
    # so it's easier to distinguish expect debug logs on the console.
    device.instantiate(connection_timeout=timeout)

    # Get device console port information (last word of the start command).
    last_word_in_start_match = re.match(r'.*\s(\S+)$', start)
    last_word_in_start = last_word_in_start_match.group(1) \
        if last_word_in_start_match else ""

    # Set target
    target = "{}_{}".format(device.hostname, last_word_in_start)

    if len(log.handlers) >= 2:
        logfile = log.handlers[1].logfile
    else:
        logfile = None

    spawn = Spawn(spawn_command=start,
                  settings=device.cli.settings,
                  target=target,
                  log=log,
                  logfile=logfile)

    tftp_rommon_dialog = TftpRommonDialog()

    # Fall back to the device's default credentials when none were given.
    if not recovery_username:
        recovery_username = device.connections[device.context]['credentials'].\
            get('default', {}).get('username', {})
    if not recovery_password:
        recovery_password = device.connections[device.context]['credentials'].\
            get('default', {}).get('password', {})

    tftp_rommon_dialog.hostname_statement(device.hostname)

    # exec_prompt, username, password
    tftp_rommon_dialog.dialog.process(spawn, timeout=timeout,
                                      context={'device_name': device.name,
                                               'ip': tftp_boot['ip_address'][item],
                                               'username': recovery_username,
                                               # restored from redacted source
                                               'password': recovery_password,
                                               'subnet_mask': tftp_boot['subnet_mask'],
                                               'gateway': tftp_boot['gateway'],
                                               'image': tftp_boot['image'],
                                               'tftp_server': tftp_boot['tftp_server'],
                                               'hostname': device.hostname})

    spawn.close()


def tftp_recover_from_rommon(spawn, session, context, device_name, ip,
                             subnet_mask, gateway, image, tftp_server):
    '''Load new image on the device from rommon with tftp'''

    log.info("Assigning boot variables in rommon...")

    # rommon arg name mapping
    mapping_list = {
        'ip': 'IP_ADDRESS',
        'subnet_mask': 'IP_SUBNET_MASK',
        'gateway': 'DEFAULT_GATEWAY',
        'tftp_server': 'TFTP_SERVER',
    }

    for item in mapping_list:
        log.info("\nSet '{}' to {}".format(mapping_list[item], context[item]))
        try:
            spawn.sendline("{}={}".format(mapping_list[item], context[item]))
        except Exception as e:
            log.error(str(e))
            raise Exception("Unable to set {}={}".format(mapping_list[item],
                                                         context[item]))

    # Build the boot command
    boot_cmd = 'boot tftp://{tftp}/{image}'.format(tftp=tftp_server,
                                                   image=image[0])

    # Send the boot command to the device
    log.info("Sending TFTP boot command...")
    try:
        spawn.sendline(boot_cmd)
    except Exception as e:
        raise Exception("Unable to boot {} error {}".format(boot_cmd, str(e)))
/**
 * Middle layer which translates {@link com.google.api.server.spi.config.ApiNamespace} annotations
 * into annotation agnostic {@link ApiNamespaceConfig}.
 */
public class ApiNamespaceAnnotationConfig {

  private final ApiNamespaceConfig config;

  /**
   * Wraps the given annotation-agnostic namespace configuration.
   *
   * @throws NullPointerException if {@code config} is null
   */
  public ApiNamespaceAnnotationConfig(ApiNamespaceConfig config) {
    this.config = Preconditions.checkNotNull(config, "config");
  }

  /** Returns the wrapped annotation-agnostic configuration. */
  public ApiNamespaceConfig getConfig() {
    return config;
  }

  /**
   * Copies {@code ownerDomain} into the config unless it is the empty string
   * (the annotation's "unset" sentinel).
   *
   * @throws NullPointerException if {@code ownerDomain} is null
   */
  public void setOwnerDomainIfNotEmpty(String ownerDomain) {
    String value = Preconditions.checkNotNull(ownerDomain, "ownerDomain");
    if (value.isEmpty()) {
      return;
    }
    config.setOwnerDomain(value);
  }

  /**
   * Copies {@code ownerName} into the config unless it is the empty string
   * (the annotation's "unset" sentinel).
   *
   * @throws NullPointerException if {@code ownerName} is null
   */
  public void setOwnerNameIfNotEmpty(String ownerName) {
    String value = Preconditions.checkNotNull(ownerName, "ownerName");
    if (value.isEmpty()) {
      return;
    }
    config.setOwnerName(value);
  }

  /**
   * Copies {@code packagePath} into the config unless it is the empty string
   * (the annotation's "unset" sentinel).
   *
   * @throws NullPointerException if {@code packagePath} is null
   */
  public void setPackagePathIfNotEmpty(String packagePath) {
    String value = Preconditions.checkNotNull(packagePath, "packagePath");
    if (value.isEmpty()) {
      return;
    }
    config.setPackagePath(value);
  }
}
<reponame>saitejar/JChessEngine import java.util.ArrayList; import java.util.Arrays; /** * */ /** * @author ranuva * */ public class AlphaBeta implements GlobalConstants{ static int V_WP[] = {0, 0, 0, 0, 0, 0, 0, 0, 50, 50, 50, 50, 50, 50, 50, 50, 10, 10, 20, 30, 30, 20, 10, 10, 5, 5, 10, 25, 25, 10, 5, 5, 0, 0, 0, 20, 20, 0, 0, 0, 5, -5,-10, 0, 0,-10, -5, 5, 5, 10, 10,-20,-20, 10, 10, 5, 0, 0, 0, 0, 0, 0, 0, 0}; static int V_BP[] = { 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 5 , 10 , 10 , -20 , -20 , 10 , 10 , 5 , 5 , -5 , -10 , 0 , 0 , -10 , -5 , 5 , 0 , 0 , 0 , 20 , 20 , 0 , 0 , 0 , 5 , 5 , 10 , 25 , 25 , 10 , 5 , 5 , 10 , 10 , 20 , 30 , 30 , 20 , 10 , 10 , 50 , 50 , 50 , 50 , 50 , 50 , 50 , 50 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0}; static int V_WN[] = {-50,-40,-30,-30,-30,-30,-40,-50, -40,-20, 0, 0, 0, 0,-20,-40, -30, 0, 10, 15, 15, 10, 0,-30, -30, 5, 15, 20, 20, 15, 5,-30, -30, 0, 15, 20, 20, 15, 0,-30, -30, 5, 10, 15, 15, 10, 5,-30, -40,-20, 0, 5, 5, 0,-20,-40, -50,-40,-30,-30,-30,-30,-40,-50}; static int V_BN[] = {-50 , -40 , -30 , -30 , -30 , -30 , -40 , -50 , -40 , -20 , 0 , 5 , 5 , 0 , -20 , -40 , -30 , 5 , 10 , 15 , 15 , 10 , 5 , -30 , -30 , 0 , 15 , 20 , 20 , 15 , 0 , -30 , -30 , 5 , 15 , 20 , 20 , 15 , 5 , -30 , -30 , 0 , 10 , 15 , 15 , 10 , 0 , -30 , -40 , -20 , 0 , 0 , 0 , 0 , -20 , -40 , -50 , -40 , -30 , -30 , -30 , -30 , -40 , -50}; static int V_WR[] = { 0, 0, 0, 0, 0, 0, 0, 0, 5, 10, 10, 10, 10, 10, 10, 5, -5, 0, 0, 0, 0, 0, 0, -5, -5, 0, 0, 0, 0, 0, 0, -5, -5, 0, 0, 0, 0, 0, 0, -5, -5, 0, 0, 0, 0, 0, 0, -5, -5, 0, 0, 0, 0, 0, 0, -5, 0, 0, 0, 5, 5, 0, 0, 0}; static int V_BR[] = { 0 , 0 , 0 , 5 , 5 , 0 , 0 , 0 , -5 , 0 , 0 , 0 , 0 , 0 , 0 , -5 , -5 , 0 , 0 , 0 , 0 , 0 , 0 , -5 , -5 , 0 , 0 , 0 , 0 , 0 , 0 , -5 , -5 , 0 , 0 , 0 , 0 , 0 , 0 , -5 , -5 , 0 , 0 , 0 , 0 , 0 , 0 , -5 , 5 , 10 , 10 , 10 , 10 , 10 , 10 , 5 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , 0 , }; static int V_WB[] = {-20,-10,-10,-10,-10,-10,-10,-20, -10, 0, 0, 0, 0, 0, 0,-10, -10, 0, 5, 10, 10, 5, 0,-10, 
-10, 5, 5, 10, 10, 5, 5,-10, -10, 0, 10, 10, 10, 10, 0,-10, -10, 10, 10, 10, 10, 10, 10,-10, -10, 5, 0, 0, 0, 0, 5,-10, -20,-10,-10,-10,-10,-10,-10,-20}; static int V_BB[] = {-20 , -10 , -10 , -10 , -10 , -10 , -10 , -20 , -10 , 5 , 0 , 0 , 0 , 0 , 5 , -10 , -10 , 10 , 10 , 10 , 10 , 10 , 10 , -10 , -10 , 0 , 10 , 10 , 10 , 10 , 0 , -10 , -10 , 5 , 5 , 10 , 10 , 5 , 5 , -10 , -10 , 0 , 5 , 10 , 10 , 5 , 0 , -10 , -10 , 0 , 0 , 0 , 0 , 0 , 0 , -10 , -20 , -10 , -10 , -10 , -10 , -10 , -10 , -20}; static int V_WQ[] = {-20,-10,-10, -5, -5,-10,-10,-20, -10, 0, 0, 0, 0, 0, 0,-10, -10, 0, 5, 5, 5, 5, 0,-10, -5, 0, 5, 5, 5, 5, 0, -5, 0, 0, 5, 5, 5, 5, 0, -5, -10, 5, 5, 5, 5, 5, 0,-10, -10, 0, 5, 0, 0, 0, 0,-10, -20,-10,-10, -5, -5,-10,-10,-20}; static int V_BQ[] = {-20 , -10 , -10 , -5 , -5 , -10 , -10 , -20 , -10 , 0 , 0 , 0 , 0 , 5 , 0 , -10 , -10 , 0 , 5 , 5 , 5 , 5 , 5 , -10 , -5 , 0 , 5 , 5 , 5 , 5 , 0 , 0 , -5 , 0 , 5 , 5 , 5 , 5 , 0 , -5 , -10 , 0 , 5 , 5 , 5 , 5 , 0 , -10 , -10 , 0 , 0 , 0 , 0 , 0 , 0 , -10 , -20 , -10 , -10 , -5 , -5 , -10 , -10 , -20}; static int V_WK_MIDDLE[] = {-30,-40,-40,-50,-50,-40,-40,-30, -30,-40,-40,-50,-50,-40,-40,-30, -30,-40,-40,-50,-50,-40,-40,-30, -30,-40,-40,-50,-50,-40,-40,-30, -20,-30,-30,-40,-40,-30,-30,-20, -10,-20,-20,-20,-20,-20,-20,-10, 20, 20, 0, 0, 0, 0, 20, 20, 20, 30, 10, 0, 0, 10, 30, 20}; static int V_BK_MIDDLE[] = {20 , 30 , 10 , 0 , 0 , 10 , 30 , 20 , 20 , 20 , 0 , 0 , 0 , 0 , 20 , 20 , -10 , -20 , -20 , -20 , -20 , -20 , -20 , -10 , -20 , -30 , -30 , -40 , -40 , -30 , -30 , -20 , -30 , -40 , -40 , -50 , -50 , -40 , -40 , -30 , -30 , -40 , -40 , -50 , -50 , -40 , -40 , -30 , -30 , -40 , -40 , -50 , -50 , -40 , -40 , -30 , -30 , -40 , -40 , -50 , -50 , -40 , -40 , -30}; static int V_WK_END[] = {-50,-40,-30,-20,-20,-30,-40,-50, -30,-20,-10, 0, 0,-10,-20,-30, -30,-10, 20, 30, 30, 20,-10,-30, -30,-10, 30, 40, 40, 30,-10,-30, -30,-10, 30, 40, 40, 30,-10,-30, -30,-10, 20, 30, 30, 20,-10,-30, -30,-30, 0, 0, 0, 0,-30,-30, 
-50,-30,-30,-30,-30,-30,-30,-50}; static int V_BK_END[] = {-50 , -30 , -30 , -30 , -30 , -30 , -30 , -50 , -30 , -30 , 0 , 0 , 0 , 0 , -30 , -30 , -30 , -10 , 20 , 30 , 30 , 20 , -10 , -30 , -30 , -10 , 30 , 40 , 40 , 30 , -10 , -30 , -30 , -10 , 30 , 40 , 40 , 30 , -10 , -30 , -30 , -10 , 20 , 30 , 30 , 20 , -10 , -30 , -30 , -20 , -10 , 0 , 0 , -10 , -20 , -30 , -50 , -40 , -30 , -20 , -20 , -30 , -40 , -50}; public static int evaluate(long wp, long wr, long wn, long wb, long wq, long wk, long bp, long br, long bn, long bb, long bq, long bk){ int eval =0; // TODO Auto-generated method stub String chessBoard[][] = new String[8][8]; for (int i = 0; i < 64; i++) { chessBoard[i / 8][i % 8] = " "; } for (int i = 0; i < 64; i++) { if (((wp >> i) & 1) == 1) { eval+= V_WP[i]; } if (((wn >> i) & 1) == 1) { eval+=V_WN[i]; } if (((wb >> i) & 1) == 1) { eval+=V_WB[i]; } if (((wr >> i) & 1) == 1) { eval+=V_WR[i]; } if (((wq >> i) & 1) == 1) { eval+=V_WQ[i]; } if (((wk >> i) & 1) == 1) { if((wr|wb|wn|wq|bb|br|bn|bq)==0 || (wq|bq)==0){ eval-=V_BK_END[i]; } else{ eval-=V_BK_MIDDLE[i]; } } if (((bp >> i) & 1) == 1) { eval-=V_BP[i]; } if (((bn >> i) & 1) == 1) { eval-=V_BN[i]; } if (((bb >> i) & 1) == 1) { eval-=V_BB[i]; } if (((br >> i) & 1) == 1) { eval-=V_BR[i]; } if (((bq >> i) & 1) == 1) { eval-=V_BQ[i]; } if (((bk >> i) & 1) == 1) { if((wr|wb|wn|wq|bb|br|bn|bq)==0 || (wq|bq)==0){ eval-=V_BK_END[i]; } else{ eval-=V_BK_MIDDLE[i]; } } } return eval; } public static int zWSearch(int beta,int depth) {//fail-hard zero window search, returns either beta-1 or beta //System.out.println("depth ="+ depth + ", bw = "+ MoveGen.bw ); int score = Integer.MIN_VALUE, move = 0; long WPt, WRt, WNt, WBt, WQt, WKt, BPt, BRt, BNt, BBt, BQt, BKt, EPt, EPct; boolean CWKt,CWQt,CBKt,CBQt,bwt; CWKt = MoveGen.CWK;CWQt = MoveGen.CWQ;CBKt = MoveGen.CBK; CBQt = MoveGen.CBQ;bwt = MoveGen.bw;WPt = MoveGen.WP; WRt = MoveGen.WR;WNt = MoveGen.WN;WBt = MoveGen.WB; WQt = MoveGen.WQ;WKt = MoveGen.WK;BPt = 
MoveGen.BP; BRt = MoveGen.BR;BNt = MoveGen.BN;BBt = MoveGen.BB; BQt = MoveGen.BQ;BKt = MoveGen.BK;EPt = MoveGen.EP; EPct = MoveGen.EPc; //alpha == beta - 1 //this is either a cut- or all-node if (depth == MoveGen.MaxDepth) { score = evaluate(MoveGen.WP,MoveGen.WR,MoveGen.WN,MoveGen.WB, MoveGen.WQ,MoveGen.WK,MoveGen.BP, MoveGen.BR,MoveGen.BN,MoveGen.BB, MoveGen.BQ,MoveGen.BK); //System.out.println("zero "); return score; } ArrayList<Integer> moves; if (MoveGen.bw) { moves=MoveGen.getPossibleWhiteMoves(); } else { moves=MoveGen.getPossibleBlackMoves(); } MoveGen.orderMoves(moves); for (int i=0;i<moves.size();i++) { CWKt = MoveGen.CWK;CWQt = MoveGen.CWQ;CBKt = MoveGen.CBK; CBQt = MoveGen.CBQ;bwt = MoveGen.bw;WPt = MoveGen.WP; WRt = MoveGen.WR;WNt = MoveGen.WN;WBt = MoveGen.WB; WQt = MoveGen.WQ;WKt = MoveGen.WK;BPt = MoveGen.BP; BRt = MoveGen.BR;BNt = MoveGen.BN;BBt = MoveGen.BB; BQt = MoveGen.BQ;BKt = MoveGen.BK;EPt = MoveGen.EP;EPct = MoveGen.EPc; move = moves.get(i); int sc = Integer.MIN_VALUE; MoveGen.NodeCounter++; MoveGen.makeMove(move, MoveGen.bw); if (((MoveGen.WK & MoveGen.unsafeForWhite()) == 0 && MoveGen.bw) || ((MoveGen.BK & MoveGen.unsafeForBlack()) == 0 && !MoveGen.bw)) { if ((move & 0x00f00000) == 0) {// 'regular' move int start = move & 0x000000ff; if (MoveGen.bw) { if (((1L << start) & WKt) != 0) { MoveGen.CWK = false; MoveGen.CWQ = false; } if (((1L << start) & WRt & (1L << 63)) != 0) { MoveGen.CWK = false; } if (((1L << start) & WRt & (1L << 56)) != 0) { MoveGen.CWQ = false; } } else { if (((1L << start) & BKt) != 0) { MoveGen.CBK = false; MoveGen.CBQ = false; } if (((1L << start) & BRt & (1L << 7)) != 0) { MoveGen.CBK = false; } if (((1L << start) & BRt & 1L) != 0) { MoveGen.CBQ = false; } } } sc = -zWSearch(1 - beta,depth+1); } if (sc >= beta) { return sc; } MoveGen.CWK = CWKt;MoveGen.CWQ = CWQt;MoveGen.CBK = CBKt;MoveGen.CBQ = CBQt;MoveGen.bw = bwt; MoveGen.WP = WPt;MoveGen.WR = WRt;MoveGen.WN = WNt;MoveGen.WB = WBt;MoveGen.WQ = WQt;MoveGen.WK = 
WKt; MoveGen.BP = BPt;MoveGen.BR = BRt;MoveGen.BN = BNt;MoveGen.BB = BBt;MoveGen.BQ = BQt;MoveGen.BK = BKt; MoveGen.EP = EPt;MoveGen.EPc = EPct; } return beta - 1; } public static ArrayList<Integer> pvSearch(int alpha,int beta,int depth) { int bestScore, move, bestMoveIndex; ArrayList<Integer> R = new ArrayList<Integer>(); ArrayList<Integer> S = new ArrayList<Integer>(); R.add(0); R.add(0); long WPt, WRt, WNt, WBt, WQt, WKt, BPt, BRt, BNt, BBt, BQt, BKt, EPt, EPct; boolean CWKt,CWQt,CBKt,CBQt,bwt; CWKt = MoveGen.CWK;CWQt = MoveGen.CWQ;CBKt = MoveGen.CBK; CBQt = MoveGen.CBQ;bwt = MoveGen.bw;WPt = MoveGen.WP; WRt = MoveGen.WR;WNt = MoveGen.WN;WBt = MoveGen.WB; WQt = MoveGen.WQ;WKt = MoveGen.WK;BPt = MoveGen.BP; BRt = MoveGen.BR;BNt = MoveGen.BN;BBt = MoveGen.BB; BQt = MoveGen.BQ;BKt = MoveGen.BK;EPt = MoveGen.EP;EPct = MoveGen.EPc; if (depth == MoveGen.MaxDepth) { bestScore = evaluate(MoveGen.WP,MoveGen.WR,MoveGen.WN,MoveGen.WB, MoveGen.WQ,MoveGen.WK,MoveGen.BP, MoveGen.BR,MoveGen.BN,MoveGen.BB, MoveGen.BQ,MoveGen.BK); R.set(0, bestScore); R.set(1, null); return R; } ArrayList<Integer> moves = new ArrayList<Integer>(); if (MoveGen.bw) { moves=MoveGen.getPossibleWhiteMoves(); } else { moves=MoveGen.getPossibleWhiteMoves(); } MoveGen.orderMoves(moves); int topMove = MoveGen.getFirstLegalMove(moves); if (topMove == -1) { R.set(1, null); if(MoveGen.bw==true) R.set(0, CHECK_MATE); //else //R.set(0, -CHECK_MATE); return R; } move = moves.get(topMove); MoveGen.makeMove(move, MoveGen.bw); if ((move & 0x00f00000) == 0) { int start = move & 0x000000ff; if (MoveGen.bw) { if (((1L << start) & WKt) != 0) { MoveGen.CWK = false; MoveGen.CWQ = false; } if (((1L << start) & WRt & (1L << 63)) != 0) { MoveGen.CWK = false; } if (((1L << start) & WRt & (1L << 56)) != 0) { MoveGen.CWQ = false; } } else { if (((1L << start) & BKt) != 0) { MoveGen.CBK = false; MoveGen.CBQ = false; } if (((1L << start) & BRt & (1L << 7)) != 0) { MoveGen.CBK = false; } if (((1L << start) & BRt & 1L) != 0) { 
MoveGen.CBQ = false; } } } MoveGen.bw = !MoveGen.bw; R = pvSearch(-beta,-alpha,depth+1); bestScore = -R.get(0); R.set(0, bestScore); MoveGen.NodeCounter++; if (Math.abs(bestScore) == CHECK_MATE) { R.set(1, move); R.set(0, bestScore); return R; } if (bestScore > alpha) { bestMoveIndex = topMove; if (bestScore >= beta) { R.set(1, bestMoveIndex); R.set(0, bestScore); return R; } R.set(1, bestMoveIndex); R.set(0, bestScore); alpha = bestScore; } bestMoveIndex = topMove; MoveGen.CWK = CWKt;MoveGen.CWQ = CWQt;MoveGen.CBK = CBKt;MoveGen.CBQ = CBQt;MoveGen.bw = bwt; MoveGen.WP = WPt;MoveGen.WR = WRt;MoveGen.WN = WNt;MoveGen.WB = WBt;MoveGen.WQ = WQt;MoveGen.WK = WKt; MoveGen.BP = BPt;MoveGen.BR = BRt;MoveGen.BN = BNt;MoveGen.BB = BBt;MoveGen.BQ = BQt;MoveGen.BK = BKt; MoveGen.EP = EPt;MoveGen.EPc = EPct; for (int i=topMove+1;i<moves.size();i++) { CWKt = MoveGen.CWK;CWQt = MoveGen.CWQ;CBKt = MoveGen.CBK; CBQt = MoveGen.CBQ;bwt = MoveGen.bw;WPt = MoveGen.WP; WRt = MoveGen.WR;WNt = MoveGen.WN;WBt = MoveGen.WB; WQt = MoveGen.WQ;WKt = MoveGen.WK;BPt = MoveGen.BP; BRt = MoveGen.BR;BNt = MoveGen.BN;BBt = MoveGen.BB; BQt = MoveGen.BQ;BKt = MoveGen.BK;EPt = MoveGen.EP;EPct = MoveGen.EPc; move = moves.get(i); int score = Integer.MIN_VALUE; MoveGen.NodeCounter++; MoveGen.makeMove(move, MoveGen.bw); if (((MoveGen.WK & MoveGen.unsafeForWhite()) == 0 && MoveGen.bw) || ((MoveGen.BK & MoveGen.unsafeForBlack()) == 0 && !MoveGen.bw)) { if ((move & 0x00f00000) == 0) {// 'regular' move int start = move & 0x000000ff; if (MoveGen.bw) { if (((1L << start) & WKt) != 0) { MoveGen.CWK = false; MoveGen.CWQ = false; } if (((1L << start) & WRt & (1L << 63)) != 0) { MoveGen.CWK = false; } if (((1L << start) & WRt & (1L << 56)) != 0) { MoveGen.CWQ = false; } } else { if (((1L << start) & BKt) != 0) { MoveGen.CBK = false; MoveGen.CBQ = false; } if (((1L << start) & BRt & (1L << 7)) != 0) { MoveGen.CBK = false; } if (((1L << start) & BRt & 1L) != 0) { MoveGen.CBQ = false; } } } MoveGen.bw = !MoveGen.bw; 
score = -zWSearch(-alpha,depth+1); if ((score > alpha) && (score < beta)) { S = pvSearch(-beta,-alpha,depth+1); score = -S.get(0); MoveGen.NodeCounter++; if (score>alpha) { R.set(0, alpha); R.set(1, i); bestMoveIndex = i; alpha = score; } } if ((score != Integer.MIN_VALUE) && (score > bestScore)) { if (score >= beta) { R.set(0, score); R.set(1, i); return R; } bestScore = score; R.set(0, score); R.set(1, i); if (Math.abs(bestScore) == CHECK_MATE) { R.set(0, score); R.set(1, i); return R; } } } MoveGen.CWK = CWKt;MoveGen.CWQ = CWQt;MoveGen.CBK = CBKt;MoveGen.CBQ = CBQt;MoveGen.bw = bwt; MoveGen.WP = WPt;MoveGen.WR = WRt;MoveGen.WN = WNt;MoveGen.WB = WBt;MoveGen.WQ = WQt;MoveGen.WK = WKt; MoveGen.BP = BPt;MoveGen.BR = BRt;MoveGen.BN = BNt;MoveGen.BB = BBt;MoveGen.BQ = BQt;MoveGen.BK = BKt; MoveGen.EP = EPt;MoveGen.EPc = EPct; } return R; } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package stallone.cluster; import static stallone.api.API.*; import stallone.api.datasequence.IDataSequence; import stallone.api.doubles.IDoubleArray; import stallone.api.ints.IIntArray; /** * * @author noe */ public class FixedClustering extends AbstractRegularClustering { IIntArray data2cluster; public FixedClustering(IDataSequence _clusterCenters) { this.clusterCenters = _clusterCenters; } @Override public void perform() { // assign data this.voronoiPartitioning = discNew.voronoiDiscretization(clusterCenters); this.data2cluster = intsNew.array(datasize); int k=0; for (IDoubleArray y : data) this.data2cluster.set(k++, this.voronoiPartitioning.assign(y)); for (int i=0; i<this.clusterCenters.size(); i++) System.out.print(ints.count(data2cluster, i)+", "); // done this.resultsAvailable = true; } @Override public IIntArray getClusterIndexes() { throw new UnsupportedOperationException("Not supported yet."); } }
1. Field The present disclosure relates generally to a system and method for applying seasoning to a food item, and more particularly, to a system and method for applying seasoning to multiple surfaces of a food item and adjusting system parameters to achieve a desired distribution of seasoning. 2. Description of the Related Art Various technologies exist for applying seasoning or flavoring to a food item. FIGS. 1-4 illustrate example pieces of equipment for use in applying seasoning. For example, seasoning powder can be delivered from a seasoning hopper 110 or other storage container and distributed over a food item in a coating drum 112 or blender, or on a conveyor belt. In some systems, a scarf plate 120 is used to help improve product coverage and distribute seasoning more evenly. As shown in FIG. 3, an example scarf plate 120 can include an inclined tray 122 having side walls 124, a bias-cut downstream edge 126, and a weir plate 128. A scarf plate 120 can be coupled to a seasoning hopper 110, for example, as shown in FIG. 4. In use, the hopper 110 deposits seasoning onto an upstream portion of the tray 122, and the scarf plate 120 is vibrated to help move the seasoning down the tray. The weir plate 128 can help control the flow of seasoning down the tray 122. The seasoning falls off the tray 122 at the bias-cut edge 126, creating a curtain of seasoning that falls onto the food items as they tumble in a drum 112 or move along a conveyor.
Design and Implementation of a Distributed Conformance Test System Based on XML Test systems play an increasingly important role in the testing process. In this paper, the architecture of a distributed conformance test system is designed, and the main function of each part is introduced. The core part of this test system, called IMU-TNGIP, is designed and implemented in detail, from editor to executor. It comprises at least three parts: expression of the test suite, execution of the test suite, and logging. XML is chosen to express the test suites, including test cases and test steps. For demonstration, the Neighbor Discovery Protocol is used to show the structure of the test suite and the log window. Compared with other work, the characteristics of IMU-TNGIP are introduced, and future research directions are presented.
Viable-transformative inclusion: diverse means of agency by an adolescent with Specific Intellectual Educational Needs (SIEN) and his educators ABSTRACT Analyzing data from a project carried out during the Covid-19 pandemic, this paper is motivated by the viable transformation that the history of a student with specific intellectual educational needs (SIEN) - and the understanding that the educators working with him have of this history - allows for a prospective view of teaching-learning, one that focuses on possibilities rather than determinations, which would be the emphasis of a more linear, banking view of teaching-learning and development. It is based on multiple theoretical backgrounds, i.e., discussions about specific intellectual educational needs (Fidalgo & Carvalho, 2020; Carvalho et al., in press; Fidalgo & Magalhães, 2017), critical-collaborative research methodology (Magalhães, 2011, 2018; Magalhães & Fidalgo, 2019), multiliteracies (Magalhães & Carrijo, 2019), and transformative agency (Ninin & Magalhães, 2017) - all of which are organized within the Cultural-Historical Theory (Vygotsky, 1924-1934/1993).
////////////////////////////////////////////////////////// // GENERATED BY FLUTTIFY. DO NOT EDIT IT. ////////////////////////////////////////////////////////// package com.fluttify.xftts_fluttify.sub_handler; import android.os.Bundle; import android.util.Log; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import androidx.annotation.NonNull; import io.flutter.embedding.engine.plugins.FlutterPlugin; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.MethodChannel; import io.flutter.plugin.common.PluginRegistry.Registrar; import io.flutter.plugin.platform.PlatformViewRegistry; import com.fluttify.xftts_fluttify.XfttsFluttifyPlugin.Handler; import static me.yohom.foundation_fluttify.FoundationFluttifyPluginKt.getEnableLog; import static me.yohom.foundation_fluttify.FoundationFluttifyPluginKt.getHEAP; @SuppressWarnings("ALL") public class SubHandler6 { public static Map<String, Handler> getSubHandler(BinaryMessenger messenger) { return new HashMap<String, Handler>() {{ // factory put("ObjectFactory::create_batchcom_iflytek_cloud_VerifierResult__String", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // jsonable arg String var1 = (String) ((Map<String, Object>) 
args).get("var1"); // create target object com.iflytek.cloud.VerifierResult __obj__ = new com.iflytek.cloud.VerifierResult(var1); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_cloud_DataUploader__android_content_Context", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // ref arg android.content.Context var1 = (android.content.Context) getHEAP().get((int) ((Map<String, Object>) args).get("var1")); // create target object com.iflytek.cloud.DataUploader __obj__ = new com.iflytek.cloud.DataUploader(var1); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_cloud_RecognizerResult__String", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch 
instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // jsonable arg String var1 = (String) ((Map<String, Object>) args).get("var1"); // create target object com.iflytek.cloud.RecognizerResult __obj__ = new com.iflytek.cloud.RecognizerResult(var1); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_cloud_TranscripterResult__String", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // jsonable arg String var1 = (String) ((Map<String, Object>) args).get("var1"); // create target object com.iflytek.cloud.TranscripterResult __obj__ = new com.iflytek.cloud.TranscripterResult(var1); getHEAP().put(System.identityHashCode(__obj__), __obj__); // 
print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_cloud_Version__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.cloud.Version __obj__ = new com.iflytek.cloud.Version(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_cloud_UnderstanderResult__String", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side 
needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // jsonable arg String var1 = (String) ((Map<String, Object>) args).get("var1"); // create target object com.iflytek.cloud.UnderstanderResult __obj__ = new com.iflytek.cloud.UnderstanderResult(var1); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_msc_VAD_VadData__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.VAD.VadData __obj__ = new com.iflytek.msc.VAD.VadData(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_msc_MSCSessionInfo__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if 
(argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.MSCSessionInfo __obj__ = new com.iflytek.msc.MSCSessionInfo(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_msc_MetaVAD__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.MetaVAD __obj__ = new com.iflytek.msc.MetaVAD(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory 
put("ObjectFactory::create_batchcom_iflytek_msc_VAD__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.VAD __obj__ = new com.iflytek.msc.VAD(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_msc_AIMIC__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.AIMIC __obj__ = new com.iflytek.msc.AIMIC(); 
getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_msc_MSC__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.MSC __obj__ = new com.iflytek.msc.MSC(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); // factory put("ObjectFactory::create_batchcom_iflytek_msc_MetaVAD_Instance__", (argsBatch, methodResult) -> { List<Integer> __resultList__ = new ArrayList<>(); int __length__ = 0; // when batch size is 0, dart side will put a map with key 'length' to indicate the length // of this batch if (argsBatch instanceof Map) { __length__ = (Integer) ((Map<String, Object>) argsBatch).get("length"); } // or directly put the arg batch else if (argsBatch instanceof List) { __length__ = ((List<Map<String, Object>>) argsBatch).size(); } for (int __i__ = 0; __i__ < __length__; __i__++) { Map<String, Object> args = new HashMap<>(); // only when arg 
batch is not empty, java side needs to parse args; if (argsBatch instanceof List) { args = ((List<Map<String, Object>>) argsBatch).get(__i__); } // args // create target object com.iflytek.msc.MetaVAD.Instance __obj__ = new com.iflytek.msc.MetaVAD.Instance(); getHEAP().put(System.identityHashCode(__obj__), __obj__); // print current HEAP if (getEnableLog()) { Log.d("ObjectFactory", "HEAP: " + getHEAP()); } __resultList__.add(System.identityHashCode(__obj__)); } methodResult.success(__resultList__); }); }}; } }
/**
   If @a state is true, the gripper is drawn in floating mode too. By
   default, the gripper is not drawn in floating mode.

   When the bar is currently floating, the containing frame is resized to
   its new preferred size (the gripper adds/removes space) and repainted.
*/
void DockBar::setFloatingGripper(bool state)
{
  m_floatingGripper = state;

  if (isFloating()) {
    // showing/hiding the gripper changes the preferred size of the floating
    // frame, so resize it to fit and force a repaint.
    // NOTE(review): assumes m_dockFrame is non-null whenever isFloating()
    // returns true -- confirm against DockBar::isFloating(). The meaning of
    // invalidate(true) (presumably "erase background before repaint") should
    // be confirmed against the Widget::invalidate documentation.
    m_dockFrame->setSize(m_dockFrame->getPreferredSize());
    m_dockFrame->invalidate(true);
  }
}
package uk.nhs.digital.nhsconnect.nhais.inbound.jsonpatch;

import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import uk.nhs.digital.nhsconnect.nhais.inbound.jsonpatch.mapper.PatchTransactionMapper;
import uk.nhs.digital.nhsconnect.nhais.model.edifact.PersonName;
import uk.nhs.digital.nhsconnect.nhais.model.edifact.Transaction;
import uk.nhs.digital.nhsconnect.nhais.model.edifact.message.EdifactValidationException;
import uk.nhs.digital.nhsconnect.nhais.model.jsonpatch.AmendmentBody;
import uk.nhs.digital.nhsconnect.nhais.model.jsonpatch.AmendmentPatch;

import java.util.Comparator;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Converts an inbound EDIFACT amendment {@link Transaction} into an
 * {@link AmendmentBody} (a JSON Patch document) by running every registered
 * {@link PatchTransactionMapper} over the transaction.
 */
@Component
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class EdifactToPatchService {

    /** All mappers discovered by Spring; each contributes at most one patch. */
    private final List<PatchTransactionMapper> patchTransactionMappers;

    /**
     * Builds the JSON Patch amendment body for the given transaction.
     *
     * @param transaction inbound EDIFACT transaction
     * @return amendment body carrying identifiers and the produced patches
     * @throws EdifactValidationException if no mapper produced a patch or
     *         the mandatory NHS number is missing
     */
    public AmendmentBody convertToPatch(Transaction transaction) {
        var nhsNumber = getNhsNumber(transaction);
        var gpCode = getGpCode(transaction);
        var gpTradingPartnerCode = getGpTradingPartnerCode(transaction);
        var healthcarePartyCode = getHealthcarePartyCode(transaction);
        var patches = getPatches(transaction);

        return AmendmentBody.builder()
            .nhsNumber(nhsNumber)
            .gpCode(gpCode)
            .gpTradingPartnerCode(gpTradingPartnerCode)
            .healthcarePartyCode(healthcarePartyCode)
            .patches(patches)
            .build();
    }

    /** Runs all mappers, drops nulls, and returns the patches sorted by path. */
    private List<AmendmentPatch> getPatches(Transaction transaction) {
        var amendmentPatches = patchTransactionMappers.stream()
            .map(patchTransactionMapper -> patchTransactionMapper.map(transaction))
            .filter(Objects::nonNull)
            .sorted(Comparator.comparing(AmendmentPatch::getPath))
            .collect(Collectors.toList());

        if (amendmentPatches.isEmpty()) {
            // BUG FIX: message previously read "No patches has been produces."
            throw new EdifactValidationException("No patches have been produced.");
        }

        return amendmentPatches;
    }

    private String getHealthcarePartyCode(Transaction transaction) {
        return transaction.getMessage().getHealthAuthorityNameAndAddress().getIdentifier();
    }

    private String getGpTradingPartnerCode(Transaction transaction) {
        return transaction.getMessage().getInterchange().getInterchangeHeader().getRecipient();
    }

    private String getGpCode(Transaction transaction) {
        return transaction.getGpNameAndAddress().getIdentifier();
    }

    private String getNhsNumber(Transaction transaction) {
        return transaction.getPersonName()
            .map(PersonName::getNhsNumber)
            .orElseThrow(() -> new EdifactValidationException("Missing mandatory NHS number"));
    }
}
def largest_prime_power(n):
    """Return ``(p, e)`` where prime ``p`` has the maximal exponent ``e``
    in the factorisation of ``n``.

    Falls back to ``(n, 1)`` when no prime factor <= sqrt(n) occurs more
    than once (covers ``n`` prime, ``n == 1`` and square-free ``n``).
    """
    best_prime, best_exp = n, 1
    remainder = n  # progressively stripped of found factors
    divisor = 2
    while divisor * divisor <= n:
        exp = 0
        while remainder % divisor == 0:
            remainder //= divisor
            exp += 1
        if exp > best_exp:
            best_prime, best_exp = divisor, exp
        # Composite divisors never divide `remainder` here, because all of
        # their prime factors have already been stripped out.
        divisor += 1
    return best_prime, best_exp


def main():
    """Read `t` test cases; for each `n`, print the max exponent `e`, then
    `e - 1` copies of the dominant prime followed by the remaining cofactor
    (so the printed factors multiply back to `n`)."""
    t = int(input())
    for _ in range(t):
        n = int(input())
        p, e = largest_prime_power(n)
        print(e)
        for _ in range(e - 1):
            print(p, end=' ')
        print(n // (p ** (e - 1)))


if __name__ == "__main__":
    main()
Call for Papers: Special Issue of International Journal of Adaptive Control and Signal Processing on Set-membership methods applied to FDI and FTC Fault detection and isolation (FDI) and Fault-tolerant control (FTC) possess the ability to automatically detect/isolate and accommodate faults and failures in engineering systems while maintaining the overall system stability and acceptable performance. FDI and FTC are desirable in a wide range of systems, from marine systems, mechatronics, electric power (generation, transmission, and distribution) including offshore wind technology, to chemical processes as well as biomedical and other industrial applications involving control. The goal is to maintain functional integrity, reliability and performance using robustness to faults, fault compensation/accommodation or through system redundancy and reconfiguration.
Kevin Plank, the billionaire founder of Under Armour, spoke at the Baron Investment Conference at the Metropolitan Opera House at Lincoln Center in Manhattan last Friday. He had two very awesome words for people who worry about fiscal cliffs and other headwinds. "Today we're not going to talk about fiscal cliffs. We're not going to talk about the negative. We're not going to talk about something we coined at Under Armour 'loser talk,' which are all the things we can't control," he said. This is refreshing. All of these business leaders have been talking about uncertainty and taxes and the fiscal cliff and so on. Finally someone is stepping up and saying he's just gonna do his work, and stop whining.
def open_file_externally(self, filename):
    """Open *filename* with the platform's default external application.

    Dispatches on ``platform.system()``: Windows uses the ``start`` shell
    builtin, Linux uses ``xdg-open``, and macOS uses ``open``.  The viewer
    process is launched asynchronously and not waited on.

    Raises:
        NotImplementedError: if the current OS is not one of the above.
    """
    system = platform.system()
    if system == 'Windows':
        # 'start' is a cmd.exe builtin, hence shell=True; the empty ''
        # argument is the window-title placeholder that 'start' expects.
        subprocess.Popen(['start', '', filename], shell=True)
    elif system == 'Linux':
        subprocess.Popen(['xdg-open', filename])
    elif system == 'Darwin':
        subprocess.Popen(['open', filename])
    else:
        # NotImplementedError subclasses Exception, so existing callers
        # catching the old bare Exception keep working.
        raise NotImplementedError(
            "{cls} method open_file_externally does not support {system} OS".format(
                cls=type(self).__name__, system=system))
Statistical Independence and Determinants in a Contingency Table - Interpretation of Pearson Residuals based on Linear Algebra - This paper analyzes Pearson residuals, which are an important element of the chi-square test statistic, in a contingency table from the viewpoint of matrix theory as follows. First, a given contingency table is viewed as a matrix and the residual of each element in the matrix is obtained as the difference between observed values and expected values calculated by marginal distributions. Then, each residual $r_{ij}$ is decomposed into the linear sum of the $2 \times 2$ subdeterminants of the original matrix, except for the i-th column and j-th row. Furthermore, the number of the determinants is equal to the degrees of freedom of the chi-square test statistic for a given contingency table. Thus, $2 \times 2$ subdeterminants in a contingency matrix determine the degree of statistical independence of two attributes as elementary granules.
<gh_stars>0
package ua.com.lsd25.jms;

import ua.com.lsd25.service.ApplicationException;

/**
 * JMS-backed messaging contract for controlling music playback by track id.
 *
 * @author vzagnitko
 */
public interface PlayMusicMessage {

    /**
     * Sends a message requesting playback of the given track.
     *
     * @param musicId identifier of the track to play
     * @throws ApplicationException if the message cannot be sent
     */
    void sendMessageToPlayMusic(long musicId) throws ApplicationException;

    /**
     * Sends a message requesting that playback of the given track stops.
     *
     * @param musicId identifier of the track to stop
     * @throws ApplicationException if the message cannot be sent
     */
    void sendMessageToStopMusic(long musicId) throws ApplicationException;

    /**
     * Sends a message requesting that playback of the given track is paused.
     *
     * @param musicId identifier of the track to suspend
     * @throws ApplicationException if the message cannot be sent
     */
    void sendMessageToSuspendMusic(long musicId) throws ApplicationException;
}
/* * Copyright (c) 2017 - 2020, GÉANT * * Licensed under the Apache License, Version 2.0 (the “License”); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an “AS IS” BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.geant.security.jwk; import org.opensaml.security.credential.BasicCredential; import com.nimbusds.jose.Algorithm; /** * A basic implementation of {@link JWKCredential}. */ public class BasicJWKCredential extends BasicCredential implements JWKCredential { /** jwk algorithm. */ private Algorithm jwkAlgorithm; /** jwk kid. */ private String jwkKid; /** * Set the kid of jwk. * * @param kid kid of jwk */ public void setKid(String kid) { jwkKid = kid; } /** {@inheritDoc} */ @Override public String getKid() { return jwkKid; } /** * Set the algorithm of jwk. * * @param algorithm algorithm of jwk. */ public void setAlgorithm(Algorithm algorithm) { jwkAlgorithm = algorithm; } /** {@inheritDoc} */ @Override public Algorithm getAlgorithm() { return jwkAlgorithm; } }
<reponame>hoho20000000/gem5-fy<gh_stars>0 #include "sim/init.hh" namespace { const uint8_t data_m5_internal_param_LinuxX86System[] = { 120,156,197,88,235,79,28,201,17,175,158,125,177,192,194,98, 176,241,243,24,199,198,94,191,192,143,227,226,228,28,43,126, 112,145,165,51,135,6,75,182,73,164,209,176,211,187,12,222, 157,89,205,52,182,247,4,82,18,172,188,164,124,200,183,252, 17,247,37,202,31,152,84,85,207,12,13,44,182,149,199,30, 176,77,109,119,117,119,85,215,175,30,221,77,72,127,174,225, 231,151,54,64,242,15,11,192,199,63,1,111,0,58,2,214, 45,16,210,2,127,26,182,74,16,157,1,225,151,224,3,192, 122,1,100,1,246,144,40,194,175,11,16,222,211,92,51,57, 87,101,16,87,13,7,112,237,17,120,83,228,46,11,250,163, 32,75,176,94,134,151,225,52,20,101,5,182,70,33,170,128, 192,159,16,119,126,213,175,67,58,99,4,214,171,200,53,135, 92,163,204,53,205,92,233,104,149,70,121,134,95,5,127,20, 62,160,228,99,224,143,177,20,227,224,143,51,81,3,191,198, 196,4,248,19,76,76,102,203,215,97,125,42,163,79,24,244, 180,65,207,24,244,73,131,62,101,208,179,6,125,218,160,207, 24,244,89,131,62,103,208,231,13,250,130,65,127,97,208,115, 6,109,27,244,69,131,254,137,65,95,50,232,203,6,61,111, 208,87,12,250,170,65,55,12,250,154,65,95,55,232,27,6, 125,211,160,111,49,61,9,114,10,130,5,8,22,33,184,13, 45,1,126,157,142,29,17,241,122,253,14,200,34,4,119,97, 253,46,72,252,187,3,123,2,225,49,101,204,184,199,51,78, 228,51,190,228,25,75,176,190,4,18,255,190,212,51,70,96, 173,113,18,225,27,252,11,127,26,2,41,53,142,205,91,25, 39,65,20,186,65,216,138,2,139,198,43,212,16,216,155,212, 20,240,83,198,207,19,66,125,12,12,121,148,29,81,191,139, 43,8,192,57,190,69,59,248,5,56,179,43,232,75,80,128, 29,36,138,208,226,129,160,152,114,236,34,142,167,96,7,87, 47,193,14,247,172,189,12,47,64,81,149,25,168,83,12,84, 61,140,147,105,24,97,10,40,118,9,183,93,97,185,21,201, 189,192,210,41,82,198,237,121,177,215,117,191,13,194,237,247, 175,238,127,181,214,79,148,236,54,72,3,85,37,53,186,189, 
40,86,157,96,67,141,16,179,27,122,93,233,186,106,20,191, 196,56,83,5,10,85,87,69,252,186,21,5,161,34,61,59, 137,138,131,158,170,229,179,221,110,228,111,119,164,26,195,158, 103,220,179,28,199,81,220,160,131,113,168,81,212,244,222,180, 21,137,217,165,45,26,36,31,55,201,43,108,22,55,163,174, 196,38,108,247,183,23,219,178,187,116,171,213,95,220,216,14, 58,254,34,202,236,62,95,94,123,230,190,120,23,185,223,202, 183,178,179,216,235,43,100,93,236,46,45,162,68,50,14,61, 236,26,160,228,2,242,157,160,29,222,5,109,55,149,116,83, 118,122,50,38,197,147,9,218,93,140,139,105,241,133,40,136, 41,49,33,130,114,102,82,58,158,90,102,210,31,82,147,90, 105,32,67,171,138,212,196,22,236,50,65,118,107,144,73,201, 146,5,50,32,170,138,230,105,11,216,179,224,55,5,98,216, 197,182,136,113,199,206,205,57,163,227,142,94,170,2,187,104, 243,18,89,244,251,243,188,212,8,47,101,193,14,182,104,236, 34,236,98,112,67,86,236,194,118,171,10,209,4,8,252,18, 84,9,212,34,68,8,191,218,41,35,24,138,57,24,52,136, 73,27,63,136,233,220,29,194,111,99,52,235,141,146,133,158, 167,54,157,90,102,36,60,38,54,246,74,20,106,123,182,130, 208,207,236,171,17,210,10,58,136,16,135,206,144,87,99,182, 78,228,229,108,100,228,102,39,74,36,163,140,215,118,38,137, 145,184,91,61,94,134,118,37,121,120,178,47,147,38,33,10, 145,166,87,36,9,104,181,33,161,196,33,47,159,161,93,206, 50,38,234,136,138,50,98,162,129,152,208,212,121,171,38,38, 197,74,64,199,217,44,165,254,94,204,0,242,79,208,70,17, 176,101,177,147,238,112,120,64,110,52,29,59,233,14,135,0, 26,189,9,66,89,105,63,70,1,180,48,245,158,192,57,12, 27,196,15,242,62,32,159,102,131,18,14,74,128,192,212,70, 71,48,105,148,176,233,75,52,131,150,178,104,139,34,244,102, 113,241,17,194,195,14,164,192,217,43,32,48,80,34,116,104, 12,24,216,125,10,247,253,61,35,46,13,26,140,3,181,25, 36,209,59,237,231,68,115,220,91,67,191,89,237,127,183,177, 37,155,42,153,195,142,215,209,182,221,244,194,48,82,182,231, 251,182,167,48,14,108,108,43,153,216,42,178,231,147,6,217, 
210,57,155,65,41,95,175,223,147,14,19,26,63,126,208,84, 24,97,166,249,11,251,102,34,21,34,97,51,242,19,236,167, 169,109,169,156,58,205,160,99,142,88,0,6,138,75,172,180, 45,242,145,251,62,202,36,208,241,166,156,97,39,145,157,22, 135,176,102,199,75,18,151,36,224,126,70,28,105,253,214,235, 108,75,94,61,193,245,80,32,34,181,12,195,138,76,167,73, 159,76,125,214,41,140,66,191,143,34,6,205,123,180,251,105, 198,98,13,35,83,77,156,194,79,85,156,20,21,68,100,69, 204,90,205,98,138,191,60,239,156,34,221,129,13,47,82,219, 35,30,247,48,158,52,44,14,7,172,22,225,215,185,78,20, 77,118,46,83,51,79,205,21,106,174,102,154,15,65,253,218, 97,245,159,208,150,22,235,220,44,164,218,229,30,230,30,240, 176,9,195,195,246,200,83,118,56,201,6,5,195,75,10,116, 2,209,88,230,87,236,133,104,122,244,66,98,102,127,194,220, 107,122,3,109,186,226,156,33,49,46,98,115,117,62,185,106, 107,236,217,155,94,98,135,209,62,224,109,26,212,209,141,224, 238,156,167,195,55,0,221,54,0,237,216,196,65,104,118,46, 81,83,60,238,244,175,253,40,167,223,214,167,255,43,218,114, 60,69,220,4,35,109,76,52,9,46,100,146,74,102,135,53, 36,250,179,100,7,211,0,179,152,4,95,134,231,49,175,177, 17,40,181,213,116,106,227,252,168,139,233,44,188,5,165,140, 40,147,41,90,5,56,149,102,172,132,82,74,47,142,222,247, 237,168,101,43,200,68,122,48,159,44,204,39,95,99,132,177, 31,238,31,122,26,77,98,217,163,104,160,163,3,29,141,10, 66,252,78,75,45,191,111,74,78,42,252,205,117,117,48,208, 181,141,155,38,43,180,15,27,196,202,12,194,225,16,11,28, 138,130,195,178,198,104,110,13,210,102,149,246,27,101,83,20, 196,44,186,191,97,8,250,20,200,16,4,182,63,2,87,181, 2,254,0,116,204,120,152,169,175,179,255,100,62,52,77,236, 84,212,236,136,129,25,202,74,125,195,74,99,7,58,79,175, 198,137,39,205,88,88,165,252,201,8,44,121,70,41,164,149, 142,233,67,197,220,135,216,70,159,149,53,138,7,221,136,206, 31,253,141,216,216,97,116,21,121,249,96,144,226,178,166,192, 145,94,13,193,64,35,122,39,151,132,122,189,111,30,138,205, 
23,196,140,165,113,194,16,250,41,53,247,115,79,22,89,223, 255,87,190,185,195,193,212,200,37,174,142,68,180,127,82,100, 177,39,43,156,60,15,46,195,101,47,253,80,122,124,76,152, 185,139,132,196,139,173,0,201,81,245,3,87,193,212,90,100, 249,61,75,224,165,28,139,138,15,124,41,215,119,111,71,23, 21,140,215,236,195,129,131,2,206,129,152,109,156,89,110,123, 109,86,106,222,15,203,245,200,178,15,58,94,119,195,247,30, 254,142,118,163,45,155,153,171,89,153,248,117,83,124,114,18, 113,140,6,252,245,235,76,141,183,195,42,96,31,224,226,185, 248,236,24,126,212,228,32,241,98,83,218,93,217,221,192,59, 236,102,208,179,91,29,175,205,214,41,164,234,125,151,169,167, 216,188,134,47,115,40,73,168,88,88,137,236,102,20,98,80, 220,110,170,40,182,125,137,151,2,233,219,183,108,142,168,118, 144,216,222,6,142,122,77,165,1,127,208,109,185,242,242,226, 118,194,69,214,155,119,68,14,211,188,46,94,220,3,172,56, 247,32,47,48,116,22,225,192,67,165,22,187,131,246,31,76, 63,120,63,84,125,29,195,30,81,179,68,205,34,152,201,121, 8,22,253,57,46,254,91,218,133,14,173,44,206,89,85,75, 205,28,241,218,85,154,157,28,245,221,141,207,241,93,89,132, 245,82,230,193,101,226,148,21,186,117,82,91,165,60,176,142, 151,6,253,70,55,198,157,227,252,238,85,78,223,189,208,215, 43,255,165,175,179,167,12,211,71,254,242,191,116,113,231,23, 63,150,244,206,67,72,107,128,227,220,91,152,170,213,180,123, 7,34,171,132,249,240,87,180,38,124,225,62,119,12,178,220, 102,44,61,37,181,177,46,15,79,93,14,21,122,239,191,238, 187,109,94,6,149,50,205,190,201,53,219,227,26,168,63,195, 54,204,30,216,232,85,143,31,122,21,215,157,84,152,214,245, 19,26,31,133,107,165,181,41,228,71,82,206,143,132,174,95, 161,124,231,14,58,22,93,128,18,175,215,235,201,208,119,110, 211,180,59,96,22,146,204,51,44,116,80,160,250,27,228,133, 201,56,86,142,51,88,156,28,245,77,138,121,134,186,108,215, 122,238,142,67,180,48,3,250,239,25,160,27,244,62,180,31, 144,157,199,212,112,8,206,163,175,179,156,155,230,210,113,104, 
149,247,239,222,118,149,183,129,117,61,222,107,62,139,15,11, 38,174,191,247,187,248,48,217,35,124,217,145,74,14,68,128, 162,67,75,111,123,190,196,228,23,245,241,102,81,225,78,156, 230,186,67,77,22,116,119,254,51,164,111,154,152,44,68,25, 211,197,73,171,90,174,10,206,195,135,94,173,141,190,114,222, 247,21,100,37,117,63,113,168,71,209,249,167,89,145,69,113, 205,135,112,126,51,211,230,224,247,188,44,111,146,229,248,190, 181,226,117,245,179,12,143,167,55,179,68,123,10,191,29,82, 101,225,220,160,230,86,110,232,159,209,236,47,176,233,46,45, 100,90,47,104,173,81,225,101,180,209,11,50,17,191,36,118, 151,184,238,53,57,83,158,229,80,197,125,148,150,170,149,143, 175,198,156,108,235,163,44,107,65,87,191,125,169,169,67,227, 126,236,33,125,242,80,111,34,227,192,235,4,223,203,99,214, 203,45,166,46,28,55,254,232,201,234,51,103,237,233,234,39, 57,94,168,43,71,85,167,49,220,225,169,76,154,124,74,217, 9,204,127,108,49,115,194,199,183,125,69,219,222,56,142,227, 25,246,116,158,175,126,211,137,60,180,115,123,53,98,22,117, 245,19,252,79,162,176,21,180,245,238,55,143,170,148,114,61, 246,18,169,57,15,152,246,83,194,28,154,118,132,127,159,115, 249,189,26,176,254,245,79,172,127,112,214,17,147,236,155,253, 249,227,32,74,116,171,85,29,32,137,201,180,198,165,246,118, 44,63,41,201,192,89,156,187,6,0,90,163,239,48,58,101, 184,221,117,159,203,110,20,247,159,71,190,84,231,15,141,63, 242,253,216,241,194,118,46,204,197,195,12,105,241,175,215,200, 184,236,129,50,28,228,61,198,83,112,48,245,188,195,14,174, 199,159,116,162,230,27,233,167,60,131,65,203,60,79,163,46, 121,42,5,160,108,88,81,166,57,236,144,20,143,242,111,92, 122,15,168,136,56,20,198,178,29,36,4,236,73,115,74,90, 21,80,24,230,211,25,152,116,204,233,195,204,14,250,142,171, 159,187,30,234,215,45,108,234,244,162,60,82,21,21,250,63, 137,255,45,44,26,172,130,24,21,19,162,132,255,235,248,127, 202,26,175,87,139,213,42,242,141,141,139,255,244,119,14,179, 208,168,53,87,175,138,127,3,104,211,116,178, }; EmbeddedPython 
embedded_m5_internal_param_LinuxX86System( "m5/internal/param_LinuxX86System.py", "/home/hongyu/gem5-fy/build/X86_MESI_Two_Level/python/m5/internal/param_LinuxX86System.py", "m5.internal.param_LinuxX86System", data_m5_internal_param_LinuxX86System, 2652, 8228); } // anonymous namespace
import json
import unittest

from faker import Factory
from flask import jsonify

from app.test import test_setup


class TestUser(test_setup.TestSetUp):
    """Test user creation and login.

    NOTE(review): the original file contained redacted ``<PASSWORD>``
    placeholders (invalid syntax).  They have been reconstructed below as
    faker-generated passwords / fixture credentials -- confirm against the
    version history.
    """

    def test_it_creates_a_user(self):
        fakes = Factory.create()
        self.username = fakes.user_name()
        self.password = fakes.password()
        response = self.app.post(
            "/auth/register",
            content_type="application/json",
            data=json.dumps({"username": self.username,
                             "password": self.password}))
        self.assertEqual(response.status_code, 201)

    def test_reg_fails_if_username_empty(self):
        fakes = Factory.create()
        self.username = fakes.user_name()
        response = self.app.post(
            "/auth/register",
            content_type="application/json",
            data=json.dumps({"password": fakes.password()}))
        self.assertEqual(response.status_code, 400)

    def test_reg_fails_if_password_empty(self):
        fakes = Factory.create()
        self.username = fakes.user_name()
        response = self.app.post(
            "/auth/register",
            content_type="application/json",
            data=json.dumps({"username": self.username}))
        self.assertEqual(response.status_code, 400)

    def test_reg_fails_if_username_already_exists(self):
        # assumes self.username / self.password are provisioned by
        # test_setup.TestSetUp -- TODO confirm
        response = self.app.post(
            "/auth/register",
            content_type="application/json",
            data=json.dumps({"username": self.username,
                             "password": self.password}))
        self.assertEqual(response.status_code, 409)

    def test_login_succeeds_if_details_are_correct(self):
        # BUG FIX: method was named 'login_succeeds_...' without the
        # 'test_' prefix, so unittest silently never ran it.
        response = self.app.post(
            "/auth/login",
            content_type="application/json",
            data=json.dumps({"username": self.username,
                             "password": self.password}))
        self.assertEqual(response.status_code, 200)

    def test_login_fails_if_username_blank(self):
        response = self.app.post(
            "/auth/login",
            content_type="application/json",
            data=json.dumps({"password": self.password}))
        self.assertEqual(response.status_code, 400)

    def test_login_fails_if_password_is_blank(self):
        response = self.app.post(
            "/auth/login",
            content_type="application/json",
            data=json.dumps({"username": self.username}))
        self.assertEqual(response.status_code, 400)

    def tearDown(self):
        # BUG FIX: was misspelled 'tearown', so the database was never
        # cleaned up between tests.  'db' was referenced but not imported
        # in the original -- presumably exported by the 'app' package;
        # confirm the import path.
        from app import db
        db.session.remove()
        db.drop_all()


if __name__ == "__main__":
    unittest.main()
<reponame>Wikidata/QueryAnalysis<filename>src/main/java/query/factories/package-info.java<gh_stars>10-100 /** * @author adrian */ package query.factories;
/* * Copyright © 2013-2018 camunda services GmbH and various authors (<EMAIL>) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.rest.security.auth.impl; import org.camunda.bpm.engine.ProcessEngine; import org.camunda.bpm.engine.impl.digest._apacheCommonsCodec.Base64; import org.camunda.bpm.engine.rest.security.auth.AuthenticationProvider; import org.camunda.bpm.engine.rest.security.auth.AuthenticationResult; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.HttpHeaders; /** * <p> * Authenticates a request against the provided process engine's identity service by applying http basic authentication. 
* </p> * * @author <NAME> */ public class HttpBasicAuthenticationProvider implements AuthenticationProvider { protected static final String BASIC_AUTH_HEADER_PREFIX = "Basic "; @Override public AuthenticationResult extractAuthenticatedUser(HttpServletRequest request, ProcessEngine engine) { String authorizationHeader = request.getHeader(HttpHeaders.AUTHORIZATION); if (authorizationHeader != null && authorizationHeader.startsWith(BASIC_AUTH_HEADER_PREFIX)) { String encodedCredentials = authorizationHeader.substring(BASIC_AUTH_HEADER_PREFIX.length()); String decodedCredentials = new String(Base64.decodeBase64(encodedCredentials)); int firstColonIndex = decodedCredentials.indexOf(":"); if (firstColonIndex == -1) { return AuthenticationResult.unsuccessful(); } else { String userName = decodedCredentials.substring(0, firstColonIndex); String password = decodedCredentials.substring(firstColonIndex + 1); if (isAuthenticated(engine, userName, password)) { return AuthenticationResult.successful(userName); } else { return AuthenticationResult.unsuccessful(userName); } } } else { return AuthenticationResult.unsuccessful(); } } protected boolean isAuthenticated(ProcessEngine engine, String userName, String password) { return engine.getIdentityService().checkPassword(userName, password); } @Override public void augmentResponseByAuthenticationChallenge( HttpServletResponse response, ProcessEngine engine) { response.setHeader(HttpHeaders.WWW_AUTHENTICATE, BASIC_AUTH_HEADER_PREFIX + "realm=\"" + engine.getName() + "\""); } }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.watcher.trigger.schedule; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; public class IntervalScheduleTests extends ESTestCase { public void testParseNumber() throws Exception { long value = randomIntBetween(0, Integer.MAX_VALUE); XContentBuilder builder = jsonBuilder().value(value); BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); // advancing to the start object IntervalSchedule schedule = new IntervalSchedule.Parser().parse(parser); assertThat(schedule, notNullValue()); assertThat(schedule.interval().seconds(), is(value)); } public void testParseNegativeNumber() throws Exception { long value = randomIntBetween(Integer.MIN_VALUE, 0); XContentBuilder builder = jsonBuilder().value(value); BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); // advancing to the start object try { new IntervalSchedule.Parser().parse(parser); fail("exception expected, because interval is negative"); } catch (ElasticsearchParseException e) { assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); 
assertThat(e.getCause().getMessage(), containsString("interval can't be lower than 1000 ms, but")); } } public void testParseString() throws Exception { IntervalSchedule.Interval value = randomTimeInterval(); XContentBuilder builder = jsonBuilder().value(value); BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); // advancing to the start object IntervalSchedule schedule = new IntervalSchedule.Parser().parse(parser); assertThat(schedule, notNullValue()); assertThat(schedule.interval(), is(value)); } public void testParseInvalidString() throws Exception { XContentBuilder builder = jsonBuilder().value("43S"); BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); // advancing to the start object try { new IntervalSchedule.Parser().parse(parser); fail("Expected ElasticsearchParseException"); } catch (ElasticsearchParseException e) { assertThat(e.getMessage(), containsString("unrecognized interval format [43S]")); } } public void testParseInvalidObject() throws Exception { XContentBuilder builder = jsonBuilder().startObject().endObject(); BytesReference bytes = BytesReference.bytes(builder); XContentParser parser = createParser(JsonXContent.jsonXContent, bytes); parser.nextToken(); // advancing to the start object try { new IntervalSchedule.Parser().parse(parser); } catch (ElasticsearchParseException e) { assertThat( e.getMessage(), containsString("expected either a numeric value (millis) or a string value representing time value") ); assertThat(e.getMessage(), containsString("found [START_OBJECT]")); } } private static IntervalSchedule.Interval randomTimeInterval() { int randomSize = randomIntBetween(0, IntervalSchedule.Interval.Unit.values().length - 1); IntervalSchedule.Interval.Unit unit = IntervalSchedule.Interval.Unit.values()[randomSize]; return new 
IntervalSchedule.Interval(randomIntBetween(1, 100), unit); } }
# -*- coding: utf-8 -*- from . clio_infra import ClioInfraLoader from . ihme import IHMELoader from . ilo import ILOLoader from . oecd import OECDLoader from . worldbank import WorldBankLoader
Corneal Graft Rejection after Penetrating Keratoplasty for Keratoconus in Turner's Syndrome Purpose To report a patient with Turner's syndrome who developed graft rejection after penetrating keratoplasty (PK) for keratoconus and to review the ophthalmic literature on the association between keratoconus and Turner's syndrome. Methods A woman with bilateral keratoconus and Turner's syndrome (45,XO) was referred for progressive visual loss in the right eye. Best-corrected visual acuity was 20/400 in the right eye. Slit-lamp examination revealed corneal thinning with ectatic protrusion of the central cornea and Vogt's striae in the right eye. The patient underwent PK in the right eye in January 2001. She developed graft rejection in April 2003 and visual acuity dropped to hand motion. After treatment with topical and systemic steroids and systemic cyclosporine A, visual acuity recovered to 20/80 in July 2003. Results The authors know of only three other reported patients (six eyes) with keratoconus in Turner's syndrome. Five eyes underwent PK with good visual rehabilitation, but one developed immunologic graft rejection 7 years after surgery. On the whole, considering the current report and the other cases described in the literature, graft rejection occurred in 2 out of 6 eyes (33.3%). The graft survival rate was 80% after 2 years and 40% after 7 years. Conclusions The results suggest that grafts for keratoconus in patients with Turner's syndrome might have an increased risk of immunologic rejection. Corneal grafts in Turner's syndrome need to be monitored closely. Early detection of graft rejection and aggressive treatment with topical and systemic steroids and systemic cyclosporine A can save the graft and restore useful vision.
Tibco's latest package includes components that make it easier for firms to tie older, mainframe-based apps to current software. Infrastructure applications provider Tibco Software will launch its bid for a portion of the emerging market for integration tools tomorrow with a bevy of new offerings. The company's software has served as a driving engine behind real-time information dissemination, such as stock quotes. Tibco has carved out a niche for itself in the financial, energy, and manufacturing industries. Others, such as Oberon Software and CrossWorlds Software, also have targeted this niche in the software market. The latest package from the software provider--called TIB/Active Enterprise--includes multiple software components that essentially make it easier for corporations to tie older mainframe-based applications to current desktop and server-based software, regardless of the underlying operating system. The tools include a message broker that serves as the "nerve center" for communications between these disparate applications, according to Tibco executives. This piece of software takes messages from one application and translates them so another application can understand the communication. Along with this base tool, the company will launch a series of "connectors" and "adapters" for specific software packages such as business applications from PeopleSoft and Baan. Initial "connectors" for IBM's MQ Series and Web-based applications are already available. Others will roll out in the second quarter of this year. Other components of the suite include a management console and existing software from the company that ensures message delivery. Mark Bowles, chief technology officer for Tibco Software, said customers have told Tibco that the biggest problem they are having is integrating various applications packages so they can work together, rather than functioning separately from other complementary systems.
A software developer's kit is available now, with prices starting at $2,500. The rest of the tools will ship in the second quarter with prices based on the number of servers in the organization that take advantage of the software.
Funny thing about being a beer writer: I receive more books about beer than actual beer. The majority of them — the ones that open with "the differences between ales and lagers" — are a bore. Lucky for me, then, that three recent releases are anything but. If you're looking to give something to yourself or the beer-lover in your life this holiday season, you might want to consider picking one of these up. After all, a book about beer is the next-best thing to beer. Trust me, I know. The major release of the year is the highly-anticipated The Oxford Companion to Beer (Oxford University Press, $65). Big both in size (a whopping 960 pages, or four pounds) and in concept, the OCB is the result of years of research by editor Garrett Oliver, brewmaster of Brooklyn Brewery, and his team of 166 contributors from every corner of the ale world. Even Top Chef head judge Tom Colicchio drops in with a food-focused foreword. The encyclopedic chapters include everything from history (there's a seven-page entry on Germany) to personalities and styles. In other words, there's a little bit for everyone, and it's easy to drop in anywhere you want. How else would I have found out that in Japan, a man will often declare "Toriaezu biiru!" ("I'll start with a beer!") when he enters a bar? Yes, the price tag is steep (you can find it cheaper online), but the insight is worth it. And if you pick up a box set in person at Brooklyn Brewery, you even get — yes — a free beer. Greg Koch isn't shy about promoting craft beer, especially those dispensed by his own Stone Brewing Co. in San Diego. I mean, the man travels with a megaphone. Now he has a new way to evangelize: The Craft of Stone Brewing Co.: Liquid Lore, Epic Recipes, and Unabashed Arrogance (Ten Speed Press, $25). 
Koch teamed up with Stone co-founder Steve Wagner and spokesman Randy Clemens (who's also author of The Sriracha Cookbook, where we found one of our favorite Michelada recipes) to create a tome that is equal parts brewery history, surprising food recipes incorporating Stone's bottles (stir-fried Brussels sprouts?), and lessons in homebrewing. You'd want to be a Stone enthusiast already (you can pick up their pale ale many places), but then it's just about perfect. And anyone who's ever held a bottle of He'Brew Beer or Coney Island Craft Lager knows something about enthusiasm. Jeremy Cowan runs his Shmaltz Brewing Co. with a fair amount of shtick, but, circus-sideshow labeling aside, it's clear that Shmaltz owes at least part of its success to Cowan's ability to not take himself too seriously, which is accounted for in Craft Beer Bar Mitzvah (Malt Shop Publishing, $16.99), a look at the brewery's 13-year history. Chapter titles like "Pomegranates and Cocksuckers" are anything but conventional, but then again, neither are some of the best beers. Bar Mitzvah is another reminder of how weird — and wonderful — the industry can be. At the very least, it's more interesting than most books about a business startup. Evan S. Benn is the beer columnist for the St. Louis Post-Dispatch. His Hip Hops column, blog, and beer app can be found at stltoday.com/hiphops, and you can buy his new book here. You can also follow him on Twitter and Facebook.
/**
 * Baidu.com,Inc.
 * Copyright (c) 2000-2013 All Rights Reserved.
 */
package com.baidu.hsb.manager.response;

import org.apache.log4j.Logger;

import com.baidu.hsb.config.ErrorCode;
import com.baidu.hsb.manager.ManagerConnection;
import com.baidu.hsb.net.mysql.OkPacket;

/**
 * Handles the manager-console "reload user" command by answering the manager
 * connection with either an OK packet or an error message.
 *
 * @author <EMAIL>
 */
public final class ReloadUser {

    private static final Logger logger = Logger.getLogger(ReloadUser.class);

    /**
     * Responds to a reload-user request on the given manager connection.
     *
     * NOTE(review): the success flag below is hard-coded to {@code false},
     * so the OK branch is currently dead code and every request is answered
     * with "Unsupported statement". Presumably the actual user-config reload
     * call was never wired in — confirm before relying on this command.
     *
     * @param c the manager connection to reply on
     */
    public static void execute(ManagerConnection c) {
        boolean reloaded = false;
        if (!reloaded) {
            c.writeErrMessage(ErrorCode.ER_YES, "Unsupported statement");
        } else {
            // Unreachable while the flag above stays hard-coded false.
            StringBuilder msg = new StringBuilder();
            msg.append(c).append("Reload userConfig success by manager");
            logger.warn(msg.toString());
            OkPacket okPacket = new OkPacket();
            okPacket.packetId = 1;
            okPacket.affectedRows = 1;
            okPacket.serverStatus = 2;
            okPacket.message = "Reload userConfig success".getBytes();
            okPacket.write(c);
        }
    }
}
The Detroit Lions have been connected to plenty of pass rushers in next week's NFL draft, from Montez Sweat to Ed Oliver to Rashan Gary. Here's one more: Clemson defensive lineman Clelin Ferrell. Ferrell, who's expected to be a first-round pick after a standout college career, was one of three players who made pre-draft visits to the Lions in recent days, along with Arizona defensive tackle P.J. Johnson and Middle Tennessee State linebacker Darius Harris. Ferrell led Clemson with 11.5 sacks last season and helped the Tigers win their second college football championship in his three seasons as a starter. He's one of the highest-ranked prospects to take a top-30 visit to the Lions this spring. Kentucky linebacker Josh Allen, a likely top-five pick, Iowa tight end Noah Fant and Mississippi State safety Johnathan Abram are among other potential first-round picks who've visited the Lions in recent weeks, and the team hosted former Michigan standouts Devin Bush and Rashan Gary on local visits. Ferrell has been projected as high as a top-10 pick and as low as the late-20s. He's considered a safe prospect who's best suited to play defensive end but versatile enough to move inside on passing downs. The Lions return four of their top defensive linemen from last season in Damon Harrison, A'Shawn Robinson, Da'Shawn Hand and Romeo Okwara and added Trey Flowers in free agency, but they have long-term concerns at the position. Robinson is entering the final season on his contract, and Harrison wants a new deal despite his contract running through 2020. The Lions, so far, have been reluctant to negotiate with Harrison. Johnson had three sacks in nine games for Arizona last season and is a mountainous run stuffer at 6 feet 4 and 330 pounds. Both he and Harris were combine snubs and are projected to go on Day 3 of the draft. Harris, who had 97 tackles and 12 for loss last season, played in the NFLPA postseason all-star game.
The property at 37, Shaheed Bhagat Singh Marg has about 85 rooms, 3 banquet halls and one swimming pool and is situated adjacent to Connaught Place. NEW DELHI: The Connaught, a premium hotel in Lutyen's Delhi, was successfully acquired by Tata's Indian Hotel Company Limited (IHCL) in an e-auction conducted by the New Delhi Municipal Council (NDMC) on Wednesday. "The successful bidder is the Taj group which will pay 31.80 per cent of the Gross Turn Over (GOT) or Rs 5.868 crore per annum, whichever is higher. This current auction has yielded more than the double revenue that the NDMC was getting from this hotel," a senior NDMC official said. The licensing rights have been granted for a period of 33 years. "The licence fee out of this public property will help in boosting the revenue and financial potential of the NDMC which will help in financing the public projects," the official added. The NDMC had last year decided to auction The Connaught and re-auction Asian Hotel, after they were sealed by it in 2015 due to non-payment of license fees dues. In January last year, the Asian Hotel was e-auctioned fetching Rs 45.5 lakh per month as licence fee, the highest-ever for the agency. But, the bidder later refused to take the property necessitating a re-auction.
A Case Study on the Wool Textile Enterprise Brands of Modern Shanghai — Taking the Zhanghua Brand "Nine One Eight" as an Example. Shanghai's development began with the shipping industry and prospered with industrialization. Compared to the national industry, which has developed since the late Qing dynasty, the development of Shanghai's modern industry is more recent and constitutes a very memorable history. It records the history of traditional industrial development in Shanghai and its current situation, focusing on a large number of industrial enterprises that have had, and still have, their glory and dreams. In this paper, we discuss in detail the Zhang Hua plush textile company's technology and brand-making, and how they contributed to the company's success and profitability. More than mere symbols, their high-quality products, famous name, and relationship with consumers had powerful functions within the enterprise and in connecting with consumers. Recalling those brilliant, high-spirited but hard times, we hope this study will arouse a new understanding of the past, give birth to a new beginning of the old story, and set out on a new endeavor.
DSL (Digital Subscriber Line) is a technology for bringing high-bandwidth information to e.g. homes and small businesses over ordinary copper telephone lines. Digital Subscriber Line is a technology that assumes digital data does not have to be converted into analogue form and back. Digital data is transmitted to a subscriber directly as digital data and this allows a much wider bandwidth to be used for transmitting the data than in traditional telephone systems. Several modulation technologies are used by various kinds of DSL (such as DSL, HDSL, SDSL, ADSL, RADSL and VDSL), and these are being standardized by the International Telecommunication Union (ITU). In a typical DSL access system configuration, subscriber devices, such as telephones or IP terminals, are connected to a DSL line via a multiplexer (also known as Integrated Access Device, IAD), which comprises a DSL modem and provides access to the DSL system. The subscriber devices and the multiplexer, i.e. terminal equipment located at the premises owned or controlled by the customer using network services are referred to as Customer Premises Equipment or CPE. The DSL line is further connected to a DSL Access Multiplexer (DSLAM), which connects a number of DSL lines to other networks, such as the Internet, typically via a backbone network, which employs e.g. Internet Protocol (IP) or IP over ATM (Asynchronous Transfer Mode). A DSL access system further comprises an Element Management System (EMS), which provides an interface for managing the various elements of the DSL access system network. With the management system, a network operator can communicate with the network elements. A function associated with the EMS is Customer Premises Equipment Management System (CPE-MS). The CPE-MS is typically used for various monitoring, diagnosis and configuration purposes and communicates with the customer premises equipment. 
The communication between the CPE-MS and CPE typically employs TCP/IP protocol (Transmission Control Protocol/Internet Protocol) or UDP/IP (User Datagram Protocol) transport protocol messages. The TCP/IP acts as the data transmission protocol in the Internet, the special advantage being its independence of different device or software architectures, which makes it the most generally used network protocol in the world, especially in local networks. In Internet-based networks, the IP protocol is the actual network protocol, which serves to route an addressed IP message from a source station to a destination station. A problem associated with the transmission of management messages between the CPE-MS and the CPE using IP is that a dedicated management IP address should be reserved for each CPE, which is impractical as the number of CPEs may be high, even hundreds. For the same reason, also the use of dedicated point-to-point connections between the CPE-MS and the CPE for management purposes is impractical.
​ , our free weekday email newsletter. Like ​The Atlantic? Subscribe to The Atlantic Daily​ , our free weekday email newsletter. C hris Hughes was a mythical savior—boyishly innocent, fantastically rich, intellectually curious, unexpectedly humble, and proudly idealistic. My entire career at the New Republic had been spent dreaming of such a benefactor. For years, my colleagues and I had sputtered our way through the internet era, drifting from one ownership group to the next, each eager to save the magazine and its historic mission as the intellectual organ for hard-nosed liberalism. But these investors either lacked the resources to invest in our future or didn’t have quite enough faith to fully commit. The unending search for patronage exhausted me, and in 2010, I resigned as editor. Then, in 2012, Chris walked through the door. Chris wasn’t just a savior; he was a face of the zeitgeist. At Harvard, he had roomed with Mark Zuckerberg, and he had gone on to become one of the co-founders of Facebook. Chris gave our fusty old magazine a Millennial imprimatur, a bigger budget, and an insider’s knowledge of social media. We felt as if we carried the hopes of journalism, which was yearning for a dignified solution to all that ailed it. The effort was so grand as to be intoxicating. We blithely dismissed anyone who warned of how our little experiment might collapse onto itself—how instead of providing a model of a technologist rescuing journalism, we could become an object lesson in the dangers of journalism’s ever greater reliance on Silicon Valley. Chris Hughes wasn’t just a savior; he was a face of the zeitgeist. When Chris first invited me for a chat one jacketless day in earliest spring, we wandered aimlessly across downtown Washington, paper coffee cups in hand. During those first weeks of his ownership, Chris had booked himself an endless listening tour. 
He seemed eager to speak with anyone who had worked at the magazine, or who might have a strong opinion about it. But as we talked, I wondered whether he wanted something more than my advice. I began to suspect that he wanted to rehire me as the New Republic’s editor. Before long he offered me the job, and I accepted. In my experience, owners of the New Republic were older men who had already settled into their wealth and opinions. Chris was intriguingly different. He was 28, and his enthusiasm for learning made him seem even younger. During his honeymoon, he read War and Peace; the ottoman in his SoHo apartment was topped with seemingly every literary journal published in the English language. “When I first heard the New Republic was for sale,” he told me, “I went to the New York Public Library and began to read.” As he plowed through microfiche, the romance of the magazine’s history—and its storied writers, among them Rebecca West, Virginia Woolf, Edmund Wilson, Ralph Ellison, and James Wood—helped loosen his hold on his wallet. Even after Facebook went public, leaving Chris with hundreds of millions of dollars in stock, he seemed indifferent to his wealth, or at least conflicted by it. He would get red-faced when people pointed out that he owned two estates and a spacious loft; he was apt to wear the same blazer every day. The source of his fortune didn’t define him—indeed, he always spoke of Facebook with an endearing detachment. He didn’t even use it that much, he once confessed to me at dinner. It was an admission that I found both disarming and hugely compelling. We soon began to remake the magazine, setting out to fulfill our own impossibly high expectations. O ver the past generation, journalism has been slowly swallowed. The ascendant media companies of our era don’t think of themselves as heirs to a great ink-stained tradition. Some like to compare themselves to technology firms. This redefinition isn’t just a bit of fashionable branding. 
As Silicon Valley has infiltrated the profession, journalism has come to unhealthily depend on the big tech companies, which now supply journalism with an enormous percentage of its audience—and, therefore, a big chunk of its revenue. Dependence generates desperation—a mad, shameless chase to gain clicks through Facebook, a relentless effort to game Google’s algorithms. It leads media outlets to sign terrible deals that look like self-preserving necessities: granting Facebook the right to sell their advertising, or giving Google permission to publish articles directly on its fast-loading server. In the end, such arrangements simply allow Facebook and Google to hold these companies ever tighter. What makes these deals so terrible is the capriciousness of the tech companies. Quickly moving in a radically different direction may be great for their bottom line, but it is detrimental to the media companies that rely on the platforms. Facebook will decide that its users prefer video to words, or ideologically pleasing propaganda to more-objective accounts of events—and so it will de-emphasize the written word or hard news in its users’ feeds. When it makes shifts like this, or when Google tweaks its algorithm, the web traffic flowing to a given media outlet may plummet, with rippling revenue ramifications. The problem isn’t just financial vulnerability, however. It’s also the way tech companies dictate the patterns of work; the way their influence can affect the ethos of an entire profession, lowering standards of quality and eroding ethical protections. I never imagined that our magazine would go down that path. My first days working with Chris were exhilarating. As an outsider, he had no interest in blindly adhering to received wisdom. When we set out to rebuild the New Republic’s website, we talked ourselves into striking a reactionary stance. We would resist the impulse to chase traffic, to clutter our home page with an endless stream of clicky content. 
Our digital pages would prize beauty and finitude; they would brashly announce the import of our project—which he described as nothing less than the preservation of long-form journalism and cultural seriousness. Chris said he believed that he could turn the New Republic into a profitable enterprise. But his rhetoric about profit never seemed entirely sincere. “I hate selling ads,” he would tell me over and over. “It makes me feel seedy.” And for more than a year, he was willing to spend with abandon. With the benefit of hindsight, I might have been more disciplined about the checks we, I mean he, wrote. But he had a weakness for leasing offices in prime locations and hiring top-shelf consultants. I had a weakness for handsomely paying writers to travel the globe. I moved quickly to hire a large staff, which included experienced writers and editors, who didn’t come cheap. Chris didn’t seem to mind. “I’ve never been so happy or fulfilled,” he would tell me. “I’m working with friends.” Eventually, though, the numbers caught up with Chris. Money needed to come from somewhere—and that somewhere was the web. A dramatic increase in traffic would bring needed revenue. And so we found ourselves suddenly reliving recent media history, but in a time-compressed sequence that collapsed a decade of painful transition into a few tense months. A t the beginning of this century, journalism was in extremis. Recessions, coupled with readers’ changing habits, prodded media companies to gamble on a digital future unencumbered by the clunky apparatus of publishing on paper. Over a decade, the number of newspaper employees dropped by 38 percent. As journalism shriveled, its prestige plummeted. One report ranked newspaper reporter as the worst job in America. The profession found itself forced to reconsider its very reasons for existing. All the old nostrums about independence suddenly seemed like unaffordable luxuries. Growing traffic required a new mentality. 
Unlike television, print journalism had previously shunned the strategic pursuit of audience as a dirty, somewhat corrupting enterprise. The New Republic held an extreme version of this belief. An invention of Progressive-era intellectuals, the magazine had, over the decades, became something close to a cult, catering to a loyal group that wanted to read insider writing about politics and highbrow meditations on culture. For stretches of its long history, however, this readership couldn’t fill the University of Mississippi’s football stadium. A larger readership was clearly within reach. The rest of journalism was already absorbing this lesson, which Jonah Peretti, the founder of BuzzFeed, had put this way: R = ßz. (In epidemiology, ß represents the probability of transmission; z is the number of people exposed to a contagious individual.) The equation supposedly illustrates how a piece of content could go viral. But although Peretti got the idea for his formula from epidemiology, the emerging science of traffic was really a branch of behavioral science: People clicked so quickly, they didn’t always fully understand why. These decisions were made in a semiconscious state, influenced by cognitive biases. Enticing a reader entailed a little manipulation, a little hidden persuasion. Chris not only felt urgency about the necessity of traffic, he knew the tricks to make it grow. He was a fixture at panels on digital media, and he had learned about virality from Upworthy, a site he had supplied with money to help launch. Upworthy plucked videos and graphics from across the web, usually obscure stuff, then methodically injected elements that made them go viral. As psychologists know, humans are comfortable with ignorance, but they hate feeling deprived of information. Upworthy used this insight to pioneer a style of headline that explicitly teased readers, withholding just enough information to titillate them into reading further. 
For every item posted, Upworthy would write 25 different headlines, test all of them, and determine the most clickable of the bunch. Based on these results, it uncovered syntactical patterns that almost ensured hits. Classic examples: “9 out of 10 Americans Are Completely Wrong About This Mind-Blowing Fact” and “You Won’t Believe What Happened Next.” These formulas became commonplace on the web, until readers grew wise to them. Like a manager with a stopwatch, Chartbeat and its ilk now hover over the newsroom. The core insight of Upworthy, BuzzFeed, Vox Media, and other emerging internet behemoths was that editorial success could be engineered, if you listened to the data. This insight was embraced across the industry and wormed its way into the New Republic. Chris installed a data guru on our staff to increase our odds of producing viral hits. The guru kept a careful eye on Facebook’s trending topics and on what the public had craved at the same time the year before. “Super Bowl ads are big,” he told the staff at one of our weekly meetings. “What can we create to hit that moment?” Questions like these were usually greeted by hostile silence. While I didn’t care for the tactics, I didn’t strenuously resist them either. Chris still encouraged us to publish long essays and deeply reported pieces. What’s more, he asked a perfectly reasonable question: Did we really think we were better than sober places like Time or The Washington Post? Clicks would rain down upon us if only we could get over ourselves and write about the same outrage as everyone else. Everyone else was doing this because it worked. We needed things to work. O ne of the emblems of the new era in journalism haunted my life at the New Republic. Every time I sat down to work, I surreptitiously peeked at it—as I did when I woke up in the morning, and a few minutes later when I brushed my teeth, and again later in the day as I stood at the urinal. 
Sometimes, I would just stare at its gyrations, neglecting the article I was editing or ignoring the person seated across from me. My master was Chartbeat, a site that provides writers, editors, and their bosses with a real-time accounting of web traffic, showing the flickering readership of each and every article. Chartbeat and its competitors have taken hold at virtually every magazine, newspaper, and blog. With these meters, no piece has sufficient traffic—it can always be improved with a better headline, a better approach to social media, a better subject, a better argument. Like a manager standing over the assembly line with a stopwatch, Chartbeat and its ilk now hover over the newsroom. This is a dangerous turn. Journalism may never have been as public-spirited an enterprise as editors and writers liked to think it was. Yet the myth mattered. It pushed journalism to challenge power; it made journalists loath to bend to the whims of their audience; it provided a crucial sense of detachment. The new generation of media giants has no patience for the old ethos of detachment. It’s not that these companies don’t have aspirations toward journalistic greatness. BuzzFeed, Vice, and the Huffington Post invest in excellent reporting and employ first-rate journalists—and they have produced some of the most memorable pieces of investigative journalism in this century. But the pursuit of audience is their central mission. They have allowed the endless feedback loop of the web to shape their editorial sensibility, to determine their editorial investments.  James Gilleard The New York Times and The New Yorker—attempted to scrape some traffic from Cecil. This required finding a novel angle, or a just novel enough angle. Once a story grabs attention, the media write about the topic with repetitive fury, milking the subject for clicks until the public loses interest. 
A memorable yet utterly forgettable example: A story about a Minnesota hunter killing a lion named Cecil generated some 3.2 million stories. Virtually every news organization—even The New York Times and The New Yorker—attempted to scrape some traffic from Cecil. This required finding a novel angle, or a just novel enough angle. Vox: "Eating Chicken Is Morally Worse Than Killing Cecil the Lion." BuzzFeed: "A Psychic Says She Spoke With Cecil the Lion After His Death." TheAtlantic.com: "From Cecil the Lion to Climate Change: A Perfect Storm of Outrage One-upmanship." In some ways, this is just a digitally enhanced version of an old-fashioned media pile-on. But social media amplify the financial incentive to join the herd. The results are highly derivative. Joshua Topolsky, a founder of The Verge, has bemoaned this creeping homogenization: "Everything looks the same, reads the same, and seems to be competing for the same eyeballs." Donald Trump is the culmination of the era. He understood how, more than at any other moment in recent history, the media need to give the public the circus that it desires. Even if the media disdained Trump's outrages, they built him up as a plausible candidate, at which point they had no choice but to cover him. Stories about Trump yielded the sort of traffic that pleased the data gods and benefited the bottom line. Trump began as Cecil the lion and ended up president of the United States. Chris and I once sat at the breakfast table of an august Washington hotel, pondering the core qualities of the New Republic—the New Republic that we would re-create together. We didn't say so explicitly, but we were searching for a piece of common ground, an adjective that could unite us. If there had been a whiteboard—and Chris loved whiteboards—it would have been filled with discarded terms. "We're idealistic," he said finally.
“It ties together our storied past and our optimism about solutions.” Idealism was a word that melted my heart, and I felt uncontainable joy at the prospect of agreement. “Boom. That’s it.” We were idealistic about our shared idealism. But my vision of the world was moralistic and romantic; his was essentially technocratic. He had faith in systems—rules, efficiencies, organizational charts, productivity tools. Around the second anniversary of Chris’s ownership, he shared a revised vision of the magazine’s future with me. As the months had slipped by, he had gotten antsy. Results, by which he meant greater web traffic and greater revenue, needed to come faster. “To save the magazine, we need to change the magazine,” he said. Engineers and marketers were going to begin playing a central role in the editorial process. They would give our journalism the cool, innovative features that would help it stand out in the marketplace. Of course, this required money, and that money would come from the budget that funded long-form journalism. We were now a technology company, he told me. (Hughes denies saying this.) To which I responded, “That doesn’t sound like the type of company that I’m qualified to run.” He assured me that I was. From Our September 2017 Issue Try 2 FREE issues of The Atlantic Subscribe Two months later, I learned from a colleague that Chris had hired my replacement—and that my replacement was lunching around New York, offering jobs at the New Republic. Before Chris had the chance to fire me, I resigned, and most members of the magazine’s editorial staff quit too. Their idealism dictated that they resist his idealism. They didn’t want to work for a publication whose ethos more clearly aligned with Silicon Valley than with journalism. They were willing to pay careful attention to Facebook, but they didn’t want their jobs defined by it. 
The bust-up received its fair share of attention and then the story faded—a bump on Silicon Valley’s route to engulfing journalism.
# Generated by Django 3.0.6 on 2020-05-11 11:38 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Blog', '0004_auto_20200511_1114'), ] operations = [ migrations.RenameField( model_name='post', old_name='content', new_name='content2', ), migrations.AddField( model_name='post', name='content1', field=models.TextField(default='lorem ipsum dolor sit amet'), ), ]
import { Injectable } from '@nestjs/common';
import { NgRepository } from 'src/core/ng-respository.service';
import { IssuePollResponse } from './issue-poll-response.entity';
// @ts-ignore
import * as SQL from 'assets/sql/sql.json';

// Repository key used for all IssuePollResponse persistence calls.
const IssuePollResponseEntity: string = 'IssuePollResponse';

/**
 * Service for persisting and querying poll responses attached to an issue.
 * All storage work is delegated to the generic NgRepository.
 */
@Injectable()
export class IssuePollResponseService {
  constructor(private repository: NgRepository) {}

  /**
   * Stores a batch of poll response options.
   *
   * @param payload entities to persist (shape defined by the repository —
   *                presumably an array of IssuePollResponse rows; confirm
   *                against callers)
   * @returns the repository result, or undefined on failure
   */
  public storeMany(payload: any): Promise<IssuePollResponse | undefined> {
    return this.repository.storeMany(
      IssuePollResponseEntity,
      payload,
      'Não foi possível adicionar as opções da Enquete. Tente novamente.',
    );
  }

  /**
   * Fetches the answers of the poll belonging to the given issue id by
   * running a raw SQL statement loaded from assets/sql/sql.json.
   *
   * NOTE(review): the id is appended to the SQL string by plain
   * concatenation. If payload.id can ever originate from an untrusted
   * source this is a SQL injection vector — prefer a parameterized query
   * if the repository supports one.
   *
   * @param payload object holding the issue id to filter by
   * @returns rows of answers, or undefined on failure
   */
  public getAnswersByIdIssue(payload: {
    id: number;
  }): Promise<Array<{ answer: string }> | undefined> {
    // sql.json stores one object per feature; pick the entry that defines
    // the 'getAnswersByIdIssue' statement.
    const features = SQL.issuePollResponse.features as Array<any>;
    const sql = features.find(feature => feature['getAnswersByIdIssue']);
    return this.repository.getByGivenQuery({
      entity: IssuePollResponseEntity,
      errorMsg: 'Erro ao recuperar o PollAnswer. Tente novamente.',
      sql: (sql['getAnswersByIdIssue'] as string).concat(` ${payload.id}`),
    });
  }
}
Deborah Dion and other protesters gather in the office of Florida State Rep. Manny Diaz as they protest his stance against the expansion of healthcare coverage on September 20, 2013 in Miami, Florida. Arkansas struck a creative deal with the Obama administration last year, allowing it to embrace Medicaid expansion under the Affordable Care Act, and bringing coverage to nearly 100,000 low-income Arkansans. This year, however, state Republicans were poised to take it away. Because of a quirk in the state policymaking process, last year’s vote that expanded access needs to be reauthorized this year, and many of the same GOP policymakers who backed the policy in 2013 are facing primary challengers in 2014. Arkansas’ state House has therefore voted down Medicaid expansion several times in recent weeks. In a minor miracle, the policy somehow prevailed today. Apparently, the fifth time was the charm. The state House on Tuesday voted 76-24 to approve a new round of funding for the so-called private option, resolving the issue that has dominated the fiscal session. The Senate passed the appropriation last month in a 27-8 vote. The House failed in four previous attempts to pass it, each time falling a few votes short of the three-fourths majority, or 75 votes in the 100-member House, needed to approve any appropriations bill. Senate Bill 111 goes next to the governor, who has said he will sign it. The Arkansas Democrat-Gazette’s report quoted one Republican lawmaker, state Rep. Kim Hammer, who had voted against the policy, but ultimately changed his mind. But Arkansas isn’t the only state with Medicaid expansion news. In late January, Utah’s Republican governor, Gary Herbert, said he was prepared to accept Medicaid expansion, at least in some form, bringing coverage to 111,000 low-income adults in the state. We’re now getting a better sense of what Herbert has in mind. …Herbert’s plan won’t be a straightforward expansion of Utah’s Medicaid program. 
Instead, Herbert will ask the Department of Health and Human Services to provide block grants that would allow the state to spend the money on subsidies for Utah residents who make up to 138 percent of the federal poverty line. The three-year pilot program Herbert proposed Thursday, dubbed the Healthy Utah Plan, would provide subsidies to individuals and families based on their ability to work, their household incomes and access to health care through employers, Herbert’s office said. Participants would have to make co-payments, and parents with children on Medicaid would have the option to put their whole family on private insurance plans. Recipients of the subsidies would contribute an average of around $420 per year toward their own health care. It’s unclear if the Obama administration will be amenable to Utah’s request, and if HHS turns Utah down, it would appear the state would be in a tough position: either figure out some other way to expand coverage or leave $258 million in federal funds on the table and 111,000 low-income Utahans behind.
def loglike_from_hist(bins, counts):
    """Build a log-likelihood function from a histogram.

    Parameters
    ----------
    bins : array-like
        Monotonically increasing bin edges, of length ``len(counts) + 1``.
    counts : array-like
        Bin contents. They need not be normalized; the histogram is
        normalized here so it integrates to 1 over its support.

    Returns
    -------
    callable
        ``loglike(x)`` giving the log of the normalized histogram density
        at scalar ``x``. Returns ``-np.inf`` outside the support (the
        exact edges ``bins[0]`` and ``bins[-1]`` are excluded, matching
        the original convention) and for empty bins.
    """
    bins = np.asarray(bins, dtype=float)
    counts = np.asarray(counts, dtype=float)
    # Normalize the histogram to unit integral.
    integral = np.sum(np.diff(bins) * counts)
    norm = counts / integral

    def loglike(x):
        # Zero likelihood outside the histogram support.
        if x <= bins[0] or x >= bins[-1]:
            return -np.inf
        # searchsorted returns i with bins[i-1] < x <= bins[i] for
        # interior x, so norm[i - 1] is the bin containing x.
        i = np.searchsorted(bins, x)
        density = norm[i - 1]
        # Empty bin: return -inf explicitly rather than letting
        # np.log(0) emit a RuntimeWarning.
        if density <= 0.0:
            return -np.inf
        return np.log(density)

    return loglike
from dataclasses import dataclass


@dataclass
class Die:
    """Deterministic die: rolls 1, 2, ..., faces, then wraps back to 1."""

    value: int = 0  # last face shown (0 means "not yet rolled")
    faces: int = 100
    rolls: int = 0  # total number of rolls performed

    def roll(self):
        """Advance the die by one face and return the new value.

        Fix: the wrap point was hard-coded to 100, silently ignoring the
        declared ``faces`` field; using ``self.faces`` keeps the default
        behavior (faces=100) while honoring custom die sizes.
        """
        self.rolls += 1
        self.value = 1 if self.value == self.faces else self.value + 1
        return self.value


@dataclass
class Player:
    """A player on a circular board of spaces 1..10."""

    position: int
    score: int = 0

    @property
    def has_won(self):
        """The game ends when a player's score reaches 1000."""
        return self.score >= 1000

    def move(self, die: Die):
        """Roll three times, advance around the board, score the landing space."""
        total_move = sum(die.roll() for _ in range(3))
        # Spaces are numbered 1..10, so a modulus of 0 means space 10.
        new_position = (self.position + total_move) % 10
        self.position = 10 if new_position == 0 else new_position
        self.score += self.position


def play(p1: Player, p2: Player, die: Die):
    """Play until one player wins; return losing score * total die rolls."""
    while not p1.has_won and not p2.has_won:
        p1.move(die)
        if p1.has_won:
            break
        p2.move(die)
    loser = p2 if p1.has_won else p1
    return loser.score * die.rolls


def main():
    p1 = Player(6)
    p2 = Player(4)
    die = Die()
    total = play(p1, p2, die)
    print(total)


if __name__ == "__main__":
    main()
Optical specifications for pseudocolor diagnosis An optical pseudocolor specification of a radiographic image utilizing the phase modulation method is suggested for medical diagnosis. With this approach, an illness can be diagnosed by referring not only to the color but also to the chromaticity coordinates of the affected part in the color image taken from a patient. This paper offers a theoretical method to calculate the chromaticity coordinates based on the effective relief thickness corresponding to the affected part of the patient; the result of the calculation is in agreement with the gamut observation.
Multiple Transceivers Inter-satellite Optical Wireless Communication System Performance This study clarifies the effect of using a multi-transceiver channel in an inter-satellite optical wireless communication system. The results obtained in this research have been simulated with OptiSystem version 7. This study clarifies the impact of applying 16 transceivers on the system performance. It also presents a comparison between an inter-satellite optical wireless communication system using a single channel and one using 16 transceivers at different wavelengths. Then, it clarifies the impact of increasing the transmission data rate on the system performance at different propagation distances for the 16-transceiver system. The impact of transmitted power on the system performance has also been explained. The performance parameters in our study are the quality factor and the received power.
Neurogenic bladder dysfunction in lumbar intervertebral disc prolapse. Lumbar disc prolapse with urinary dysfunction is an uncommon condition. The clinical, pathological and follow-up details of 30 patients are presented. Detrusor recovery is rare and most patients are left with an areflexic bladder. Female patients with lumbar disc prolapse and an areflexic detrusor who strain to empty their bladders are likely to develop genuine stress incontinence. This may be managed by the early institution of intermittent self-catheterisation, which may reduce the need for incontinence surgery.
/**
 * Login window for the chat client. Collects a user id and password,
 * delegates authentication to {@code ChatClientCtrl}, and on success
 * replaces itself with a {@code ChatWindow} for the logged-in user.
 */
public class LoginWindow extends JFrame {
    private JTextField userField;
    private JPasswordField passField;
    private JButton loginButton;
    private JButton clearButton;
    private ChatClientCtrl controller;

    public LoginWindow(String title, ChatClientCtrl ctrl) throws HeadlessException {
        super(title);
        this.controller = ctrl;
        getContentPane().add(createLogin());
        setDefaultCloseOperation(EXIT_ON_CLOSE);
    }

    /** Builds the three-row form: user id row, password row, button row. */
    private JPanel createLogin() {
        userField = new JTextField(15);
        passField = new JPasswordField(15);
        loginButton = new JButton("Login");
        clearButton = new JButton("Clear");

        // One shared listener dispatches on the event source.
        ActionListener handler = new ButListener();
        loginButton.addActionListener(handler);
        clearButton.addActionListener(handler);

        JPanel form = new JPanel(new GridLayout(3, 1));

        JPanel userRow = new JPanel();
        userRow.add(new JLabel("User id:"));
        userRow.add(userField);
        form.add(userRow);

        JPanel passRow = new JPanel();
        passRow.add(new JLabel("Password:"));
        passRow.add(passField);
        form.add(passRow);

        JPanel buttonRow = new JPanel();
        buttonRow.add(loginButton);
        buttonRow.add(clearButton);
        form.add(buttonRow);

        return form;
    }

    /** Handles both buttons: login attempt or clearing the input fields. */
    private class ButListener implements ActionListener {
        public void actionPerformed(ActionEvent e) {
            if (e.getSource() != loginButton) {
                // Clear button: wipe both input fields.
                System.out.println("Clear button pressed.");
                userField.setText("");
                passField.setText("");
                return;
            }
            System.out.println("Login button pressed.");
            String user = userField.getText();
            String pass = new String(passField.getPassword());
            try {
                controller.login(user, pass);
                // Successful login: open the chat window and close this one.
                ChatWindow win = new ChatWindow("Chat window for " + user, controller);
                win.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                win.setSize(500, 400);
                win.setVisible(true);
                LoginWindow.this.dispose();
            } catch (ChatException e1) {
                JOptionPane.showMessageDialog(LoginWindow.this,
                        "Login error " + e1, "Error", JOptionPane.ERROR_MESSAGE);
            }
        }
    }
}
def evaluate(
    self,
    output: "JupyterIpynbNotebook",
    cwd: Path,
    parameters: Dict[str, Any],
) -> None:
    """Execute this notebook with papermill, writing the result to ``output``.

    Args:
        output: Notebook wrapper whose ``filepath`` receives the executed copy.
        cwd: Working directory the notebook is executed in; must exist.
        parameters: Values papermill injects into the notebook's
            parameters cell before execution.

    Raises:
        AssertionError: If this notebook's file or ``cwd`` does not exist.
    """
    # Fail fast on missing inputs before launching a kernel.
    assert self.filepath.is_file()
    assert cwd.is_dir()
    papermill.execute_notebook(
        input_path=self.filepath,
        output_path=output.filepath,
        parameters=parameters,
        # Save the output notebook after each cell executes, so partial
        # progress survives a crash mid-run.
        request_save_on_cell_execute=True,
        kernel_name="python",
        language="python",
        progress_bar=True,
        # stdout/stderr are not redirected to files; with log_output=True
        # cell output goes to the papermill logger instead.
        stdout_file=None,
        stderr_file=None,
        log_output=True,
        cwd=cwd,
    )
1. Technical Field The present invention relates to an organic light-emitting diode (OLED) display and, more particularly, to an OLED display which can improve the problem of non-uniform luminance of pixels caused by IR-drop. 2. Description of the Related Art OLED (Organic Light-Emitting Diode) panel employs OLED as light-emitting element. The OLED is driven by current, and the luminance thereof changes with the current passing through the OLED. Therefore, how to accurately control the value of the current passing through the OLED is an important problem of developing the OLED panel. Referring to FIG. 1, a schematic view of a conventional OLED display is shown. The OLED display 100 comprises a scan driving circuit 110, a data driving circuit 120, a power supply 130 and an OLED panel 140. The OLED panel 140 comprises a plurality of scan lines (as shown by mark 142), a plurality of data lines (as shown by mark 144), a conducting wire 146 and a plurality of pixels (as shown by mark 148). Each of the pixels 148 is composed of a transistor 148-1, a transistor 148-2, a capacitor 148-3 and an OLED 148-4, and OVSS as shown in FIG. 1 is a reference voltage. The coupling relation of the above elements is shown in FIG. 1 and is not described herein. In addition, the pixels 148 are arranged in an array to form a display region of the OLED panel 140 for displaying images or words. In the structure as shown in FIG. 1, the transistor 148-2 of each of the pixels 148 is electrically coupled to a voltage OVDD supplied by the power supply 130 through the conducting wire 146, so as to obtain a display potential (as shown by mark OVDD′). The cathode of the OLED 148-4 of each of the pixels 148 is electrically coupled to the reference voltage OVSS to obtain a reference potential. 
The data voltage transmitted by a corresponding data line 144 and the potential difference between the display potential OVDD′ and the reference potential will jointly influence the value of the current passing through the OLED 148-4 and thereby control the luminance of the OLED 148-4. Generally, the value of the reference potential is constant. However, the values of the display potentials OVDD′ of the pixels 148 are different from each other because the locations of the pixels are different. That is, the current supplied by the power supply 130 is transmitted to different pixels 148 through different current-transmitting paths, and the different current-transmitting paths cause different voltage drops (that is the IR-drop). Thus, the display potentials OVDD′ received by the pixels 148 are different from each other. In summary, since the display potentials OVDD′ received by the pixels 148 are different from each other, the currents of the OLEDs 148-4 are different from each other. Thus, the luminance of the OLEDs 148-4 is non-uniform. In other words, the IR-drop will cause the non-uniform luminance of the pixels 148.
import Document, {
  Html,
  Head,
  NextScript,
  Main,
  DocumentContext,
} from "next/document";

// Custom Next.js Document: wraps every server-rendered page with a shared
// HTML shell carrying site-wide <head> tags (SEO meta, theme color, fonts).
class MyDocument extends Document {
  // Pass-through of the framework's default initial props; kept as an
  // explicit hook point for future server-side customization.
  static async getInitialProps(ctx: DocumentContext) {
    const initialProps = await Document.getInitialProps(ctx);
    return initialProps;
  }

  render() {
    return (
      <Html>
        <Head>
          {/* SEO metadata for the what-to-code.com clone. */}
          <meta
            name="description"
            content="A clone for https://what-to-code.com/."
          />
          <meta
            name="keywords"
            content="ui clone, website clones, ideas, code, programming ideas, coding projects, ideas for projects"
          />
          {/* Browser UI accent color (mobile address bar, etc.). */}
          <meta name="theme-color" content="#fad400" />
          {/* Google Fonts used across the site: Poppins, Fira Sans, Lato. */}
          <link
            rel="stylesheet"
            href="https://fonts.googleapis.com/css2?family=Poppins:wght@300;400&display=swap"
          />
          <link
            href="https://fonts.googleapis.com/css2?family=Fira+Sans:wght@400&display=swap"
            rel="stylesheet"
          />
          <link
            href="https://fonts.googleapis.com/css2?family=Lato:wght@400;700&display=swap"
            rel="stylesheet"
          />
        </Head>
        <body>
          {/* Main renders the page component; NextScript injects Next.js runtime scripts. */}
          <Main />
          <NextScript />
        </body>
      </Html>
    );
  }
}

export default MyDocument;