content
stringlengths
10
4.9M
class BaseRelationalModel:
    """Base class for all the relational models.

    The ``BaseRelationalModel`` class defines the common API that all the
    relational models need to implement, as well as common functionality.

    Args:
        metadata (dict, str or Metadata):
            Metadata dict, path to the metadata JSON file or Metadata
            instance itself.
        root_path (str or None):
            Path to the dataset directory. If ``None`` and metadata is
            a path, the metadata location is used. If ``None`` and
            metadata is a dict, the current working directory is used.
    """

    metadata = None
    # Declared as a class attribute so that ``sample`` raises the intended
    # ``NotFittedError`` instead of an ``AttributeError`` when it is called
    # on an instance that was never fitted. Set to ``True`` by ``fit``.
    fitted = False

    def __init__(self, metadata, root_path=None):
        if isinstance(metadata, Metadata):
            self.metadata = metadata
        else:
            self.metadata = Metadata(metadata, root_path)

        # Per-table primary key generators and the number of unique values
        # each generator can still produce (``np.inf`` for integer keys).
        self._primary_key_generators = {}
        self._remaining_primary_keys = {}

    def _fit(self, tables=None):
        """Fit this relational model instance to the dataset data.

        Subclasses must override this method with the actual fitting logic.

        Args:
            tables (dict):
                Dictionary with the table names as key and ``pandas.DataFrame``
                instances as values. If ``None`` is given, the tables will be
                loaded from the paths indicated in ``metadata``. Defaults to
                ``None``.
        """
        raise NotImplementedError()

    def fit(self, tables=None):
        """Fit this relational model instance to the dataset data.

        Args:
            tables (dict):
                Dictionary with the table names as key and ``pandas.DataFrame``
                instances as values. If ``None`` is given, the tables will be
                loaded from the paths indicated in ``metadata``. Defaults to
                ``None``.
        """
        self._fit(tables)
        self.fitted = True

    def _reset_primary_keys_generators(self):
        """Reset the primary key generators."""
        self._primary_key_generators = {}
        self._remaining_primary_keys = {}

    def _get_primary_keys(self, table_name, num_rows):
        """Return ``num_rows`` new primary key values for the requested table.

        A generator per table is created lazily on first use and cached, so
        consecutive calls keep producing fresh, non-repeating values.

        Args:
            table_name (str):
                Name of the table to get the primary keys from.
            num_rows (int):
                Number of primary key values to generate.

        Returns:
            pandas.Series:
                The generated primary key values.

        Raises:
            ValueError:
                If the ``metadata`` contains invalid types or subtypes, or if
                there are not enough primary keys left on any of the
                generators.
            NotImplementedError:
                If the primary key subtype is a ``datetime``.
        """
        primary_key = self.metadata.get_primary_key(table_name)
        field = self.metadata.get_fields(table_name)[primary_key]

        generator = self._primary_key_generators.get(table_name)
        if generator is None:
            if field['type'] != 'id':
                raise ValueError('Only columns with type `id` can be primary keys')

            subtype = field.get('subtype', 'integer')
            if subtype == 'integer':
                generator = itertools.count()
                remaining = np.inf
            elif subtype == 'string':
                regex = field.get('regex', r'^[a-zA-Z]+$')
                generator, remaining = utils.strings_from_regex(regex)
            elif subtype == 'datetime':
                raise NotImplementedError('Datetime ids are not yet supported')
            else:
                raise ValueError('Only `integer` or `string` id columns are supported.')

            self._primary_key_generators[table_name] = generator
            self._remaining_primary_keys[table_name] = remaining
        else:
            remaining = self._remaining_primary_keys[table_name]

        if remaining < num_rows:
            raise ValueError(
                'Not enough unique values for primary key of table {}'
                ' to generate {} samples.'.format(table_name, num_rows)
            )

        self._remaining_primary_keys[table_name] -= num_rows
        # Consume exactly ``num_rows`` values from the cached generator.
        return pd.Series(list(itertools.islice(generator, num_rows)))

    def _sample(self, table_name=None, num_rows=None, sample_children=True):
        """Generate synthetic data for one table or the entire dataset.

        Subclasses must override this method with the actual sampling logic.
        """
        raise NotImplementedError()

    def sample(self, table_name=None, num_rows=None,
               sample_children=True, reset_primary_keys=False):
        """Generate synthetic data for one table or the entire dataset.

        If a ``table_name`` is given and ``sample_children`` is ``False``, a
        ``pandas.DataFrame`` with the values from the indicated table is
        returned. Otherwise, if ``sample_children`` is ``True``, a dictionary
        containing both the table and all its descendant tables is returned.

        If no ``table_name`` is given, the entire dataset is sampled and
        returned in a dictionary.

        If ``num_rows`` is given, the root tables of the dataset will contain
        the indicated number of rows. Otherwise, the number of rows will be
        the same as in the original dataset. Number of rows in the child
        tables cannot be controlled and always will depend on the values from
        the sampled parent tables.

        If ``reset_primary_keys`` is ``True``, the primary key generators
        will be reset.

        Args:
            table_name (str):
                Name of the table to sample from. If not passed, sample the
                entire dataset.
            num_rows (int):
                Amount of rows to sample. If ``None``, sample the same number
                of rows as there were in the original table.
            sample_children (bool):
                Whether or not sample child tables. Used only if
                ``table_name`` is given. Defaults to ``True``.
            reset_primary_keys (bool):
                Whether or not reset the primary keys generators. Defaults to
                ``False``.

        Returns:
            dict or pandas.DataFrame:
                - Returns a ``dict`` when ``sample_children`` is ``True``
                  with the sampled table and child tables.
                - Returns a ``pandas.DataFrame`` when ``sample_children`` is
                  ``False``.

        Raises:
            NotFittedError:
                A ``NotFittedError`` is raised when the model has not been
                fitted yet.
        """
        if not self.fitted:
            raise NotFittedError('SDV instance has not been fitted')

        if reset_primary_keys:
            self._reset_primary_keys_generators()

        return self._sample(table_name, num_rows, sample_children)

    def save(self, path):
        """Save this instance to the given path using pickle.

        The versions of the installed packages are recorded on the instance
        before pickling so that ``load`` can warn about mismatches later.

        Args:
            path (str):
                Path where the instance will be serialized.
        """
        self._package_versions = get_package_versions(getattr(self, '_model', None))

        with open(path, 'wb') as output:
            pickle.dump(self, output)

    @classmethod
    def load(cls, path):
        """Load a model from a given path.

        Args:
            path (str):
                Path from which to load the instance.
        """
        with open(path, 'rb') as f:
            model = pickle.load(f)
            # Warn (do not fail) if the environment's package versions differ
            # from the ones recorded at save time.
            throw_version_mismatch_warning(getattr(model, '_package_versions', None))

            return model
<filename>pages/migrations/__init__.py # Ignore everything in this directory
// NewMockRESTClientGetter creates a new mock instance. func NewMockRESTClientGetter(ctrl *gomock.Controller) *MockRESTClientGetter { mock := &MockRESTClientGetter{ctrl: ctrl} mock.recorder = &MockRESTClientGetterMockRecorder{mock} return mock }
package main

import (
	"fmt"
	"log"
	"regexp"

	_ "github.com/go-sql-driver/mysql"
	"github.com/jinzhu/gorm"
	_ "github.com/jinzhu/gorm/dialects/mysql"
	"github.com/totsukapoker/totsuka-ps-bot/models"
)

// connectionRE parses URLs of the form scheme://user:pass@host/dbname.
// Compiled once at package load; MustCompile fails fast on an invalid
// pattern instead of silently discarding the error on every call.
var connectionRE = regexp.MustCompile(`([^:]+)://([^:]+):([^@]+)?@([^/]+)/([^?]+)`)

// ConnectDB - Provide connection to database with gorm
func ConnectDB(url string) *gorm.DB {
	db, err := gorm.Open(connectionVars(url))
	if err != nil {
		log.Fatal("Failed to connect database")
	}
	db.LogMode(true)
	db.DB().SetMaxIdleConns(0) // To avoid an error "Invalid Connection" on Heroku
	return db
}

// MigrateDB - Do migration database with gorm.DB
func MigrateDB(db *gorm.DB) {
	db.AutoMigrate(&models.User{})
	db.AutoMigrate(&models.Game{})
	db.AutoMigrate(&models.Transaction{})
}

// connectionVars derives the gorm driver name and DSN from a database URL.
// For mysql URLs the DSN is rewritten into the go-sql-driver format; any
// other URL is passed through untouched.
func connectionVars(url string) (driver string, source string) {
	match := connectionRE.FindStringSubmatch(url)
	if match == nil {
		// Previously a non-matching URL panicked on match[1]; instead pass
		// the raw URL through and let gorm.Open report the failure.
		return "", url
	}
	driver = match[1]
	if driver == "mysql" {
		source = fmt.Sprintf(
			"%s:%s@tcp(%s:3306)/%s?charset=utf8&parseTime=true&loc=Asia%%2FTokyo",
			match[2], match[3], match[4], match[5],
		)
	} else {
		source = url
	}
	return
}
// FindBetween returns timestamps falling between start inclusive and
// end exclusive.
func (t *timestampSeriesType) FindBetween(
	start, end float64) (result []float64) {
	var record Record
	// Hold the series lock for the whole scan so pages and lastTs are
	// read consistently.
	t.lock.Lock()
	defer t.lock.Unlock()
	// Appender that accumulates into result, keeping only records whose
	// timestamp lies in [start, end).
	appender := AppenderFilterFunc(
		(*tsAppenderType)(&result),
		func(r *Record) bool {
			return r.TimeStamp >= start && r.TimeStamp < end
		},
	)
	// Scan stored pages from start onwards. If FetchForward returns false,
	// the scan was cut short (presumably the appender declined further
	// records — confirm against FetchForward's contract) and the in-memory
	// tail value must not be appended.
	if !t.pages.FetchForward(start, kPlusInf, &record, appender) {
		return
	}
	// Append the latest value (t.lastTs), which is not yet in pages; the
	// appender's filter still enforces the [start, end) bounds.
	record.TimeStamp = t.lastTs
	appender.Append(&record)
	return
}
Detection of a diffuse extended halo-like structure around 47 Tuc We constructed for the first time a stellar density profile of 47 Tucanae (47 Tuc) out of $\sim$ 5.5 times its tidal radius ($r_t$) using high-quality deep $BV$ photometry. After carefully considering the influence of photometric errors, and Milky Way and Small Magellanic Cloud composite stellar population contamination, we found that the cluster stellar density profile reaches a nearly constant value from $\sim$ 1.7$r_t$ outwards, which does not depend on the direction from the cluster's center considered. These results visibly contrast with recent distinct theoretical predictions on the existence of tidal tails or on a density profile that falls as $r^{-4}$ at large distances, and with observational outcomes of a clumpy structure as well. Our results suggest that the envelope of 47 Tuc is a halo- like nearly constant low density structure. INTRODUCTION Extended structures around Galactic globular clusters (GGCs) have been observed in a non-negligible number of objects (e.g. see Carballo-Bello et al. 2012). Olszewski et al. (2009) found an unprecedented extra-tidal, azimuthally smooth, halo-like diffuse spatial extension of the NGC 1851, while Correnti et al. (2011) discovered an extended stellar halo surrounding the distant NGC 5694. M 2 was also found to be embedded in a diffuse stellar envelope extending to a radial distance of at least five time the nominal tidal radius (Kuzma et al. 2016). Compelling evidence of long tidal tails have also been reported in the field of Pal 5 (Odenkirchen et al. 2003), Pal 14 (Sollima et al. 2011), Pal 15 (Myeong et al. 2017), and NGC 7492 (Navarrete et al. 2017), among others. From a theoretical point on view, N-body simulations have shown that the detection of extended envelopes around GGCs could be due, for instance, to potential escapers (Küpper et al. 2010) or potential observational biases (Balbinot & Gieles 2017). 
Recent theoretical models argued on very distinct features of the envelope of the 47 Tucanae (47 Tuc). Lane et al. (2012) modeled the cluster orbital motion to determine the locations and the stellar densities of cluster tidal tails, which predicted to be an increase of 3-4% above the Galactic background. The tails would seem to emerge from the cluster center towards opposite directions that are connected by a line oriented North-East to South-West. On the other hand, Peñarrubia et al. (2017) using statistical arguments and numerical techniques derived cluster stellar density profiles, assuming that they are embedded in a dark matter halo. They found that the cluster densities approach asymptotically ρ ∼ r −4 at large distances. Models with no dark matter produce much less shallower profiles. From an observational point of view, some previous results suggested a clumpy structure around the cluster (Chen & Chen 2010). However, they are based on 2MASS photometry that barely reaches the cluster's Main Sequence (MS) turnoff region. Leon et al. (2000) had also pointed out the serious challenge that represents the contamination of Small Magellanic Cloud (SMC) stars that caused they could not trace the cluster radial density profile in direction towards the galaxy. In this Letter we describe how we accomplish constructing a radial stellar density profile of 47 Tuc out of ∼ 5.5r t in direction to the SMC and between ∼ 1.7 and 3.7r t for any other direction from the cluster center. Nevertheless, these outcomes will be greatly benefit, for instance, from the ongoing DECam surveys (Abbott et al. 2016). In the following we describe the collection and processing of the data set, and the subsequent anal-ysis performed in order to produce the radial density profile as a function of the position angle. Finally, we briefly discuss our results. 
DATA ANALYSIS AND DISCUSSION We made use of publicly available 600 s B and 300 s V images obtained at the 4 m Blanco telescope (CTIO) with Mosaic II (36'×36' camera array) as part of a search for extra tidal structure in GGCs (CTIO 2009B-0199, PI= Olszewski). The 14 studied fields are placed around 47 Tuc (see Fig. 1, between 1.7 and 5.5 timed its tidal radius (= 56 pc, Harris 1996), and other two Milky Way (MW) fields are located at ∼ 9.3 o to the North-West from the cluster center. This data set, which also includes calibration images (zero, domeflats, skyflats) and standard field images, was processed as described in (e.g., Piatti 2012;Piatti 2015). Mean extinction coefficients of 0.211±0.024 (B) and 0.142±0.014 (V ) and color terms of -0.093±0.004 (B) and 0.038±0.005 (V ) were obtained, with rms of 0.030 (B) and 0.027 (V ). Point-spread-function photometry was performed as extensively described, for instance, in Piatti et al. (2014); Piatti & Bastian (2016); Piatti & Cole (2017). Particular success in isolating bona fide stellar objects was achieved by using roundness values between -0.5 and 0.5 and sharpness values between 0.2 and 1.0. Errors in V and B − V resulted to be < 0.010 mag for V < 19.0 mag. Fig. 2 depicts the colormagnitude diagram (CMD) obtained for stars in the 47 Tuc field # 1 and for one MW field. The former is dominated by the SMC stellar population, namely the old MS turnoff, the subgiant and red giant branches, and the red clump superimposed to the 47 Tuc's MS (see e.g., Piatti 2012Piatti , 2015. We have superimposed a theoretical isochrone from Bressan et al. (2012) of log(t yr −1 ) = 10.10, = -0.7 dex, (m − M ) V = 13.37 mag and E(B − V ) = 0.033 mag (Harris 1996). We dereddened the studied fields using the E(B − V ) values as a function of galactic coordinates obtained by Schlafly & Finkbeiner (2011) from a recalibration of the Schlegel et al. (1998)'s extinction map. 
The average color excess for the surveyed region is E(B − V ) = 0.030±0.003 mag. In order to build the cluster density profile, we counted the number of stars distributed inside the delineated region drawn in Fig. 2. The latter comprises the upper cluster MS and the onset of the subgiant branch, and minimizes the contamination from the SMC. As for cleaning the observed field CMDs from the MW contamination we applied the procedure outlined by Piatti & Bica (2012) and successfully used elsewhere (see, e.g. Piatti 2014; Piatti et al. 2015;. The MW CMDs served as the reference field Figure 1. Spatial distribution of studied fields ( labeled boxes); the unlabeled one refers to two MW fields located ∼ 9.3 o from the cluster center. The ellipse has a semi-major axis equals to the cluster tidal radius (56 pc) and a PA of 120 o (Chen & Chen 2010). The straight line represents the position and extension of the tidal tails near the cluster body predicted by Lane et al. (2012). The direction to the SMC is also indicated. CMD which was subtracted to those observed around 47 Tuc. Statistically speaking, no residual was left. This is because of the relatively small number of stars in the MW CMD and of the uniformity of the MW stellar population throughout the surveyed region. Indeed, we used two synthetic CMDs generated from the Besançon galactic model (Robin et al. 2003), one centered on 47 Tuc and the other one at the position of our MW field and, after applying the aforementioned cleaning precepts, we found none star in the decontaminated CMD. Fig. 3 shows with magenta dots the observed stars in each 47 Tuc fields that were subtracted using this procedure. As can be seen, the MW marginally affects the cluster CMD region where we carry out the star counts. The contamination from the SMC represents a more serious challenge, mainly because its CMD changes with the position in the sky. 
Particularly, the region delineated to count cluster stars is contaminated by supergiant stars, so that the younger (i.e., closer to the SMC center) the composite SMC stellar population, the larger the number of supergiants. In order to cope with this stellar pollution we used two equal-sized adjacent regions to that traced in Fig. 2 (see gray contours in Fig. 3). We used these areas to build their respective luminosity functions, using every star not subtracted previously (those drawn with green symbols). Then, we adopted the average of both luminosity functions to subtract the respective number of stars per magnitude interval from the defined cluster star count region. We used intervals of ∆V = 0.10 mag and subtracted the appropriate number of stars, randomly. A similar method was employed by Olszewski et al. (2009). The stars that survived this step were drawn with black symbols in Fig. 3. We counted the number of measured stars, i.e., stars seen in the observed CMDs without any cleaning procedure, distributed along the designed path in the 47 Tuc field CMDs as a function of the distance to the cluster center. To do this, we employed the method described by (Piatti 2016, and references therein), based on star counts carried out within statistically meaningful sized boxes distributed throughout the whole field, and then computed the number of stars per unit area as a function of the distance r to the cluster center. This method does not necessarily require a complete circle of radius r within the observed field to estimate the mean stellar density at that distance. This is an important consideration since having a stellar density profile that extends far away from the cluster center allows us to estimate it with high precision. We binned the whole 47 Tuc fields mosaic (see Fig. 1 Tuc MS strip, we took into account that a star, owing to its errors, has the chance of falling outside it. 
This was done by repeating the star counting with the designed 47 Tuc MS strip shifted in magnitude and color by ± 0.01 mag. We divided Fig. 1 in 8 angular sections of 45 • wide centered on the cluster, which resulted suitable for our statistical purposes. ) into Radial profiles for stars that were kept unsubtracted after cleaning the CMDs from the MW and SMC field star contamination were also built. In this case, the un-certainties were estimated taking into account a 20% fluctuation of the number of stars after cleaning the CMDs from the MW contamination (∼ 4 times larger in average than the negligible residuals from MW field star variation and cleaning procedure described above), and twice as large the difference of the number of SMC stars subtracted using both previously constructed luminosity functions, in addition to photometric errors. We added in quadrature all the involved uncertainties. Fig. 4 shows the results with black and magenta circles for observed and cleaned density profiles. We also included the King (1962)'s andElson et al. (1987)'s profiles depicted with black and orange lines, respectively, for comparison purposes. The resultant density profiles along the directions with negligible contamination by SMC stars (-135 o ≤ PA ≤ 45 o ) and between 70 and 200 pc (1.25 and 3.6r t , respectively) show mean stellar excesses of log(stars/deg 2 ) = 1.8±0.2 . We found slightly larger values (1.9±0.3) along the remaining directions (45 o < PA < 225 o ), possibly due to residuals of SMC stellar populations. Note that along PA = 90 o our density profile just starts at the cluster King (1962)'s radius and expands until ∼ 310 pc (5.5r t ). We recall that these density profiles have been built using mainly upper MS stars, while fainter MS stars could also unveil these extra tidal structures (Carballo-Bello et al. 2012), that have not been used because the SMC overshadows them. 
These outcomes suggest that: i) 47 Tuc is not tidally limited to its King (1962)'s radius; ii) the cluster extends out to at least ∼ 5.5r t ; iii) from ∼ 1.7r t outwards there is a halo-like and nearly constant low density structure. We did not find evidence of tidal tails as suggested by Lane et al. (2012). According to the authors they should emerge from the cluster as illustrated by the straight line in Fig. 1 and with peak stellar densities of ∼ 85-120 stars/deg 2 . Our results show stellar densities of the same order as those predicted by the models, though. Chen & Chen (2010) found a clumpy structure around the 47 Tuc's center at distances < 250 pc that makes the cluster slightly flattened in shape (axial ratio of 0.86) with a PA of the major axis of 120 o (see ellipse in Fig. 1). They used 2MASS photometry for stars brighter than K s = 15.6 mag, just barely above the limiting magnitude of such a database, which in turn nearly coincides with the cluster MS turnoff magnitude. However, our results suggest that the 47 Tuc's envelope is more likely a diffuse structure, since the stellar density profiles look similar along any direction from the cluster center. Such profiles seem to be rather flat, in contrast with the r −4 law suggested by Peñarrubia et al. (2017) as a prediction of expected stellar envelopes of GCs embedded in dark mini-haloes. Olszewski et al. (2009) found a symmetric density profile with a power-law r −1.24 profile out of ∼ 6r t in NGC 1851, instead, which is more akin to the one derived here for 47 Tuc. Although we did not survey uniformly all the sky around 47 Tuc, the present outcome could suggest that Galactic tidal interactions have been a relatively inefficient process for stripping stars off the cluster (Dinescu et al. 1997(Dinescu et al. , 1999. We thank the anonymous referee whose thorough comments and suggestions allowed us to improve the manuscript. Figure 4. 
Observed (black circles) and MW-SMC corrected (magenta circles) density profiles as a function of the distance to the cluster center. Central PA for radial profiles within PA±22.5 • are labeled at the top-left margin in degrees (right). King (1962)'s (black) and Elson et al. (1987)'s (orange) models with rt = 56 pc and c = 2.07 (Harris 1996) are also superimposed. The latter was drawn by adopting a value of γ =3.5 that best resembles the former up to the cluster tidal radius.
// Set edits and saves a setting within a configuration file.
func Set(name string) error {
	// No config file loaded: report the missing configuration for "set".
	if viper.ConfigFileUsed() == "" {
		configMissing("set")
	}
	// Sorted key list is required below by sort.SearchStrings.
	keys := viper.AllKeys()
	sort.Strings(keys)
	// Accept the legacy "directories." prefix as an alias of "directory.".
	if strings.HasPrefix(name, "directories.") {
		name = strings.Replace(name, "directories.", "directory.", 1)
	}
	// Expand a bare suffix (e.g. "html") to its fully qualified key.
	// NOTE(review): name is rewritten inside the loop, so later iterations
	// match against the already-expanded key; with multiple candidates the
	// last match in sorted order wins — presumably intentional, confirm.
	for i, key := range keys {
		if strings.Contains(key, "."+name) {
			name = keys[i]
		}
	}
	// Binary search for an exact key; unknown names abort the program with
	// a usage hint rather than returning an error.
	if i := sort.SearchStrings(keys, name); i == len(keys) || keys[i] != name {
		logs.Printf("%s\n%s %s\n", color.Warn.Sprintf("invalid flag value %v", fmt.Sprintf("--name %s", name)), "to see a list of usable settings run:", color.Bold.Sprint("df2 config info"))
		os.Exit(1)
	}
	// Remember the resolved key, then delegate the actual edit/save.
	Config.nameFlag = name
	if err := sets(name); err != nil {
		return fmt.Errorf("set %s: %w", name, err)
	}
	return nil
}
<reponame>kirbycool/baseweb import React from 'react'; import {useStyletron} from 'baseui'; import {FormControl} from 'baseui/form-control'; import {FlexGrid} from 'baseui/flex-grid'; import {StatefulDatepicker} from 'baseui/datepicker'; import {TimezonePicker} from 'baseui/timezonepicker'; import {TimePicker} from 'baseui/timepicker'; export default () => { const [css, theme] = useStyletron(); return ( <React.Fragment> Disabled state <FlexGrid flexDirection="row"> <div className={css({ width: '120px', marginRight: theme.sizing.scale500, })} > <FormControl label="DatePicker"> <StatefulDatepicker disabled /> </FormControl> </div> <div className={css({ width: '120px', marginRight: theme.sizing.scale500, })} > <FormControl label="TimePicker"> <TimePicker disabled /> </FormControl> </div> <div className={css({flex: 1})}> <FormControl label="TimezonePicker"> <TimezonePicker disabled /> </FormControl> </div> </FlexGrid> Positive state <FlexGrid flexDirection="row"> <div className={css({ width: '120px', marginRight: theme.sizing.scale500, })} > <FormControl label="DatePicker"> <StatefulDatepicker positive /> </FormControl> </div> <div className={css({ width: '120px', marginRight: theme.sizing.scale500, })} > <FormControl label="TimePicker"> <TimePicker positive /> </FormControl> </div> <div className={css({flex: 1})}> <FormControl label="TimezonePicker"> <TimezonePicker positive /> </FormControl> </div> </FlexGrid> Error state <FlexGrid flexDirection="row"> <div className={css({ width: '120px', marginRight: theme.sizing.scale500, })} > <FormControl label="DatePicker"> <StatefulDatepicker error /> </FormControl> </div> <div className={css({ width: '120px', marginRight: theme.sizing.scale500, })} > <FormControl label="TimePicker"> <TimePicker error /> </FormControl> </div> <div className={css({flex: 1})}> <FormControl label="TimezonePicker"> <TimezonePicker error /> </FormControl> </div> </FlexGrid> </React.Fragment> ); };
from django.db import models
from django.utils import timezone
from datetime import datetime


class Payer(models.Model):
    """A points payer/sponsor; ``total_points`` caches the current balance."""

    name = models.CharField(max_length=255, unique=True)
    total_points = models.PositiveIntegerField(default=0)

    def __str__(self):
        return self.name


class Transaction(models.Model):
    """A single points credit or debit attributed to a payer."""

    payer = models.ForeignKey(
        'payer',
        on_delete=models.CASCADE,
    )
    points = models.IntegerField()
    # timezone.now (instead of the previous naive datetime.now) so defaults
    # are timezone-aware and do not trigger warnings under USE_TZ; passed
    # uncalled so it is evaluated at row-creation time.
    timestamp = models.DateTimeField(default=timezone.now)
    # presumably tracks how many of this transaction's points are still
    # unspent — TODO confirm against the spend logic.
    remaining_points = models.IntegerField(default=0, blank=True, null=True)


class Spend(models.Model):
    """A spend operation; ``receipt`` stores its breakdown as JSON."""

    points = models.PositiveIntegerField(default=0)
    receipt = models.JSONField()
// MasterMessage handles messages from master func MasterMessage(update *tgbotapi.Update) { if update.Message.Chat.ID == connection.MasterTelegramID { var msg tgbotapi.MessageConfig if update.Message.ForwardFrom != nil { msg = tgbotapi.NewMessage(connection.MasterTelegramID, strconv.Itoa(update.Message.ForwardFrom.ID)) } else { msg = tgbotapi.NewMessage(connection.SquadTelegramID, update.Message.Text) } connection.Telegram.Send(msg) } }
/**
 * Generates an unbounded sequence of {@link java.lang.String}.
 *
 * @author Christophe Pollet
 */
public class StringIndexSequence extends BaseFieldGenerator<String> {
    private final String format;
    private final Long start;
    // The index that the next call to next() will format.
    private Long next;

    /**
     * Generates a {@code String} based on a format and an ever-growing number,
     * starting at 0.
     *
     * @param format the format to use. Must be compliant with {@link String#format(String, Object...)}.
     */
    public StringIndexSequence(String format) {
        this(format, 0L);
    }

    /**
     * Generates a {@code String} based on a format and an ever-growing number.
     *
     * @param format the format to use. Must be compliant with {@link String#format(String, Object...)}.
     * @param start the start index of the sequence
     */
    public StringIndexSequence(String format, Long start) {
        this.format = format;
        this.start = start;
        this.next = start;
    }

    /**
     * Does nothing. The sequence intentionally keeps growing and is never
     * rewound to {@code start}.
     */
    @Override
    public void reset() {
        // nothing
    }

    /**
     * Returns the next sequence value.
     *
     * <p>NOTE(review): unlike a conventional {@code current()}, this delegates
     * to {@link #next()} and therefore advances the sequence on every call.
     *
     * @return the next sequence value
     */
    @Override
    public String current() {
        return next();
    }

    /**
     * Always returns false. This is mandatory to make sure this generator does not force a generator to generate values
     * forever.
     *
     * @return {@code false}.
     */
    @Override
    public boolean hasNext() {
        return false;
    }

    /**
     * Returns the next generated value and advances the internal index.
     *
     * @return the next generated value.
     */
    @Override
    public String next() {
        Long current = next;
        next += 1;
        return String.format(format, current);
    }

    // NOTE(review): deliberately shows the starting index, not the current
    // position of the sequence.
    @Override
    public String toString() {
        return "StringIndexSequence{" + start + '}';
    }
}
//
//  MCTipsUtils.h
//  MotionCamera
//
//  Created by 原鹏飞 on 2019/8/10.
//  Copyright © 2019 Apeman. All rights reserved.
//

#import <Foundation/Foundation.h>
// UIKit is required: several methods below take UIView parameters.
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

typedef NS_ENUM(NSUInteger, MCTipsOrientation) {
    MCTipsOrientationPortrait = 0,            // portrait (default)
    MCTipsOrientationPortraitUpsideDown = 1,  // portrait, upside down
    MCTipsOrientationLandscapeLeft = 2,       // landscape left (portrait rotated 90° clockwise)
    MCTipsOrientationLandscapeRight = 3,      // landscape right (portrait rotated 90° counter-clockwise)
};

/** Common tip message types */
typedef NS_ENUM(NSInteger, NormalTips){
    NormalTipsLoading = 1,            /**< loading */
    NormalTipsSending = 2,            /**< sending */
    NormalTipsConnecting = 3,         /**< connecting */
    NormalTipsLogining = 4,           /**< logging in */
    NormalTipsNetworkUnusual = 100,   /**< network error */
    NormalTipsNetworkTimeout = 101,   /**< connection timed out */
    NormalTipsRequestFailed = 102,    /**< request failed */
    NormalTipsOperationSuccess = 103, /**< operation succeeded */
    NormalTipsDownloadFail = 104,     /**< download failed */
};

/** Toast / HUD helper */
@interface MCTipsUtils : NSObject

#pragma mark - Common tips

/// Show a common tip message
/// @param type the common tip type
+ (void)showNormalTips:(NormalTips)type;

#pragma mark - Tips with activity indicator

/** Activity indicator over an opaque white background (on a view; must be hidden manually) */
+ (void)showMaskHUDWithMessage:(NSString *)message inView:(UIView *)view;

/// Activity indicator (on the window; must be hidden manually)
+ (void)showHUD;

/// Activity indicator with a message (on the window; must be hidden manually)
/// @param message the message to show
+ (void)showHUDWithMessage:(NSString *)message;

/// Activity indicator with a message (on a view; must be hidden manually)
/// @param message the message to show
/// @param view the superview of the tip control
+ (void)showHUDWithMessage:(NSString *)message inView:(UIView *)view;

/// Activity indicator with a message (on a view; hides automatically)
/// @param message the message to show
/// @param view the view the tip is shown on
/// @param time delay before the tip hides automatically
+ (void)showHUDWithMessage:(NSString *)message inView:(UIView *)view delay:(NSTimeInterval)time;

#pragma mark - Tips without activity indicator

/// Tip (on the window; hides automatically after 1.5 s)
/// @param message the message to show
+ (void)showTips:(NSString *)message;

/// Tip (on the window; hides automatically after 1.5 s)
/// @param message the message to show
/// @param orientation orientation of the tip box
+ (void)showTips:(NSString *)message orientation:(MCTipsOrientation)orientation;

/// Show a tip based on the code returned by a network request (on the window; hides automatically after 1.5 s)
/// @param code the code returned by the network request
/// @param successMsg custom tip shown when code == 1000
/// @param originalMsg the message returned by the server
+ (void)showTipsWithCode:(NSInteger)code successMsg:(nullable NSString *)successMsg originalMsg:(nullable NSString *)originalMsg;

/// Tip (on the window, shown at the bottom; hides automatically after 1.5 s)
/// @param message the message to show
+ (void)showBottomTips:(NSString *)message;

/// Tip (on the window, shown at the top; hides automatically after 1.5 s)
/// @param text the message to show
+ (void)showTopTips:(NSString *)text;

/// Tip (on a view; hides automatically after 1.5 s)
/// @param message the message to show
/// @param view the superview of the tip control
+ (void)showTips:(NSString *)message inView:(UIView *)view;

/// Tip (on a view; hides automatically)
/// @param message the message to show
/// @param view the superview of the tip control
/// @param time delay before hiding automatically (default: 1 s)
+ (void)showTips:(NSString *)message inView:(UIView *)view delay:(NSTimeInterval)time;

/// Tip (on a view; hides automatically)
/// @param message the message to show
/// @param view the superview of the tip control
/// @param time delay before hiding automatically
/// @param yOffset vertical offset from the center point
+ (void)showTips:(NSString *)message inView:(UIView *)view delay:(NSTimeInterval)time yOffset:(CGFloat)yOffset;

#pragma mark - Success / failure

/// Success tip (hides automatically)
/// @param message the message to show
+ (void)showSuccessWithTips:(NSString *)message;

/// Success tip (hides automatically)
/// @param message the message to show
/// @param time delay before hiding automatically
+ (void)showSuccessWithTips:(NSString *)message delay:(NSTimeInterval)time;

/// Error tip (hides automatically)
/// @param message the message to show
+ (void)showErrorWithTips:(NSString *)message;

/// Error tip (hides automatically)
/// @param message the message to show
/// @param time delay before hiding automatically
+ (void)showErrorWithTips:(NSString *)message delay:(NSTimeInterval)time;

#pragma mark - Hide

/// Hide the tip control
+ (void)hideHUD;

/// Hide the tip control
/// @param view the superview of the tip control
+ (void)hideHUDWithView:(UIView *)view;

@end

@interface GHUDHelp : NSObject

@property (nonatomic, strong) NSMutableArray * hudViews;

+ (GHUDHelp *)share;

@end

NS_ASSUME_NONNULL_END
/** * The web3j producer. */ public class Web3jProducer extends HeaderSelectorProducer { private static final Logger LOG = LoggerFactory.getLogger(Web3jProducer.class); private Web3j web3j; private Quorum quorum; private Web3jConfiguration configuration; public Web3jProducer(Web3jEndpoint endpoint, final Web3jConfiguration configuration) { super(endpoint, Web3jConstants.OPERATION, () -> configuration.getOperationOrDefault(), false); web3j = endpoint.getWeb3j(); this.configuration = configuration; if (web3j instanceof Quorum) { quorum = (Quorum) web3j; } } @Override public Web3jEndpoint getEndpoint() { return (Web3jEndpoint) super.getEndpoint(); } @Override protected void doStart() throws Exception { super.doStart(); } @Override protected void doStop() throws Exception { super.doStop(); } @InvokeOnHeader(Web3jConstants.WEB3_CLIENT_VERSION) void web3ClientVersion(Message message) throws IOException { Request<?, Web3ClientVersion> web3ClientVersionRequest = web3j.web3ClientVersion(); setRequestId(message, web3ClientVersionRequest); Web3ClientVersion response = web3ClientVersionRequest.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getWeb3ClientVersion()); } } @InvokeOnHeader(Web3jConstants.WEB3_SHA3) void web3Sha3(Message message) throws IOException { Request<?, Web3Sha3> web3Sha3Request = web3j.web3Sha3(message.getBody(String.class)); setRequestId(message, web3Sha3Request); Web3Sha3 response = web3Sha3Request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getResult()); } } @InvokeOnHeader(Web3jConstants.NET_VERSION) void netVersion(Message message) throws IOException { Request<?, NetVersion> netVersionRequest = web3j.netVersion(); setRequestId(message, netVersionRequest); NetVersion response = netVersionRequest.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getNetVersion()); } } 
// net_listening: whether the client is actively listening for connections.
@InvokeOnHeader(Web3jConstants.NET_LISTENING)
void netListening(Message message) throws IOException {
    Request<?, NetListening> netListeningRequest = web3j.netListening();
    setRequestId(message, netListeningRequest);
    NetListening response = netListeningRequest.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.isListening());
    }
}

// net_peerCount: number of connected peers.
@InvokeOnHeader(Web3jConstants.NET_PEER_COUNT)
void netPeerCount(Message message) throws IOException {
    Request<?, NetPeerCount> request = web3j.netPeerCount();
    setRequestId(message, request);
    NetPeerCount response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getQuantity());
    }
}

// eth_protocolVersion: the ethereum protocol version of the node.
@InvokeOnHeader(Web3jConstants.ETH_PROTOCOL_VERSION)
void ethProtocolVersion(Message message) throws IOException {
    Request<?, EthProtocolVersion> request = web3j.ethProtocolVersion();
    setRequestId(message, request);
    EthProtocolVersion response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getProtocolVersion());
    }
}

// eth_syncing: whether the node is currently syncing.
@InvokeOnHeader(Web3jConstants.ETH_SYNCING)
void ethSyncing(Message message) throws IOException {
    Request<?, EthSyncing> request = web3j.ethSyncing();
    setRequestId(message, request);
    EthSyncing response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.isSyncing());
    }
}

// eth_coinbase: the node's coinbase address.
@InvokeOnHeader(Web3jConstants.ETH_COINBASE)
void ethCoinbase(Message message) throws IOException {
    Request<?, EthCoinbase> request = web3j.ethCoinbase();
    setRequestId(message, request);
    EthCoinbase response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getAddress());
    }
}

// eth_mining: whether the node is mining. (Body continues on the next chunk.)
@InvokeOnHeader(Web3jConstants.ETH_MINING)
void ethMining(Message message) throws IOException {
    Request<?, EthMining> request = web3j.ethMining();
    setRequestId(message, request);
    EthMining response =
request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.isMining()); } } @InvokeOnHeader(Web3jConstants.ETH_HASHRATE) void ethHashrate(Message message) throws IOException { Request<?, EthHashrate> request = web3j.ethHashrate(); setRequestId(message, request); EthHashrate response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getHashrate()); } } @InvokeOnHeader(Web3jConstants.ETH_GAS_PRICE) void ethGasPrice(Message message) throws IOException { Request<?, EthGasPrice> request = web3j.ethGasPrice(); setRequestId(message, request); EthGasPrice response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getGasPrice()); } } @InvokeOnHeader(Web3jConstants.ETH_ACCOUNTS) void ethAccounts(Message message) throws IOException { Request<?, EthAccounts> request = web3j.ethAccounts(); setRequestId(message, request); EthAccounts response = request.send(); message.setBody(response.getAccounts()); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getAccounts()); } } @InvokeOnHeader(Web3jConstants.ETH_BLOCK_NUMBER) void ethBlockNumber(Message message) throws IOException { Request<?, EthBlockNumber> request = web3j.ethBlockNumber(); setRequestId(message, request); EthBlockNumber response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getBlockNumber()); } } @InvokeOnHeader(Web3jConstants.ETH_GET_BALANCE) void ethGetBalance(Message message) throws IOException { String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class); DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class)); Request<?, EthGetBalance> request = web3j.ethGetBalance(address, atBlock); setRequestId(message, 
request);
    EthGetBalance response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getBalance());
    }
}

// eth_getStorageAt: value at a storage position of an address.
@InvokeOnHeader(Web3jConstants.ETH_GET_STORAGE_AT)
void ethGetStorageAt(Message message) throws IOException {
    String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    BigInteger position = message.getHeader(Web3jConstants.POSITION, configuration::getPosition, BigInteger.class);
    Request<?, EthGetStorageAt> request = web3j.ethGetStorageAt(address, position, atBlock);
    setRequestId(message, request);
    EthGetStorageAt response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getData());
    }
}

// eth_getTransactionCount: number of transactions sent from an address.
@InvokeOnHeader(Web3jConstants.ETH_GET_TRANSACTION_COUNT)
void ethGetTransactionCount(Message message) throws IOException {
    String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    Request<?, EthGetTransactionCount> request = web3j.ethGetTransactionCount(address, atBlock);
    setRequestId(message, request);
    EthGetTransactionCount response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionCount());
    }
}

// eth_getBlockTransactionCountByHash: transaction count in a block, by block hash.
@InvokeOnHeader(Web3jConstants.ETH_GET_BLOCK_TRANSACTION_COUNT_BY_HASH)
void ethGetBlockTransactionCountByHash(Message message) throws IOException {
    String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class);
    Request<?, EthGetBlockTransactionCountByHash> request = web3j.ethGetBlockTransactionCountByHash(blockHash);
    setRequestId(message, request);
    EthGetBlockTransactionCountByHash response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionCount());
    }
}

// eth_getBlockTransactionCountByNumber: transaction count in a block, by block number.
@InvokeOnHeader(Web3jConstants.ETH_GET_BLOCK_TRANSACTION_COUNT_BY_NUMBER)
void ethGetBlockTransactionCountByNumber(Message message) throws IOException {
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    Request<?, EthGetBlockTransactionCountByNumber> request = web3j.ethGetBlockTransactionCountByNumber(atBlock);
    setRequestId(message, request);
    EthGetBlockTransactionCountByNumber response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionCount());
    }
}

// eth_getUncleCountByBlockHash: uncle count in a block, by block hash.
@InvokeOnHeader(Web3jConstants.ETH_GET_UNCLE_COUNT_BY_BLOCK_HASH)
void ethGetUncleCountByBlockHash(Message message) throws IOException {
    String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class);
    Request<?, EthGetUncleCountByBlockHash> request = web3j.ethGetUncleCountByBlockHash(blockHash);
    setRequestId(message, request);
    EthGetUncleCountByBlockHash response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getUncleCount());
    }
}

// eth_getUncleCountByBlockNumber: uncle count in a block, by block number.
@InvokeOnHeader(Web3jConstants.ETH_GET_UNCLE_COUNT_BY_BLOCK_NUMBER)
void ethGetUncleCountByBlockNumber(Message message) throws IOException {
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    Request<?, EthGetUncleCountByBlockNumber> request = web3j.ethGetUncleCountByBlockNumber(atBlock);
    setRequestId(message, request);
    EthGetUncleCountByBlockNumber response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getUncleCount());
    }
}

// eth_getCode: contract code at an address.
@InvokeOnHeader(Web3jConstants.ETH_GET_CODE)
void ethGetCode(Message message) throws IOException {
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    Request<?, EthGetCode> request = web3j.ethGetCode(address, atBlock);
    setRequestId(message, request);
    EthGetCode response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getCode());
    }
}

// eth_sign: signs a pre-hashed payload with the key of the given address.
@InvokeOnHeader(Web3jConstants.ETH_SIGN)
void ethSign(Message message) throws IOException {
    String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    String sha3HashOfDataToSign = message.getHeader(Web3jConstants.SHA3_HASH_OF_DATA_TO_SIGN, configuration::getSha3HashOfDataToSign, String.class);
    Request<?, EthSign> request = web3j.ethSign(address, sha3HashOfDataToSign);
    setRequestId(message, request);
    EthSign response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getSignature());
    }
}

// eth_sendTransaction: submits a transaction signed by the node; body becomes the tx hash.
@InvokeOnHeader(Web3jConstants.ETH_SEND_TRANSACTION)
void ethSendTransaction(Message message) throws IOException {
    String fromAddress = message.getHeader(Web3jConstants.FROM_ADDRESS, configuration::getFromAddress, String.class);
    String toAddress = message.getHeader(Web3jConstants.TO_ADDRESS, configuration::getToAddress, String.class);
    BigInteger nonce = message.getHeader(Web3jConstants.NONCE, configuration::getNonce, BigInteger.class);
    BigInteger gasPrice = message.getHeader(Web3jConstants.GAS_PRICE, configuration::getGasPrice, BigInteger.class);
    BigInteger gasLimit = message.getHeader(Web3jConstants.GAS_LIMIT, configuration::getGasLimit, BigInteger.class);
    BigInteger value = message.getHeader(Web3jConstants.VALUE, configuration::getValue, BigInteger.class);
    String data = message.getHeader(Web3jConstants.DATA, configuration::getData, String.class);
    org.web3j.protocol.core.methods.request.Transaction transaction = new org.web3j.protocol.core.methods.request.Transaction(
            fromAddress, nonce, gasPrice, gasLimit, toAddress, value, data);
    Request<?, EthSendTransaction> request = web3j.ethSendTransaction(transaction);
    setRequestId(message, request);
    EthSendTransaction response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionHash());
    }
}

// eth_sendRawTransaction: submits an already-signed transaction; body becomes the tx hash.
@InvokeOnHeader(Web3jConstants.ETH_SEND_RAW_TRANSACTION)
void ethSendRawTransaction(Message message) throws IOException {
    String signedTransactionData = message.getHeader(Web3jConstants.SIGNED_TRANSACTION_DATA, configuration::getSignedTransactionData, String.class);
    Request<?, EthSendTransaction> request = web3j.ethSendRawTransaction(signedTransactionData);
    setRequestId(message, request);
    EthSendTransaction response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionHash());
    }
}

// eth_call: executes a read-only call without creating a transaction.
@InvokeOnHeader(Web3jConstants.ETH_CALL)
void ethCall(Message message) throws IOException {
    String fromAddress = message.getHeader(Web3jConstants.FROM_ADDRESS, configuration::getFromAddress, String.class);
    String toAddress = message.getHeader(Web3jConstants.TO_ADDRESS, configuration::getToAddress, String.class);
    BigInteger nonce = message.getHeader(Web3jConstants.NONCE, configuration::getNonce, BigInteger.class);
    BigInteger gasPrice = message.getHeader(Web3jConstants.GAS_PRICE, configuration::getGasPrice, BigInteger.class);
    BigInteger gasLimit = message.getHeader(Web3jConstants.GAS_LIMIT, configuration::getGasLimit, BigInteger.class);
    BigInteger value = message.getHeader(Web3jConstants.VALUE, configuration::getValue, BigInteger.class);
    String data = message.getHeader(Web3jConstants.DATA, configuration::getData, String.class);
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    org.web3j.protocol.core.methods.request.Transaction transaction = new org.web3j.protocol.core.methods.request.Transaction(
            fromAddress, nonce, gasPrice, gasLimit, toAddress, value, data);
    Request<?, EthCall> request = web3j.ethCall(transaction, atBlock);
    setRequestId(message, request);
    EthCall response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getValue());
    }
}

// eth_estimateGas: estimates gas for a call/transaction (nonce/gas/value left null).
@InvokeOnHeader(Web3jConstants.ETH_ESTIMATE_GAS)
void ethEstimateGas(Message message) throws IOException {
    String fromAddress = message.getHeader(Web3jConstants.FROM_ADDRESS, configuration::getFromAddress, String.class);
    String toAddress = message.getHeader(Web3jConstants.TO_ADDRESS, configuration::getToAddress, String.class);
    String data = message.getHeader(Web3jConstants.DATA, configuration::getData, String.class);
    org.web3j.protocol.core.methods.request.Transaction transaction = new org.web3j.protocol.core.methods.request.Transaction(
            fromAddress, null, null, null, toAddress, null, data);
    Request<?, EthEstimateGas> request = web3j.ethEstimateGas(transaction);
    setRequestId(message, request);
    EthEstimateGas response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getAmountUsed());
    }
}

// eth_getBlockByHash: block details by hash; full tx objects optional.
@InvokeOnHeader(Web3jConstants.ETH_GET_BLOCK_BY_HASH)
void ethGetBlockByHash(Message message) throws IOException {
    Boolean fullTransactionObjects = message.getHeader(Web3jConstants.FULL_TRANSACTION_OBJECTS, configuration::isFullTransactionObjects, Boolean.class);
    String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class);
    Request<?, EthBlock> request = web3j.ethGetBlockByHash(blockHash, fullTransactionObjects);
    setRequestId(message, request);
    EthBlock response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getBlock());
    }
}

// eth_getBlockByNumber: block details by number; full tx objects optional.
@InvokeOnHeader(Web3jConstants.ETH_GET_BLOCK_BY_NUMBER)
void ethGetBlockByNumber(Message message) throws IOException {
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    Boolean fullTransactionObjects = message.getHeader(Web3jConstants.FULL_TRANSACTION_OBJECTS, configuration::isFullTransactionObjects, Boolean.class);
    Request<?, EthBlock> request = web3j.ethGetBlockByNumber(atBlock, fullTransactionObjects);
    setRequestId(message, request);
    EthBlock response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getBlock());
    }
}

// eth_getTransactionByHash: body is the transaction, or null when not found.
@InvokeOnHeader(Web3jConstants.ETH_GET_TRANSACTION_BY_HASH)
void ethGetTransactionByHash(Message message) throws IOException {
    String transactionHash = message.getHeader(Web3jConstants.TRANSACTION_HASH, configuration::getTransactionHash, String.class);
    Request<?, EthTransaction> request = web3j.ethGetTransactionByHash(transactionHash);
    setRequestId(message, request);
    EthTransaction response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransaction().isPresent() ? response.getTransaction().get() : null);
    }
}

// eth_getTransactionByBlockHashAndIndex: body is the transaction, or null when not found.
@InvokeOnHeader(Web3jConstants.ETH_GET_TRANSACTION_BY_BLOCK_HASH_AND_INDEX)
void ethGetTransactionByBlockHashAndIndex(Message message) throws IOException {
    String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class);
    BigInteger transactionIndex = message.getHeader(Web3jConstants.INDEX, configuration::getIndex, BigInteger.class);
    Request<?, EthTransaction> request = web3j.ethGetTransactionByBlockHashAndIndex(blockHash, transactionIndex);
    setRequestId(message, request);
    EthTransaction response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransaction().isPresent() ? response.getTransaction().get() : null);
    }
}

// eth_getTransactionByBlockNumberAndIndex: body is the transaction, or null when not found.
@InvokeOnHeader(Web3jConstants.ETH_GET_TRANSACTION_BY_BLOCK_NUMBER_AND_INDEX)
void ethGetTransactionByBlockNumberAndIndex(Message message) throws IOException {
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    BigInteger transactionIndex = message.getHeader(Web3jConstants.INDEX, configuration::getIndex, BigInteger.class);
    Request<?, EthTransaction> request = web3j.ethGetTransactionByBlockNumberAndIndex(atBlock, transactionIndex);
    setRequestId(message, request);
    EthTransaction response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransaction().isPresent() ? response.getTransaction().get() : null);
    }
}

// eth_getUncleByBlockHashAndIndex: uncle block by block hash and uncle index.
@InvokeOnHeader(Web3jConstants.ETH_GET_UNCLE_BY_BLOCK_HASH_AND_INDEX)
void ethGetUncleByBlockHashAndIndex(Message message) throws IOException {
    String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class);
    BigInteger uncleIndex = message.getHeader(Web3jConstants.INDEX, configuration::getIndex, BigInteger.class);
    Request<?, EthBlock> request = web3j.ethGetUncleByBlockHashAndIndex(blockHash, uncleIndex);
    setRequestId(message, request);
    EthBlock response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getBlock());
    }
}

// eth_getUncleByBlockNumberAndIndex: uncle block by block number and uncle index.
@InvokeOnHeader(Web3jConstants.ETH_GET_UNCLE_BY_BLOCK_NUMBER_AND_INDEX)
void ethGetUncleByBlockNumberAndIndex(Message message) throws IOException {
    DefaultBlockParameter atBlock = toBlockParameter(message.getHeader(Web3jConstants.AT_BLOCK, configuration::getAtBlock, Object.class));
    BigInteger uncleIndex = message.getHeader(Web3jConstants.INDEX, configuration::getIndex, BigInteger.class);
    Request<?, EthBlock> request = web3j.ethGetUncleByBlockNumberAndIndex(atBlock, uncleIndex);
    setRequestId(message, request);
    EthBlock response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getBlock());
    }
}

// eth_getTransactionReceipt: receipt for a mined transaction.
@InvokeOnHeader(Web3jConstants.ETH_GET_TRANSACTION_RECEIPT)
void ethGetTransactionReceipt(Message message) throws IOException {
    String transactionHash = message.getHeader(Web3jConstants.TRANSACTION_HASH, configuration::getTransactionHash, String.class);
    Request<?, EthGetTransactionReceipt> request = web3j.ethGetTransactionReceipt(transactionHash);
    setRequestId(message, request);
    EthGetTransactionReceipt response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionReceipt());
    }
}

// eth_getCompilers: available compilers on the node.
@InvokeOnHeader(Web3jConstants.ETH_GET_COMPILERS)
void ethGetCompilers(Message message) throws IOException {
    Request<?, EthGetCompilers> request = web3j.ethGetCompilers();
    setRequestId(message, request);
    EthGetCompilers response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getCompilers());
    }
}

// eth_compileLLL: compiles LLL source code.
@InvokeOnHeader(Web3jConstants.ETH_COMPILE_LLL)
void ethCompileLLL(Message message) throws IOException {
    String sourceCode = message.getHeader(Web3jConstants.SOURCE_CODE, configuration::getSourceCode, String.class);
    Request<?, EthCompileLLL> request = web3j.ethCompileLLL(sourceCode);
    setRequestId(message, request);
    EthCompileLLL response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getCompiledSourceCode());
    }
}

// eth_compileSolidity: compiles Solidity source code.
@InvokeOnHeader(Web3jConstants.ETH_COMPILE_SOLIDITY)
void ethCompileSolidity(Message message) throws IOException {
    String sourceCode = message.getHeader(Web3jConstants.SOURCE_CODE, configuration::getSourceCode, String.class);
    Request<?, EthCompileSolidity> request = web3j.ethCompileSolidity(sourceCode);
    setRequestId(message, request);
    EthCompileSolidity response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getCompiledSolidity());
    }
}

// eth_compileSerpent: compiles Serpent source code.
@InvokeOnHeader(Web3jConstants.ETH_COMPILE_SERPENT)
void ethCompileSerpent(Message message) throws IOException {
    String sourceCode = message.getHeader(Web3jConstants.SOURCE_CODE, configuration::getSourceCode, String.class);
    Request<?, EthCompileSerpent> request = web3j.ethCompileSerpent(sourceCode);
    setRequestId(message, request);
    EthCompileSerpent response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getCompiledSourceCode());
    }
}

// eth_newFilter: installs a log filter; body becomes the filter id.
@InvokeOnHeader(Web3jConstants.ETH_NEW_FILTER)
void ethNewFilter(Message message) throws IOException {
    DefaultBlockParameter fromBlock = toBlockParameter(message.getHeader(Web3jConstants.FROM_BLOCK, configuration::getFromBlock, Object.class));
    DefaultBlockParameter toBlock = toBlockParameter(message.getHeader(Web3jConstants.TO_BLOCK, configuration::getToBlock, Object.class));
    List<String> addresses = message.getHeader(Web3jConstants.ADDRESSES, configuration::getAddresses, List.class);
    List<String> topics = message.getHeader(Web3jConstants.TOPICS, configuration::getTopics, List.class);
    org.web3j.protocol.core.methods.request.EthFilter ethFilter = Web3jEndpoint.buildEthFilter(fromBlock, toBlock, addresses, topics);
    Request<?, EthFilter> request = web3j.ethNewFilter(ethFilter);
    setRequestId(message, request);
    EthFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getFilterId());
    }
}

// eth_newBlockFilter: installs a new-block filter; body becomes the filter id.
@InvokeOnHeader(Web3jConstants.ETH_NEW_BLOCK_FILTER)
void ethNewBlockFilter(Message message) throws IOException {
    Request<?, EthFilter> request = web3j.ethNewBlockFilter();
    setRequestId(message, request);
    EthFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getFilterId());
    }
}

// eth_newPendingTransactionFilter: installs a pending-tx filter; body becomes the filter id.
@InvokeOnHeader(Web3jConstants.ETH_NEW_PENDING_TRANSACTION_FILTER)
void ethNewPendingTransactionFilter(Message message) throws IOException {
    Request<?, EthFilter> request = web3j.ethNewPendingTransactionFilter();
    setRequestId(message, request);
    EthFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getFilterId());
    }
}

// eth_uninstallFilter: removes a filter by id.
@InvokeOnHeader(Web3jConstants.ETH_UNINSTALL_FILTER)
void ethUninstallFilter(Message message) throws IOException {
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, EthUninstallFilter> request = web3j.ethUninstallFilter(filterId);
    setRequestId(message, request);
    EthUninstallFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.isUninstalled());
    }
}

// eth_getFilterChanges: log entries since the last poll of a filter.
@InvokeOnHeader(Web3jConstants.ETH_GET_FILTER_CHANGES)
void ethGetFilterChanges(Message message) throws IOException {
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, EthLog> request = web3j.ethGetFilterChanges(filterId);
    setRequestId(message, request);
    EthLog response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getLogs());
    }
}

// eth_getFilterLogs: all log entries matching a filter.
@InvokeOnHeader(Web3jConstants.ETH_GET_FILTER_LOGS)
void ethGetFilterLogs(Message message) throws IOException {
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, EthLog> request = web3j.ethGetFilterLogs(filterId);
    setRequestId(message, request);
    EthLog response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getLogs());
    }
}

// eth_getLogs: one-shot log query. (Continues on the next chunk.)
@InvokeOnHeader(Web3jConstants.ETH_GET_LOGS)
void ethGetLogs(Message message) throws IOException {
    DefaultBlockParameter fromBlock = toBlockParameter(message.getHeader(Web3jConstants.FROM_BLOCK, configuration::getFromBlock, Object.class));
    DefaultBlockParameter toBlock =
toBlockParameter(message.getHeader(Web3jConstants.TO_BLOCK, configuration::getToBlock, Object.class)); List<String> addresses = message.getHeader(Web3jConstants.ADDRESSES, configuration::getAddresses, List.class); List<String> topics = message.getHeader(Web3jConstants.TOPICS, configuration::getTopics, List.class); org.web3j.protocol.core.methods.request.EthFilter ethFilter = Web3jEndpoint.buildEthFilter(fromBlock, toBlock, addresses, topics); Request<?, EthLog> request = web3j.ethGetLogs(ethFilter); setRequestId(message, request); EthLog response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getLogs()); } } @InvokeOnHeader(Web3jConstants.ETH_GET_WORK) void ethGetWork(Message message) throws IOException { Request<?, EthGetWork> request = web3j.ethGetWork(); setRequestId(message, request); EthGetWork response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getResult()); } } @InvokeOnHeader(Web3jConstants.ETH_SUBMIT_WORK) void ethSubmitWork(Message message) throws IOException { String nonce = message.getHeader(Web3jConstants.NONCE, configuration::getNonce, String.class); String headerPowHash = message.getHeader(Web3jConstants.HEADER_POW_HASH, configuration::getHeaderPowHash, String.class); String mixDigest = message.getHeader(Web3jConstants.MIX_DIGEST, configuration::getHeaderPowHash, String.class); Request<?, EthSubmitWork> request = web3j.ethSubmitWork(nonce, headerPowHash, mixDigest); setRequestId(message, request); EthSubmitWork response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.solutionValid()); } } @InvokeOnHeader(Web3jConstants.ETH_SUBMIT_HASHRATE) void ethSubmitHashrate(Message message) throws IOException { String hashrate = message.getHeader(Web3jConstants.ETH_HASHRATE, configuration::getHashrate, String.class); String clientId = 
message.getHeader(Web3jConstants.CLIENT_ID, configuration::getClientId, String.class);
    Request<?, EthSubmitHashrate> request = web3j.ethSubmitHashrate(hashrate, clientId);
    setRequestId(message, request);
    EthSubmitHashrate response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.submissionSuccessful());
    }
}

// Deprecated JSON-RPC operations (db_* and shh_*), kept for API completeness.

// db_putString: stores the message body under database/key.
@InvokeOnHeader(Web3jConstants.DB_PUT_STRING)
void dbPutString(Message message) throws IOException {
    String databaseName = message.getHeader(Web3jConstants.DATABASE_NAME, configuration::getDatabaseName, String.class);
    String keyName = message.getHeader(Web3jConstants.KEY_NAME, configuration::getKeyName, String.class);
    Request<?, DbPutString> request = web3j.dbPutString(databaseName, keyName, message.getBody(String.class));
    setRequestId(message, request);
    DbPutString response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.valueStored());
    }
}

// db_getString: reads the string stored under database/key.
@InvokeOnHeader(Web3jConstants.DB_GET_STRING)
void dbGetString(Message message) throws IOException {
    String databaseName = message.getHeader(Web3jConstants.DATABASE_NAME, configuration::getDatabaseName, String.class);
    String keyName = message.getHeader(Web3jConstants.KEY_NAME, configuration::getKeyName, String.class);
    Request<?, DbGetString> request = web3j.dbGetString(databaseName, keyName);
    setRequestId(message, request);
    DbGetString response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getStoredValue());
    }
}

// db_putHex: stores the message body (hex) under database/key.
@InvokeOnHeader(Web3jConstants.DB_PUT_HEX)
void dbPutHex(Message message) throws IOException {
    String databaseName = message.getHeader(Web3jConstants.DATABASE_NAME, configuration::getDatabaseName, String.class);
    String keyName = message.getHeader(Web3jConstants.KEY_NAME, configuration::getKeyName, String.class);
    Request<?, DbPutHex> request = web3j.dbPutHex(databaseName, keyName, message.getBody(String.class));
    setRequestId(message, request);
    DbPutHex response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.valueStored());
    }
}

// db_getHex: reads the hex value stored under database/key.
@InvokeOnHeader(Web3jConstants.DB_GET_HEX)
void dbGetHex(Message message) throws IOException {
    String databaseName = message.getHeader(Web3jConstants.DATABASE_NAME, configuration::getDatabaseName, String.class);
    String keyName = message.getHeader(Web3jConstants.KEY_NAME, configuration::getKeyName, String.class);
    Request<?, DbGetHex> request = web3j.dbGetHex(databaseName, keyName);
    setRequestId(message, request);
    DbGetHex response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getStoredValue());
    }
}

// shh_version: Whisper protocol version.
@InvokeOnHeader(Web3jConstants.SHH_VERSION)
void shhVersion(Message message) throws IOException {
    Request<?, ShhVersion> request = web3j.shhVersion();
    setRequestId(message, request);
    ShhVersion response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getVersion());
    }
}

// shh_post: sends a Whisper message.
@InvokeOnHeader(Web3jConstants.SHH_POST)
void shhPost(Message message) throws IOException {
    String fromAddress = message.getHeader(Web3jConstants.FROM_ADDRESS, configuration::getFromAddress, String.class);
    String toAddress = message.getHeader(Web3jConstants.TO_ADDRESS, configuration::getToAddress, String.class);
    List<String> topics = message.getHeader(Web3jConstants.TOPICS, configuration::getTopics, List.class);
    String data = message.getHeader(Web3jConstants.DATA, configuration::getData, String.class);
    BigInteger priority = message.getHeader(Web3jConstants.PRIORITY, configuration::getPriority, BigInteger.class);
    BigInteger ttl = message.getHeader(Web3jConstants.TTL, configuration::getTtl, BigInteger.class);
    org.web3j.protocol.core.methods.request.ShhPost shhPost = new org.web3j.protocol.core.methods.request.ShhPost(fromAddress, toAddress, topics, data, priority, ttl);
    Request<?, ShhPost> request = web3j.shhPost(shhPost);
    setRequestId(message, request);
    ShhPost response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.messageSent());
    }
}

// shh_newIdentity: creates a new Whisper identity; body becomes its address.
@InvokeOnHeader(Web3jConstants.SHH_NEW_IDENTITY)
void shhNewIdentity(Message message) throws IOException {
    Request<?, ShhNewIdentity> request = web3j.shhNewIdentity();
    setRequestId(message, request);
    ShhNewIdentity response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getAddress());
    }
}

// shh_hasIdentity: whether the node holds the private key for an identity.
@InvokeOnHeader(Web3jConstants.SHH_HAS_IDENTITY)
void shhHasIdentity(Message message) throws IOException {
    String identityAddress = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    Request<?, ShhHasIdentity> request = web3j.shhHasIdentity(identityAddress);
    setRequestId(message, request);
    ShhHasIdentity response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.hasPrivateKeyForIdentity());
    }
}

// shh_newGroup: creates a new Whisper group; body becomes its address.
@InvokeOnHeader(Web3jConstants.SHH_NEW_GROUP)
void shhNewGroup(Message message) throws IOException {
    Request<?, ShhNewGroup> request = web3j.shhNewGroup();
    setRequestId(message, request);
    ShhNewGroup response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getAddress());
    }
}

// shh_addToGroup: adds an identity to a group.
@InvokeOnHeader(Web3jConstants.SHH_ADD_TO_GROUP)
void shhAddToGroup(Message message) throws IOException {
    String identityAddress = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class);
    Request<?, ShhAddToGroup> request = web3j.shhAddToGroup(identityAddress);
    setRequestId(message, request);
    ShhAddToGroup response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.addedToGroup());
    }
}

// shh_newFilter: installs a Whisper message filter; body becomes the filter id.
@InvokeOnHeader(Web3jConstants.SHH_NEW_FILTER)
void shhNewFilter(Message message) throws IOException {
    String data = message.getHeader(Web3jConstants.DATA, configuration::getData, String.class);
    List<String> topics = message.getHeader(Web3jConstants.TOPICS, configuration::getTopics, List.class);
    org.web3j.protocol.core.methods.request.ShhFilter shhFilter = Web3jEndpoint.buildShhFilter(data, topics);
    Request<?, ShhNewFilter> request = web3j.shhNewFilter(shhFilter);
    setRequestId(message, request);
    ShhNewFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getFilterId());
    }
}

// shh_uninstallFilter: removes a Whisper filter by id.
@InvokeOnHeader(Web3jConstants.SHH_UNINSTALL_FILTER)
void shhUninstallFilter(Message message) throws IOException {
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, ShhUninstallFilter> request = web3j.shhUninstallFilter(filterId);
    setRequestId(message, request);
    ShhUninstallFilter response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.isUninstalled());
    }
}

// shh_getFilterChanges: Whisper messages received since the last poll.
@InvokeOnHeader(Web3jConstants.SHH_GET_FILTER_CHANGES)
void shhGetFilterChanges(Message message) throws IOException {
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, ShhMessages> request = web3j.shhGetFilterChanges(filterId);
    setRequestId(message, request);
    ShhMessages response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getMessages());
    }
}

// shh_getMessages: all Whisper messages matching a filter.
@InvokeOnHeader(Web3jConstants.SHH_GET_MESSAGES)
void shhGetMessages(Message message) throws IOException {
    BigInteger filterId = message.getHeader(Web3jConstants.FILTER_ID, configuration::getFilterId, BigInteger.class);
    Request<?, ShhMessages> request = web3j.shhGetMessages(filterId);
    setRequestId(message, request);
    ShhMessages response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getMessages());
    }
}

// Quorum API Operations — require the endpoint's client to be a Quorum client.

// quorum_nodeInfo: node information; errors if the client is not Quorum.
@InvokeOnHeader(Web3jConstants.QUORUM_NODE_INFO)
void quorumNodeInfo(Message message) throws IOException {
    if (quorum == null) {
        setQuorumEndpointError(message);
        return;
    }
    Request<?, QuorumNodeInfo> request = quorum.quorumNodeInfo();
    setRequestId(message, request);
    QuorumNodeInfo response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getNodeInfo());
    }
}

// quorum_canonicalHash: canonical hash for the block number given in the body.
@InvokeOnHeader(Web3jConstants.QUORUM_CANONICAL_HASH)
void quorumCanonicalHash(Message message) throws IOException {
    if (quorum == null) {
        setQuorumEndpointError(message);
        return;
    }
    Request<?, CanonicalHash> request = quorum.quorumCanonicalHash(message.getBody(BigInteger.class));
    setRequestId(message, request);
    CanonicalHash response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getCanonicalHash());
    }
}

// quorum_vote: votes for a block hash; body becomes the resulting tx hash.
@InvokeOnHeader(Web3jConstants.QUORUM_VOTE)
void quorumVote(Message message) throws IOException {
    if (quorum == null) {
        setQuorumEndpointError(message);
        return;
    }
    String blockHash = message.getHeader(Web3jConstants.BLOCK_HASH, configuration::getBlockHash, String.class);
    Request<?, Vote> request = quorum.quorumVote(blockHash);
    setRequestId(message, request);
    Vote response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getTransactionHash());
    }
}

// quorum_makeBlock: asks the block maker to create a block; body becomes its hash.
@InvokeOnHeader(Web3jConstants.QUORUM_MAKE_BLOCK)
void quorumMakeBlock(Message message) throws IOException {
    if (quorum == null) {
        setQuorumEndpointError(message);
        return;
    }
    Request<?, MakeBlock> request = quorum.quorumMakeBlock();
    setRequestId(message, request);
    MakeBlock response = request.send();
    boolean hasError = checkForError(message, response);
    if (!hasError) {
        message.setBody(response.getBlockHash());
    }
}

// quorum_pauseBlockMaker: (declaration continues beyond this chunk)
@InvokeOnHeader(Web3jConstants.QUORUM_PAUSE_BLOCK_MAKER)
void quorumPauseBlockMaker(Message message)
throws IOException { if (quorum == null) { setQuorumEndpointError(message); return; } Request<?, VoidResponse> request = quorum.quorumPauseBlockMaker(); setRequestId(message, request); VoidResponse response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.isValid()); } } @InvokeOnHeader(Web3jConstants.QUORUM_RESUME_BLOCK_MAKER) void quorumResumeBlockMaker(Message message) throws IOException { if (quorum == null) { setQuorumEndpointError(message); return; } Request<?, VoidResponse> request = quorum.quorumResumeBlockMaker(); setRequestId(message, request); VoidResponse response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.isValid()); } } @InvokeOnHeader(Web3jConstants.QUORUM_IS_BLOCK_MAKER) void quorumIsBlockMaker(Message message) throws IOException { if (quorum == null) { setQuorumEndpointError(message); return; } String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class); Request<?, BlockMaker> request = quorum.quorumIsBlockMaker(address); setRequestId(message, request); BlockMaker response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.isBlockMaker()); } } @InvokeOnHeader(Web3jConstants.QUORUM_IS_VOTER) void quorumIsVoter(Message message) throws IOException { if (quorum == null) { setQuorumEndpointError(message); return; } String address = message.getHeader(Web3jConstants.ADDRESS, configuration::getAddress, String.class); Request<?, Voter> request = quorum.quorumIsVoter(address); setRequestId(message, request); Voter response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.isVoter()); } } @InvokeOnHeader(Web3jConstants.QUORUM_GET_PRIVATE_PAYLOAD) void quorumGetPrivatePayload(Message message) throws IOException { if (quorum == null) { setQuorumEndpointError(message); 
return; } Request<?, PrivatePayload> request = quorum.quorumGetPrivatePayload(message.getBody(String.class)); setRequestId(message, request); PrivatePayload response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getPrivatePayload()); } } @InvokeOnHeader(Web3jConstants.QUORUM_ETH_SEND_TRANSACTION) void quorumEthSendTransaction(Message message) throws IOException { if (quorum == null) { setQuorumEndpointError(message); return; } //the same as a regular transaction, but there is no gasPrice, instead there is optional privateFor String fromAddress = message.getHeader(Web3jConstants.FROM_ADDRESS, configuration::getFromAddress, String.class); String toAddress = message.getHeader(Web3jConstants.TO_ADDRESS, configuration::getToAddress, String.class); BigInteger nonce = message.getHeader(Web3jConstants.NONCE, configuration::getNonce, BigInteger.class); BigInteger gasLimit = message.getHeader(Web3jConstants.GAS_LIMIT, configuration::getGasLimit, BigInteger.class); BigInteger value = message.getHeader(Web3jConstants.VALUE, configuration::getValue, BigInteger.class); String data = message.getHeader(Web3jConstants.DATA, configuration::getData, String.class); List<String> privateFor = message.getHeader(Web3jConstants.PRIVATE_FOR, configuration::getPrivateFor, List.class); PrivateTransaction transaction = new PrivateTransaction(fromAddress, nonce, gasLimit, toAddress, value, data, privateFor); Request<?, EthSendTransaction> request = quorum.ethSendTransaction(transaction); setRequestId(message, request); EthSendTransaction response = request.send(); boolean hasError = checkForError(message, response); if (!hasError) { message.setBody(response.getTransactionHash()); } } private void setRequestId(Message message, Request request) { final Long id = message.getHeader(Web3jConstants.ID, Long.class); LOG.debug("setRequestId {}", id); if (id != null) { request.setId(id); } } private DefaultBlockParameter 
toBlockParameter(Object o) { DefaultBlockParameter defaultBlockParameter = null; if (o != null) { try { defaultBlockParameter = DefaultBlockParameter.valueOf(o.toString()); } catch (Throwable e) { //not one of: "latest", "earliest" or "pending" } if (defaultBlockParameter == null) { defaultBlockParameter = DefaultBlockParameter.valueOf(new BigInteger(o.toString())); } } return defaultBlockParameter; } private boolean checkForError(Message message, Response response) { if (response.hasError()) { int code = response.getError().getCode(); String data = response.getError().getData(); String messages = response.getError().getMessage(); message.setHeader(Web3jConstants.ERROR_CODE, code); message.setHeader(Web3jConstants.ERROR_DATA, data); message.setHeader(Web3jConstants.ERROR_MESSAGE, messages); message.getExchange().setException(new CamelExchangeException("Web3j failed. Error code: " + code + " data: " + data + " messages: " + messages, message.getExchange())); return true; } else { return false; } } private void setQuorumEndpointError(Message message) { message.getExchange().setException(new CamelExchangeException("This is not a Quorum endpoint. Create one by specifying quorumAPI=true", message.getExchange())); } }
/**
 * Check the validity of an annotated query based on 3 rules:
 * 1. The last keyword cannot be an OperatorTerm.
 * 2. Each term matching an AggregateFunction (except COUNT) must be
 *    followed by a term that refers to an attribute name or is itself
 *    an Aggregate Function.
 * 3. Each term matching a GroupBy or Count must be followed by a term
 *    that refers to a relation name or an attribute name.
 * A keyword that rules 2 or 3 apply to therefore cannot be the last
 * keyword either, since it needs a following term.
 *
 * @param annotatedQuery the query whose keyword sequence is validated
 * @return true if the annotated query satisfies all three rules
 */
public static boolean checkAnnotatedQueryValidity(AnnotatedQuery annotatedQuery) {
    int keywordCount = annotatedQuery.getKeywords().size();
    for (int index = 0; index < keywordCount; index++) {
        Keyword keyword = annotatedQuery.getKeywords().get(index);
        boolean isLast = index == keywordCount - 1;

        // Rule 1: an operator cannot terminate the query.
        if (isLast && keyword.getType() == Keyword.TermType.OperatorTerm) {
            return false;
        }

        if (keyword.isAggregateFunctionExceptCOUNT()) {
            // Rule 2: the aggregate function needs a following term.
            // (The original called get(index + 1) unconditionally, which
            // threw IndexOutOfBoundsException for a trailing aggregate.)
            if (isLast) {
                return false;
            }
            Keyword nextKeyword = annotatedQuery.getKeywords().get(index + 1);
            Tag tag = annotatedQuery.getKeywordsTag(nextKeyword);
            boolean nextRefersToAttribute = tag != null && tag.refersToAttribute();
            if (!(nextRefersToAttribute || nextKeyword.isAggregateFunction())) {
                return false;
            }
        } else if (keyword.isGroupByFunction() || keyword.isCountFunction()) {
            // Rule 3: GROUP BY / COUNT need a following term as well.
            if (isLast) {
                return false;
            }
            Keyword nextKeyword = annotatedQuery.getKeywords().get(index + 1);
            Tag tag = annotatedQuery.getKeywordsTag(nextKeyword);
            // Guard against a null tag here; the original dereferenced it
            // directly and could throw a NullPointerException even though
            // the COUNT sub-check below tested for null.
            boolean refersToSchema = tag != null
                    && (tag.refersToAttribute() || tag.refersToRelation());
            if (!refersToSchema) {
                return false;
            }
            if (keyword.isCountFunction()
                    && !(refersToSchema || nextKeyword.isAggregateFunction())) {
                return false;
            }
        }
    }
    return true;
}
// Comp_04_0386
// NOTE(review): auto-generated benchmark component (part of a numbered
// Comp_XX_YYYY hierarchy); manual edits are likely to be overwritten by
// whatever generator produced it.
import React from 'react';
import { incModCount } from '../modCount';
import Comp_05_1930 from './Comp_05_1930';
import Comp_05_1931 from './Comp_05_1931';
import Comp_05_1932 from './Comp_05_1932';
import Comp_05_1933 from './Comp_05_1933';
import Comp_05_1934 from './Comp_05_1934';

const Comp_04_0386: React.FC = () => {
  // Bump the global modification counter once per mount.
  React.useEffect(() => {
    incModCount();
  }, []);

  // NOTE(review): the `';` after each child element is rendered as a literal
  // text node — presumably a generator artifact. Kept byte-for-byte because
  // it is part of the component's actual output.
  return <div>
    I'm component Comp_04_0386
    <div>
    <Comp_05_1930></Comp_05_1930>';
    <Comp_05_1931></Comp_05_1931>';
    <Comp_05_1932></Comp_05_1932>';
    <Comp_05_1933></Comp_05_1933>';
    <Comp_05_1934></Comp_05_1934>';
    </div>
  </div>;
};

export default Comp_04_0386;
class Int2Printer:
    """Pretty-print an iInt2 value as ``{x,y}``."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        return "{%d,%d}" % (self.val['x'], self.val['y'])


class RangePrinter:
    """Pretty-print an iRange value as ``{start...end}``."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        return "{%s...%s}" % (self.val['start'], self.val['end'])


class RectPrinter:
    """Pretty-print an iRect value as ``{pos = ..., size = ...}``."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        return "{pos = %s, size = %s}" % (self.val['pos'], self.val['size'])


class StringPrinter:
    """Pretty-print an iString value as ``{"text" size = n}``."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        size = int(self.val['chars']['i']['size'])
        if size == 0:
            return "<empty String>"
        # Escape embedded double quotes so the rendered literal stays readable.
        # BUG FIX: the original used replace('"', '\"'), but '\"' is identical
        # to '"' in Python source, so nothing was ever escaped.
        text = self.val['chars']['i']['data'].string('utf-8').replace('"', '\\"')
        return "{\"" + text + "\" size = %d" % size + "}"


class ObjectPrinter:
    """Pretty-print an iObject value with its class and reference count."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        return "{instance of %s, refCount = %d}" % (self.val['classObj'], self.val['refCount'])


class PtrSetPrinter:
    """Pretty-print an iPtrSet value as its element count."""

    def __init__(self, val):
        self.val = val

    def to_string(self):
        values = self.val['values']
        set_range = (int(values['range']['start']), int(values['range']['end']))
        set_size = set_range[1] - set_range[0]
        if set_size == 0:
            return "<empty PtrSet>"
        return "{size = %d}" % (set_size)


def lookup_type(val):
    """Map a value to its pretty-printer instance, or None if unsupported.

    A leading ``const `` qualifier and a trailing `` *`` are stripped so
    that const references and pointers resolve to the same printer as the
    plain type name.
    """
    type_str = str(val.type)
    if type_str.startswith('const '):
        type_str = type_str[6:]
    if type_str.endswith(' *'):
        type_str = type_str[:-2]
    if type_str == 'iString':
        return StringPrinter(val)
    if type_str.startswith('iRange'):
        return RangePrinter(val)
    if type_str == 'iInt2':
        return Int2Printer(val)
    if type_str == 'iRect':
        return RectPrinter(val)
    if type_str == 'iObject':
        return ObjectPrinter(val)
    if type_str == 'iPtrSet':
        return PtrSetPrinter(val)
    return None


try:
    gdb.pretty_printers.append(lookup_type)
except NameError:
    # Not running inside gdb (e.g. imported by unit tests): skip registration.
    # Inside gdb the behavior is unchanged.
    pass
// Source path: src/main/java/com/robidium/demo/compiler/transformation/synthetic/DataTransformer.java
package com.robidium.demo.compiler.transformation.synthetic;

import org.json.simple.JSONArray;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;

import java.io.FileReader;
import java.io.IOException;
import java.util.List;
import java.util.Map;

/**
 * Base class for synthetic data transformers: loads a mapping of element ids
 * to transformation-function names from a JSON file and renders them into
 * code snippets via the {@code Functions} helper.
 */
public abstract class DataTransformer {
    // Element id -> ordered list of transformation function names to apply.
    Map<String, List<String>> transformations;
    // Path of the JSON file holding the transformation definitions.
    String fileName;

    /**
     * Parse {@link #fileName} as a top-level JSON array and delegate to the
     * concrete subclass (via {@link #parseJSONTransformations(JSONArray)}) to
     * populate {@link #transformations}. I/O and parse failures are printed
     * to stderr and otherwise swallowed (best-effort loading).
     */
    public void ReadDataTransformationFromFile() {
        JSONParser jsonParser = new JSONParser();

        try (FileReader reader = new FileReader(fileName)) {
            Object obj = jsonParser.parse(reader);
            parseJSONTransformations((JSONArray) obj);
        } catch (IOException | ParseException e) {
            e.printStackTrace();
        }
    }

    /**
     * Concatenate the generated code for every transformation function
     * registered for the given id, followed by the snippet that assigns the
     * transformed value.
     *
     * @param id element id whose transformation chain is rendered
     * @return the generated code snippet
     */
    public String createTransformation(String id) {
        return transformations.get(id)
                .stream()
                .reduce("", (t, f) -> t + Functions.getFunction(f)) + Functions.assignTransformedValue();
    }

    /**
     * @param id element id to look up
     * @return true if a transformation chain is registered for the id
     */
    public boolean isDataTransformationPresent(String id) {
        return transformations.containsKey(id);
    }

    /**
     * Populate {@link #transformations} from the parsed JSON array; the
     * expected array layout is defined by the concrete subclass.
     *
     * @param dataTransformationList parsed top-level JSON array
     */
    protected abstract void parseJSONTransformations(JSONArray dataTransformationList);
}
<reponame>comprakt/comprakt-fuzz-tests class Ed { } class QUhs9Z4 { public static void P9p_wAxUxG (String[] NXSQc0) { { int a; -null.C_MjCdC4BtD(); !!241186.yKRFtF9dzVA; XhiKRwLkjlor zjEUy; ; return; void FggkBD; if ( -false[ new boolean[ !true[ null.cC]][ ( l7sBA()[ new boolean[ eBcIU9().LZAefSd].EEI()]).Y()]]) while ( true[ true[ !357606[ this.bTkU5H()]]]) { void[][][] bl2HZy; } } } public static void wfc6TYYJ (String[] nGPA) { { boolean BUd; while ( ( -false.S5()).VpdtbI7) { RucntJ[][][] zM; } ; while ( -!!Pztk3RdC[ false.CO()]) ; while ( EzooxGR[ !DZ5ZjpB7yVylSt[ --20222.h7V21GRSEeCKl()]]) --!new WO5TSMkgx8().TAW5; boolean[] K5NivTfvHtP; gEWiyXOv7YK aFXVcny0t; boolean[][][] BR; boolean[] csCIO828aI6; return; int[] f5cwBYqI; void[][][] pa3n1g4; } void vrf_Mz7kVcY; { -( false.mK).oOLBUtmaKNKwS; int[] JME; int[] WifN_S; CZjKJXe j1sY4mZ; int wia2Wbkm3; if ( new rDl1s2P[ -false.KPO3cMUJVs_Tth()].mc) new int[ -( this.dugspG())[ new void[ this.e()].Myop]][ _fvIjo9.lbE43D08AVAq()]; boolean[] igQgUui5b6s; boolean[] ZKieiWuX8KkH; void[][][][] g; void[][] Vo; { return; } return; boolean[][][][] ns; while ( false[ !WB3JGTPnJ0Htl()[ !!new void[ ULnoEJz8E()[ -this[ !-!-!!!79158561.ZyHKpHHUlh4]]].tYuIkBj_()]]) while ( true.T) ; } } public int[][][][] AD; public boolean NdHEGJ; public int ZbNMaPkdHU6b9; public boolean[][][] rM (void[][] ud4, boolean[][] ZjYWKvKeV, boolean G, boolean fwjw, boolean[][] fDPGslrQM64, void[] aiBk) { if ( !-883[ 132.Mds0xx5FIRM()]) return;else 0[ -!499.Sx8CLU1KkndQE()]; boolean[] kGiOnm = !this.V8DDd(); { return; { void[][][] Y9D1CHDn2x; } { boolean Zf64ebqgFA; } boolean Kff9X; boolean tUHf7QU3HGmIkF; void[][][][][] mTlz; xD89SGAZhIO[][] P3HLVSFQ; void[] IT; this[ true[ this.UNxxFD_TbitO]]; boolean[] ie6P; return; int[][][][] K14VT4fa3fN; HpETffZBs3 f_rvyu6R; return; { int[] VP8_btL; } } if ( JM_GdUiMDP.I()) { boolean[] eO8B; }else if ( -!Nwq9CfCcKpyDd().l()) while ( -new rbxLdX6iyJ().DbcjC8NnlUKDJ()) { void wAmm; } if ( null[ -!!!-( null.VkqgG())[ 
-045158140[ !!!-( 465.J())[ !!--Z3()[ new Co6Sp9u[ -( null.Ufn68X3KjfU9()).g5yMvNuQ0qwHt][ -M4Ax.aHnGVIrf4]]]]]]) !72392378[ !true[ true[ 470352157.URE3arkdL8C9lg()]]]; r88b1zl[] Dq7nTkPDDB; return this[ !!-!6.xLd2wun0JF()]; while ( l_fg7kQ132Ap.oQv) ( -!!null.m5RPTqP5gmBt())[ T3F4wKhh4wT1L()[ new YkZ().qYt]]; boolean[] pTpr0yFC; int dvsQ5XpyzGaIP; { while ( new void[ -!!!this.Z9eu].ZBkbvfkom()) return; Hti8[] SRJnwQBBdMN; if ( this.pb1np1d) return; ; boolean[][] f; while ( !--new P_ItB().p0ldEa7HIzl) { ; } return; void X5kfYvXSJOi7U; void[] LGMfHHV0cY; return; void U6UxT; } void[][][][][] CsT2nIqITS; thM3AI[][][] T = fYEDtdK.JD(); void aGt1SI8t = fwqK195Rey.KK1raL1IkqMPq(); boolean bMB8QFJ3sNy2ye = 432129.Edh0 = !-!new VQbUg3Wwvz()[ new q_Ty6fne2cGv().Vw8hL()]; return; { boolean[][][][] IH; void[] DM4m1MV1UrW96; } OKC7KGSrn_j[] exGko = 9831.TWfC() = false.BuB4; void wUtuKGnTtFbgU = ( GcyNn98m.Whu()).BfOXKx = null.Gt1izxHc; void kmsSvBp0G; } } class FsgL { public static void El9trgc (String[] GYyw8BkJdCvCe) throws G { if ( BxrcI_Rq2a5B().BXXV06) if ( new int[ --y1O_d().zRjivQuJRn].wD4uMwYOE4()) return; -!( true.ZbG7qBdUg6MY)[ -!xtC[ new void[ null.OdroRG4oA][ J9OKHwX4aa()[ new g2DZ2().e7zI()]]]]; return; OJAY qx2n_mr3X = IESckQWLPRb.i_8k(); void[][] e2 = XQhu1SDRLAIzVC.EK(); pfK3a A5kDZrc; void _x6nXRYL; } public static void Eg0Hcwi6Wk8 (String[] oT4g9sszJq28) { ; if ( new wv7().D()) while ( true[ ---this[ -!null.nyhX69t0swzZ()]]) if ( !-jkkahvxk_Cu().EzbaoNN1aoG()) KgC_cCU4uvT()[ -!-( -true[ true[ 7247417.qx_36Xvi]]).sDqcWB7Nw]; int N12xHvfOl2Du2; int FMTjQXR; boolean lNVool_Xgf5o6K = -this.BCx(); void[][][][][] ivxgjEeWL = -false.ECegvV_x(); boolean lpDNN0kWPqiM; while ( this.w9jK8cNTL5()) return; ; { if ( this[ NnGY2GbjJ[ zJn3lsgcFz4vsE()[ BfNu4YhLZwE2.O1UGaNTu5()]]]) { u3A6PBk[] P4W9; } } return -null.zEaaaaROTIR4; boolean[][][][][][][] qfsaUu8QotY1 = -null.arhpUGOA = bt_PP9g().QHv; ; boolean[] LxX = this.r_(); } public static void v1flVziMfw (String[] G) { 
void v0C_YUTCaNfITw = 655574.RwWT6TfkRqMmV() = -!new pI9OjcQ()[ -!new boolean[ false.MYBwKbm()].UJos2HJ]; gPj.rhMBNRzLlvr(); DAYps1TxLX5XOi Qgs0g1erF = true.J6cRxzSLdaP; while ( -T3WCevDi2GV().oiSaaNWAMSLY) while ( -new hzZMbsbQSfzo6[ null.ts()].QCI) ; while ( rkkKAoVU_mo8L().AnJcLRxUv) while ( !new Tm()[ !-true[ -Neq0()[ -null.Bj6daQ1GA()]]]) ; void[][] uZd4fyOuJyN; void uroeD = !--!!TQCZC6ucxNq().ABTch(); return -new boolean[ eBI3xBGQrvZ.GVkZo2].vI(); PtpJ_0yME88 rZj; while ( ( !true.ybhDB3()).BNSmHcBs6c) !---!new KygdTDM()[ true.pp3oWOj3]; } public boolean[] dsWQFgFaHRJ4 (DP1knXUh6LF9[][] A9zC1PSeEjg4, void JKZrqcvktpKP, xAabp81QHD PSFTXjsVqV6X, int VB9e6um, void[][][] vd, void kTPl, YDhzJoCBZEu sp7B) { return this[ !new void[ k0nHvWNGpASa.I_GaC].ZwXfhbieTy]; if ( true[ -!this.k3]) return;else new BNUFHCEH()[ -----!this[ -!false.snQKcGLtnh]]; M0eBA8Tg3N E_XOnm; k7MDoiPb3e ZTHPQc5C_RnseP = this.S5g0rRNK31() = new sV3BoRcbOJ4Hnj().mqG6bWDt7m0bE; return; while ( null[ -!!--( -67480292.cezIUU6QNWI7fU())[ new boolean[ zcEugBK.EagJXmrOVOwKD][ 438061877.bLlyaM0mZyObIW]]]) { void nYW4mj8j_3T; } if ( -( -( !( --!this._LI_JnFJ).tUcBbZ()).Ca0930PJfudlmg())[ m6R9JGS7().WvJ2]) if ( new _7IO().Lil()) ; return -true.rarMX_yeltj(); ; } public U_MVYB6iUYXTE[][][] ZfWD8uZf; public boolean HVzOXF () { while ( -true.gNchW()) { if ( false.w7()) if ( O0z_()[ -new evYWF()[ 8[ WpoBmp1Eo_v()[ true.CDDanYF7bf0]]]]) while ( -new CyM8().ZKIGpEvlpK5W()) { void[] P5; } } } public static void t8jQG5130Cb9 (String[] lxqJ) throws c { while ( !-!!new boolean[ this.p6SAv()].LS) { void[][] ZXVoNc; } ; } public int oEsD4z; public void[] RevbCZONeiqbF (boolean[][] rEqGlA17w5pJcq, int[] Ugwhw_u9O1nR, boolean[][][][] VPs8NUC, int lSZRhn_U7Y3ch6, void XupEuir5, void[][] z9SFck6EX, MphdSnAE8mkLp rOssT) throws J9_9H8712wTqnY { void loCn7NZ6ut; STBkbWZDFmP[][][][] J8p0rL = ( RsgEmEn1TZE_[ --!this.muqYX4HK]).B5BMk2C; if ( false[ awP60NUJ.jnZNTh()]) ; void[] EG = !null[ !true.rYgHHh()]; ; int[] DjKZdfe = 
!true[ null.veBFFl]; boolean VroFA = -vkl8MSVoo6nV8()[ -( new ZvFIJ().z8z_())[ !( new cwgYxVRVcKGWZB[ new boolean[ !-this[ null[ new X2D6yTdo_Y().KFLPGMIK_gXpA()]]].CY()][ true.Ny54wJg()]).Hv8LkZKDwl()]]; { int DUywMnWBE1; { boolean[] iR4LQQUWaMJ; } -true.toQyBP2FPQ(); wd[] nlzr; { { wMMRPSnS[] Z6dtc3kPWemQR; } } } } public boolean SOfsG_TlEGDm; public void[][] u; } class lGpgF9bEX { } class TU { public void[] TR8n6IChtAFb4h; public void[][] IOG8hjKJQ8R; public static void kV7WNGnVxz (String[] DrTQXqw7) throws NC0eQ { SylWU0wRf().pbI(); int gKg4; while ( -!new aiHFf().st7E()) return; } public pBZDP JCYNQ; } class Q { public int i3; public boolean[][][][][][][][] EH9QwvdrZ () { while ( new PDtgjv0cC[ -!new bYQz()[ null.VRhHsa063eb]].vepHbMPu()) ; void b = tM[ -!( this.o)[ !false.hqmzWrqv6c4W()]]; while ( ( this.dXe1BV).A_csNUBNw6_WJr()) while ( -false._dcf8GcELJmx_6()) !null.qn(); null.fhhHg6w(); this.A; void iYhN7V = QXzn9AUxlFzIJQ().zAjvJ; -oZeWmOUDe0n3.WgsQslMG1rV; if ( SDMZuhKU0AG5j.u0q3()) while ( -!new Ri().xp()) while ( null.t) if ( 11843072.n9TEG()) new KnFPwmby()[ null[ -!true.i9CCL()]];else ; 60885739.PxRg86Fje6(); return; void[][] TgDeHziglDun; } public void[][][] sx6wzpVC; public iJzj10tK[][][] s; public static void ISy_5SPui (String[] ANV) throws pmr8HVRkClGmrS { return !---null.WX55oRCgM(); glmywKOVgoLw nxtBSUDU = Gt89t1a3xlQ[ !new JiS5B6f8kmbzB3().Lo77t3I2ULjtK] = null[ -new boolean[ -true.GAASHNpW9V][ false.pii()]]; void[][][] jd5KyylkMzE = null.h0LluJlEj_wW() = --( this.ivT1)[ true[ null.PEEPy_()]]; return; CD[][] YLsyzS = !( true.wrYa9Pgj_1gh())[ null[ o0yPMwEEr()[ !-!true[ null.KSVsHcmD2BXm()]]]]; ; while ( !!Oto().Mt4n()) while ( !-false.kyUutAN()) !-!61027[ !-!Fdhc6N3NqY.mSR()]; return vywvWVc[ !14.s20jkmYRQkgq5]; M shFa6WOSO = false[ -null.auKJP] = -CjcQoCf.EN1Ay7H(); boolean m; T0NvOg22GABlj6 ZShl0AZT; void vNY = uuMC()[ true[ new FM7OpA7b().G9b8Iu]] = -new UQv1TUUURx[ !-true.RTwzUnrK8S7Fv].WXZGPRGv; boolean wcKujLdU_j7J = false[ 
316.n_kpYCdF3]; boolean f2h3Raei; int mNZ = true.p(); { YAnVo buaQ; while ( --new boolean[ -!new boolean[ this.VksXQWGIz()].e1NqA29rfxtBhH()][ this.Wkq()]) if ( new boolean[ -true.r9eS].o7()) if ( -false.G73reHYjwK8gHP) return; true.i5_H3; if ( !-this.NtBg) ; DEW3IBpmQuzP jggM8F; a1El BsdEaof; int[] zUEWwoG; jzSp iGq; i_YbB6[] pg0KusZ; void[] m; !!!!---8226.XkRPTSdBmoS; { while ( new void[ 977081[ ( !!null[ wOf[ new void[ 09595881._3_wL].xXVf()]])[ !new pZy4QmRmG().u]]].rtq()) ; } 218.r(); aGvxDZFX3P1dr VTBwZo; { int[] OGsQFZdSUI8E; } int[] Djyd; } } public static void j0Y (String[] UkoYXmz9hG) { boolean[][] bR2g9BjMcg5; void C8jfclDW4qhU = false[ new us7PNm8VLvOm2()[ -!!---null.HWC7z6QDzzla]] = ( -this.oHPAXg).bqx(); return; ; NeJmTSB[] Z = -dlxI372cXH1Fj.vohu() = new void[ -!true.A7t8CF4LsSCW()][ true.Mj5KGQkGM6me()]; Ba[][][] _B4ppox56wH; void[][] rCK6tcJlD3; return ZssSUptFF98R()[ false[ m7CSZun.Q33zIR2BWLcW]]; int nqlgTx5zqD8f = !!!!-!npUJ9N9NotBx.K5CEng(); { ; false.bAI3Q(); WIxc5hvUa J6HsW8q3_WuimQ; LOHNnikay[] oBaOLiM4ten9eV; boolean ZNN2; true.tmfpkKLgwsoEr; boolean[] NHIrXPCwnUe; } swJHZKeNOh x3; while ( !b51KeTi0RyVyG().veSM()) while ( new aakxZefdpc74pL().ryMomp6kAJ30) ; return; while ( true.Vl()) if ( !-4238999[ --true.BZ()]) return; QdNXx17tIzn9e lfu0XC4; { while ( ( -!!-gkop.T).qpJFttEuDbGU) while ( -!-Q5mnb_ae().VUA) while ( !null[ !false[ this[ !-!-( ( O8dCK2()[ new p()[ -!new lEzkr()[ TH7qSz68Jb().gr0Cm2buYVlXs]]]).Z).blkQh()]]]) ; w_UK[] pnC3P5O; boolean[][] UMD; boolean i2G2_wY3oLre; int alukunMovQ4p5; { void[] EawF; } { void LJSMhUcbZO9Tps; } return; aYjm2smNmiq IMtvphVsL05; if ( ( --!25[ -( -( !877905864.fsZUyAG_mCr_mY()).VgY6B5zB)[ ( -eJZ3Bqi._TG6kjui).S]]).L4()) null.Q; while ( true[ !!-!false.iQ22wew]) { boolean VyTAlp6KX; } wM a6pF; ns2KM G0; while ( -!true[ GKnK4heHJKqs()[ true.qkPGv3Eykln]]) ( !-new DLsYhhdnfnF5().SOOEA).F0KGeceKX8N(); { int P; } QQbBB5KIk9 Kc_oJVit8DCiN; boolean uJ; AXa9CJNLvIVQaA[] IExGL; } } public void[][] qlySMU; } 
class qKnVxHGXrFx { public void[] T () { { this.ez(); int NZl4SP9jq; ( -null.T1())[ !---!!--new sauX5().nItV9JW()]; while ( !-!-!!new uaudCKSmL7yQ()[ true.NnQr1W()]) if ( true.BzBwMe7OsI) !this.R0KXXlYDzV2MUp; zBjO4K[] vk; void[] zj; } if ( false[ true[ --null[ !!XugzP73()[ this.Fp7KuHGhKZyx85]]]]) ; int[][] Vxy9HtI88J7A; gok168by[] MWz1V; int Z9eLvAFu = ( null.XX()).lTobWx = false.yi(); while ( !!-true.cohXUGALn7w) while ( !MRdNJ.mZpDk()) return; ; } public void[][][] aFZO_BsXB; public static void tw (String[] rlQALXiLfA_) throws RDijjOQ { { VG2iebEcwj9[] KYfhEBzJDP; } { return; ; void Q_SwyYjK; if ( -!!!true[ !Py5nJQjg()[ true[ !JMkLB8PqDbI2r7().JCLU4]]]) { ; } if ( new uZi9aYJ1QPaV()[ ( myaFseQ3ah.yK0et53Q7).rzpk7FYMWZjyxp]) ; if ( -!!!!false.RtCjJC9()) ; int e; return; ; } vyqKZ0nmOyeKC[] akgzDeF = --this.WnQtAdbkha7N(); boolean N3; if ( -this[ -0075213.gCA4v1YDGrM()]) ( -qIlalkWhOAkLl.h)[ !-null.SVHI4y12ts]; int[] kn; int qURHiJQ = this.k_IWNca6KKKm; if ( -new void[ 7[ -null.b()]][ this.BV_LzVM()]) return; ; Q qxK16zV_xz0; } public void[][][] TFgLTPRh8w8l (void[][] xBECZ, int PFPHyRaXM, boolean[][][][] qZ) throws kpaQThAM { int Q_Iiq; int[][][] z_Fel2Xbnf; gL_BE[] S = -!-null[ false[ true.zry6vOck()]]; !!false.Dp; { while ( ( new Q()[ ( new void[ 78.hLh].VhIy6C4P5l)[ !false.nW6t8k1HJTj6()]])[ !new DKPG4v3().AnH3W9]) ; return; return; return; while ( !true._1TeuI4sbx) while ( this[ false[ --yZSLfyNdgCBvOM()[ ( bhGy2rp()[ !-( new xphD().LHqguBNfb()).J_dt]).G]]]) if ( !!!!-8.L6vbRf27F()) while ( -!Br.PPBIf2cQyr()) while ( !true.UGyP2b_()) while ( !null.fbrWQzE8VfTp) if ( -( hO0RAtG.R9Btu)[ -!( !( ---!false.jMquG_).RXskS0tcK).OAfxTMPEX1]) while ( !!false.tt7t4bi6IwuN_()) false.Xrtj7dJyR; while ( 337.LLhd7()) ; X709[] EekFIJG8Cz; ; int QS62ouqQE; new wgjRjkwMMY().GC(); ; boolean Hl5x; while ( !--11739.db()) return; } rsluOIYRPmWR0T[] H; { boolean[][][] Do3ixWGS; void L; boolean[] A3MkaJ_L0HvppJ; !---this.H0CAUjlUMT5; } { { { ; } } int MIDGl6PpNI; while ( new 
uqdlEsKVhhqf().ST5xc4AX0V()) !-false.BUhAj3Qb4cV(); return; HwyFssW[] M2; ; if ( false.UCG3MH3QwTm) { return; } int ShjtnAldWX1Dh; } int[][] AgusIiFO630yx; Bbl[] apomjAxBnyI6y2; { -!!295627986.uyCcrFw8t; void hN4GKp4; void[][][][][][] VT7v83l3Q8; void j4g8cg; } while ( !!z17b7wW().IToVlhjO) ; if ( !---true[ !true.c3NK_6UIG]) ;else while ( this[ --new jVn()[ 9.CcM]]) ; { !new bDpDCB[ !-new Rk9o5aAlSQP().yAiHof9].JN54jsDi9lBU(); tS _mdjI1U; int _Mvyur; return; } } } class iEqr0suQB3S2 { public void[][][][][][][] G29bhjn5yBGvM; public void[][] GIXb6RwJ43nk; public boolean B6LD; public B RzuQ; public static void iR1E (String[] eZX3mJtyjK) { if ( this.Xq5XRK()) { SMsGxxuouUt[][] KzqyrL9buvnmRh; } int lHH1 = true.Oi3AfCNBDVHL6 = null.pebst4n; boolean y; z6[][][] MIp2J2dS = !!jfwkieecvw()[ !false[ false.i6CCH5qnU]] = true.KFNUsLo8U6(); while ( new Ko9aBK6o6P5bw2[ 864548307[ wK6K7EH_H2oG[ null[ !!new int[ true.TKN9veVvZzEA()].jdT2z]]]][ new int[ --771804236[ !-!-!-new OBHVsGdsrmked()[ L77ozlJ_rJ.QllMjh()]]].nyOUPlIiESFS]) new void[ new boolean[ new BUsjZUd9fK().UHLzFMPoms2mCP].VV71Pn()][ 579112.N3wmcTrd6L]; while ( -!!-34078169.DObq9Pi) ; int[] LFt = -!-!new tzxEpUJEBQozb().epJz; void e; H2dy2WL[] uS6FCI4jF5ddkC; boolean[] joaLNtz = !( ( false.c).lwsV7sgRU())[ !this.ftDbs] = -true.doKdJ_; true[ -!SZfCdF.FbD_vWfi6WwmLh]; boolean[] SnA; } public static void dPPBtL_2yt32 (String[] SydRPqySn6lsS) { if ( false.sbzaAv()) NX9ibQBbfCM().sPcO(); void[] nUBWd = !-!!!this[ new TOR4YooauWmO[ -new boolean[ true[ !new MrtzxdMysPMu1[ -mGf.OFzD4uj9()].IzHO_FjvnRQ()]][ new RDJJAHbL4CDg().FEpS3()]].r3pxuy()] = -CyBnlJ.ckcnqbbY7WiS; h[] Ehvt3rRbJ = new YULkd()[ !( --new Mm3DoRYD()[ pVFUTzsbHSczh0().yO4()])[ -G.AIXtX]]; ; int[][] oBvqmsFEjkU6 = -true.cT5wjDPzYGB() = !R_gkDmwa3._sJ7cAx_M(); return Yf5zWnY1n.xcW9F0zG6(); return --!!new oYCzCDtyAp1lT6().WBXKF; { if ( -hlfxMS.F5t) while ( false[ !-fFr_1GChLYLc()[ new s6HNm().ZLB_UqYxvYofEP]]) -----new boolean[ !-this.j()].I5YnE43JZwn2; ; return; 
if ( 30044865[ ( -!false.G())[ 763681749.drjmtQjg]]) ; if ( !null[ -!!false.HXs5EFbPndvI]) while ( jeoYDBABLkswW()[ --this.wEKgJgqG]) return; boolean AcZhPWHV9Hd0e; return; void qLo272I0; boolean zoTcWK_GCjZa; void[][] nhNFuEldF45ICy; return; boolean WLROo0sW; boolean[][] zOfcMpJ2bXf; void[][][][] N_SphphEmYvV; if ( -new C_hKBB()[ -523625.WREd52BmawET()]) return; !this.XEcc(); boolean[][] Hb5kO; return; return; } G5Ilbd8Y[ 7.XwDoXJYD]; while ( ----12.gVrsyHc96j7rw()) while ( dfhPG()[ !-!!-d4TAAJgxuyYBv().CxWD]) return; } public boolean jd_YlIL; public int ouS0 (wH92N bE2j8m, int QfFt3496iFH) { int veFIhYok9dO; int[][] pkurpJWIL4hY_ = H0W2z[ -new int[ uR8vuy5l()[ true.V78wnM3BbF9]]._IcSaFGaeYaJGM()]; boolean eQ = false.sMoDKV; Heqvgf[] wavwlHV; while ( !!( -!QWoACCXg()[ --this[ new boolean[ !false[ 81134[ -!-!!false.K()]]].lhq39_9N]]).o51HXtl) return; int s35TmZwaCVgr; while ( true[ 263.d53GhMc8tNvW]) if ( --!new usAcSnMLWQQ_ph()[ !this.vgRWdS5SSCBJm()]) while ( zhnqUlfs741scd().TYyYwq) return; boolean dWjkr = new y().Z = true.tb2gM7Bm9DMVJ; void[] Y5gyy; ; if ( exmgr.l4w()) if ( -!!-!-!null.ioL0vmTzMwZ) return;else while ( -6[ -null.ygNWPxDS()]) while ( -Qdtg().J()) if ( -null.d6hA5()) if ( !-97602.jpWfH1()) if ( -( false.sK9anBS)[ -!--new EPu6FZOZ()[ null[ --!-20875[ !-vL().M_Ml2m()]]]]) if ( !new int[ new int[ avG.CkVHHt8ScT9h].XhBoZC()].a2MUaAz) -!-!false[ !!( Xn[ --true.cBNSf6Z7S()])[ ( -FtfMHolq().EmC()).EQ5X()]]; return; x76IuLagy JhXNe; return; while ( --null[ !-183066297.gHwc8f_3pNX()]) if ( -true.F4jBDiff()) return; oRFyNtq PLhKCsFzoY; } public static void z (String[] XKqt) { ; } public int F () throws DyyV00LxUL9n { return bEXhXaDZ3pqsUa().gOSgIxcT; Xx0bDQDDk[][] tFvom = !!-this.DLWGIE() = false.zP806; iu[][] bE75L5Pgn; boolean t5mDc1zQhaxRe = VQBmiSj0H.are6Ov9Hqx() = this.bbGA1t9ywm3iwG(); int d4 = 254.mmhls() = --true[ -true.DpHcncXX()]; int WLI4; { if ( 73623.R_gHh89Ug8H8()) ; { { { 6.D(); } } } while ( -new B().ZSBmL1_wp) while ( 
--lRRPnb2()._StLWeZ3kcxt5S()) if ( false[ --false[ !new s2I6()[ false.FHxEFKg1t]]]) ; int[][][] Y4gk0OwJavwMwp; void QA8I0u2cSC7Pe; int lRD; void nrHXEHPBGW; ( !-!null.R68P0K7SuhIlA).qIKeB6dRGDSN; ; ; if ( -new int[ this.t_bDHNDJ].NC()) return; { boolean[][][] bTP_dCS2JXS; } void QyIiwixio; boolean kI; { if ( null[ this.p0]) ; } if ( null[ ---new zEB4tkKZ().cSkuMCKr1]) ; } if ( this.qyNh1zWnVYrU) while ( false[ -false.i6nl2wL_QLTG_i()]) { int TBSqqGz7IXTE9e; }else { boolean OuyDayw9ED; } ; int OsJ = ( !bcWuf[ -!GeI.szZBFsEN9kR0()]).qy_adE() = -null[ --!false.ARTnHDA]; while ( !!false[ --true.FIGWMmGsj3Nsel()]) if ( false.sb) return; boolean awbKp7wc; if ( !true[ null.erhcFakPXg3r()]) ; int[] J9DAFGz = QvLfzukzFEk4l().YZ6BNhY = null[ !true[ !-!-115764.yVFkgSZH]]; int[] Cx0oAZpH1DPk; } public static void ve6M (String[] hHMPh) throws ZSCv { void sUR = -!--new aUAk0TkH5[ new void[ !-true[ ( 374482.CjY0nGO3)[ !!null.__7BzW]]][ -( -false[ new P()[ -new boolean[ -1924983.SkDo()].oV()]]).Dz()]].St(); RMKOFdViqcoOk I7kUPULl6vrpu4 = Er93u8ai()[ !!!--!-DPHEZniZ8m.C7jwbVFI7c2()]; while ( Xeg5j98cF().P8) if ( true.j2uc4mttWxVYj()) while ( -0810216[ -null.XBxxG2Q2koW()]) if ( ClG0BuiIGm()[ this.AgdRYLljkzIDOn()]) ; -!-null.x(); while ( rbyhSe0T0().CB75wah()) { sYB5QOHgO7oDp4[][][] H8ug_o; } int[] JFMt3; while ( 4503820.xX1Hqge10CO()) ; return -null.VGW; void V2ABb; TbmzVFAOmbBwy[] zRuUA3dtSAK; int[] kRnh8MOgqRlJN = new void[ !-!!-!NekD().VPtxEEpJ][ !!new m01jD43_KJ[ true.dk()][ !!false.Pwj]] = null.FawJyaHIC6(); } public static void r86qMaklZBu8et (String[] hU3IV3) throws WJbW { void[][] gy_b4cc5px; boolean pTugNzn; --false.K9s1_bNrHeJJS; if ( ivrnyMXqMBemA.gi1OGCJcVu) { int pMz0; }else if ( !null.lqmhjM9bYc3ET) -new int[ new WR0eiM().n].On; } }
#
# Pyserini: Reproducible IR research with sparse and dense representations
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import argparse
from typing import List

from tqdm import tqdm
from transformers import AutoTokenizer

from pyserini.search import get_topics
from pyserini.search.lucene.irst import LuceneIrstSearcher


def normalize(scores: List[float]):
    """Min-max normalize scores into [0, 1].

    Constant or empty score lists are returned unchanged (the original
    raised ValueError on an empty list because min()/max() were called
    unconditionally).
    """
    if not scores:
        return scores
    low = min(scores)
    high = max(scores)
    width = high - low
    if width != 0:
        return [(s - low) / width for s in scores]
    return scores


def query_loader(topic: str):
    """Load a topic set and BERT-tokenize each query title.

    Returns {topic_id: {"raw": original title, "contents": space-joined
    lowercased BERT word pieces}}.
    """
    queries = {}
    bert_tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
    topics_dic = get_topics(topic)
    line_num = 0
    for topic_id in topics_dic:
        line_num += 1
        query_text = topics_dic[topic_id]['title']
        text_bert_tok = bert_tokenizer.tokenize(query_text.lower())
        # Skip queries that tokenize to nothing. BUG FIX: the original tested
        # `len(...) >= 0`, which is always true and never filtered anything.
        if len(text_bert_tok) > 0:
            queries[topic_id] = {"raw": query_text,
                                 "contents": ' '.join(text_bert_tok)}
        if line_num % 10000 == 0:
            print(f"Processed {line_num} queries")
    print(f"Processed {line_num} queries")
    return queries


def baseline_loader(base_path: str):
    """Parse a TREC run file into {topic: [[doc_ids], [scores]]}.

    Expects whitespace-separated lines with the topic in column 0, the
    doc id in column 2 and the score in the second-to-last column.
    """
    result_dic = {}
    with open(base_path, 'r') as f:
        for line in f:
            tokens = line.strip().split()
            topic = tokens[0]
            doc_id = tokens[2]
            score = float(tokens[-2])
            entry = result_dic.setdefault(topic, [[], []])
            entry[0].append(doc_id)
            entry[1].append(score)
    return result_dic


def generate_maxP(preds: List[float], docs: List[str]):
    """Collapse segment scores to document level by max pooling.

    Segment ids look like "<docid>#<segment>"; each document keeps its best
    segment score. Returns (docid, score) pairs sorted by score descending.
    """
    scores = {}
    for score, doc_id in zip(preds, docs):
        docid = doc_id.split('#')[0]
        if docid not in scores or score > scores[docid]:
            scores[docid] = score
    return sorted(scores.items(), key=lambda kv: kv[1], reverse=True)


def sort_dual_list(pred: List[float], docs: List[str]):
    """Sort both lists jointly by score, descending, and return them.

    Empty inputs are returned as two empty lists (the original raised on
    empty input when unzipping zero pairs).
    """
    if not pred:
        return [], []
    # Sorting (score, doc) pairs descending is equivalent to the original
    # ascending sort followed by reversing both lists.
    sorted_pairs = sorted(zip(pred, docs), reverse=True)
    pred, docs = (list(t) for t in zip(*sorted_pairs))
    return pred, docs


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='use ibm model 1 feature to rerank the base run file')
    parser.add_argument('--tag', type=str, default="ibm",
                        metavar="tag_name", help='tag name for resulting Qrun')
    parser.add_argument('--base-path', type=str, required=False,
                        metavar="path_to_base_run", help='path to base run')
    parser.add_argument('--topics', type=str, required=True,
                        help='existing topics name or path to query topics')
    parser.add_argument('--index', type=str, required=True,
                        metavar="path_to_lucene_index", help='path to lucene index folder')
    parser.add_argument('--output', type=str, required=True,
                        metavar="path_to_reranked_run", help='the path to store reranked run file')
    parser.add_argument('--alpha', type=float, default="0.3",
                        metavar="type of field", help='interpolation weight')
    parser.add_argument('--num-threads', type=int, default="24",
                        metavar="num_of_threads", help='number of threads to use')
    parser.add_argument('--max-sim', default=False, action="store_true",
                        help='whether we use max sim operator or avg instead')
    parser.add_argument('--segments', default=False, action="store_true",
                        help='whether we use segmented index or not')
    parser.add_argument('--k1', type=float, default="0.81",
                        metavar="bm25_k1_parameter", help='k1 parameter for bm25 search')
    parser.add_argument('--b', type=float, default="0.68",
                        metavar="bm25_b_parameter", help='b parameter for bm25 search')
    parser.add_argument('--hits', type=int,
                        metavar='number of hits generated in runfile',
                        required=False, default=1000, help="Number of hits.")
    args = parser.parse_args()

    print('Using max sim operator or not:', args.max_sim)
    reranker = LuceneIrstSearcher(args.index, args.k1, args.b, args.num_threads)
    queries = query_loader(args.topics)

    # PERF FIX: load the baseline run once up front; the original re-read the
    # whole run file inside the per-topic loop (once per topic).
    baseline_dic = baseline_loader(args.base_path) if args.base_path else None

    with open(args.output, 'w') as f:
        for i, topic in enumerate(queries):
            if i % 100 == 0:
                print(f'Reranking {i} topic')
            query_text_field = queries[topic]['contents']
            query_text = queries[topic]['raw']
            bm25_results = reranker.bm25search.search(query_text, args.hits)
            if baseline_dic is not None:
                docids, rank_scores, base_scores = reranker.rerank(
                    query_text, query_text_field, baseline_dic[topic],
                    args.max_sim, bm25_results)
            else:
                docids, rank_scores, base_scores = reranker.search(
                    query_text, query_text_field, args.max_sim, bm25_results)

            ibm_scores = normalize(list(rank_scores))
            base_scores = normalize(list(base_scores))
            # Interpolate: alpha * baseline + (1 - alpha) * IBM model 1.
            interpolated_scores = [
                base * args.alpha + ibm * (1 - args.alpha)
                for base, ibm in zip(base_scores, ibm_scores)]

            preds, docs = sort_dual_list(interpolated_scores, docids)

            if args.segments:
                # Segmented index: pool segment scores to documents (maxP)
                # and emit at most 1000 documents per topic.
                docid_scores = generate_maxP(preds, docs)
                for rank, (doc_id, score) in enumerate(docid_scores, start=1):
                    if rank > 1000:
                        break
                    f.write(f'{topic} Q0 {doc_id} {rank} {score} {args.tag}\n')
            else:
                for index, (score, doc_id) in enumerate(zip(preds, docs)):
                    rank = index + 1
                    f.write(f'{topic} Q0 {doc_id} {rank} {score} {args.tag}\n')
<filename>server/src/modules/files/services/CreateFileService.ts import { injectable, inject } from 'tsyringe'; import AppError from '@shared/errors/AppError'; import File from '@modules/files/infra/typeorm/entities/File'; import IUsersRepository from '@modules/users/repositories/IUsersRepository'; import IFilesRepository from '@modules/files/repositories/IFilesRepository'; import IStorageProvider from '@shared/container/providers/StorageProvider/models/IStorageProvider'; interface IRequest { user_id: string; uploadedFile: { originalName: string; fileName: string; mimeType: string; }; } @injectable() class CreateFileService { constructor( @inject('FilesRepository') private filesRepository: IFilesRepository, @inject('UsersRepository') private usersRepository: IUsersRepository, @inject('StorageProvider') private storageProvider: IStorageProvider, ) {} public async execute({ user_id, uploadedFile }: IRequest): Promise<File> { const user = await this.usersRepository.findById(user_id); if (!user) { throw new AppError('Only authenticated users can upload files.'); } const file = await this.filesRepository.create({ user_id: user.id, name: uploadedFile.fileName, original_filename: uploadedFile.originalName, mime_type: uploadedFile.mimeType, }); const { $response: { error }, } = await this.storageProvider.saveFile({ s3Path: 'storage', fileName: file.name, mimeType: file.mime_type, }); if (error) { throw new AppError('An error occurred while saving file.'); } file.url = `${process.env.AWS_S3_URL}/storage/${file.name}`; return file; } } export default CreateFileService;
/** * Implements the generated visitor class to do the mapping to criteria api. */ public class EvalVisitor<T extends ResourceEntity> extends LogicalOperatorRulesBaseVisitor<Predicate> { private final FilterParser<T> filterParser; private final Root<T> root; public EvalVisitor(FilterParser<T> filterParser, Root<T> root) { this.filterParser = filterParser; this.root = root; } @Override public Predicate visitAndExp(LogicalOperatorRulesParser.AndExpContext ctx) { Predicate left = this.visit(ctx.expression(0)); Predicate right = this.visit(ctx.expression(1)); return filterParser.entityManager.getCriteriaBuilder().and(left, right); } @Override public Predicate visitBraceExp(LogicalOperatorRulesParser.BraceExpContext ctx) { return this.visit(ctx.expression()); } @Override public Predicate visitSimpleExp(LogicalOperatorRulesParser.SimpleExpContext ctx) { FilterExpression<T> filterExpression = getFilterExpressionFromContext(ctx); FilterChain<T> filterChain = filterParser.createFilterChain(filterExpression); return filterChain.createPredicateAndJoin(root); } private FilterExpression<T> getFilterExpressionFromContext(LogicalOperatorRulesParser.SimpleExpContext ctx) { String fieldName = ctx.FIELD().getText(); String value = ctx.VALUE().getText(); value = value.substring(1, value.length() - 1); // removed first and last quote value = value.replace("\\\"", "\""); // replaced \" with " FilterConstraint operator = FilterConstraint.fromString(ctx.OPERATOR().getText()); return filterParser.createFilterExpression(fieldName, operator, value); } @Override public Predicate visitSimplePresentExp(LogicalOperatorRulesParser.SimplePresentExpContext ctx) { String fieldName = ctx.FIELD().getText(); FilterConstraint operator = FilterConstraint.fromString(ctx.PRESENT().getText()); FilterExpression<T> filterExpression = filterParser.createFilterExpression(fieldName, operator, null); FilterChain<T> filterChain = filterParser.createFilterChain(filterExpression); return 
filterChain.createPredicateAndJoin(root); } @Override public Predicate visitNotExp(LogicalOperatorRulesParser.NotExpContext ctx) { Predicate term = this.visit(ctx.expression()); return filterParser.entityManager.getCriteriaBuilder().not(term); } @Override public Predicate visitOrExp(LogicalOperatorRulesParser.OrExpContext ctx) { Predicate left = this.visit(ctx.expression(0)); Predicate right = this.visit(ctx.expression(1)); return filterParser.entityManager.getCriteriaBuilder().or(left, right); } }
#ifndef NUMBER_H
#define NUMBER_H

#include <regex>

#include "Utils/string.h"
#include "Exceptions/compilerexception.h"

/**
 * Static helpers for recognising and converting numeric literals written in
 * binary (0b.../...b), decimal, or hexadecimal (0x.../...h) notation.
 */
class Number
{
private:
    static std::regex binPrefix; // finds 0b00000 or 0B00000 numbers
    static std::regex binPostfix; // finds 00000b or 00000B numbers
    static std::regex dec;
    static std::regex hexPrefix; // finds 0x00000 or 0X00000 numbers
    static std::regex hexPostfix; // finds 00000h or 00000H numbers

public:
    // Classification of a single token against the patterns above.
    static bool isBin(const std::string& str);
    static bool isDec(const std::string& str);
    static bool isHex(const std::string& str);
    static bool isNumber(const std::string& str);

    // Parses str into an unsigned value — presumably accepting any of the
    // supported notations; confirm against the definition in Number.cpp.
    static unsigned int getUInt(const std::string& str);

    // Presumably rewrites numeric literals found in str into decimal form;
    // confirm against the definition in Number.cpp.
    static std::string replaceNumbersWithDec(const std::string& str);
};

#endif // NUMBER_H
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	var s = "babad"
	fmt.Println(s[1:4])
	longestPalindrome(s)
}

// Runes2Bytes converts a rune slice to its UTF-8 byte encoding.
func Runes2Bytes(rs []rune) []byte {
	// First pass: total encoded length.
	size := 0
	for _, r := range rs {
		size += utf8.RuneLen(r)
	}
	// Second pass: encode each rune at its running offset.
	// FIX: removed a stray debug fmt.Println from the encode loop.
	buf := make([]byte, size)
	n := 0
	for _, r := range rs {
		n += utf8.EncodeRune(buf[n:], r)
	}
	return buf
}

// homework counts the runes above the Latin-1 range (a rough proxy for
// Chinese characters) in a fixed sample string and prints the count.
// It always returns "".
func homework() string {
	var str = "hello张鑫大屌哥"
	var count int
	for _, r := range []rune(str) {
		if r > 255 {
			count++
		}
	}
	fmt.Println(count)
	return ""
}

// longestPalindrome returns the longest palindromic substring of s using
// expand-around-center (O(n^2) time, O(1) space). It indexes bytes, so it
// assumes single-byte (ASCII) input.
func longestPalindrome(s string) string {
	if len(s) < 1 {
		return ""
	}
	start, end := 0, 0
	for i := range s {
		odd := expandAroundCenter(s, i, i)
		even := expandAroundCenter(s, i, i+1)
		length := odd
		if even > length {
			length = even
		}
		if length > end-start {
			start = i - (length-1)/2
			end = i + length/2
		}
	}
	fmt.Println(start, end, s[start:end+1])
	// BUG FIX: previously returned s[start:end], dropping the final
	// character of the palindrome (the print above already used end+1).
	return s[start : end+1]
}

// expandAroundCenter grows outward from the center (left, right) while the
// characters match and returns the length of the palindrome found.
func expandAroundCenter(s string, left int, right int) int {
	l := left
	r := right
	for l >= 0 && r < len(s) && s[l] == s[r] {
		l--
		r++
	}
	return r - l - 1
}
import numpy as np
import torch
import gym
import argparse
import os
import utils
import datetime
from torch.utils.tensorboard import SummaryWriter
from sliding_window import SlidingMin


def eval_policy(policy, env_name, seed, eval_episodes=10):
    """Run ``policy`` greedily for ``eval_episodes`` episodes, return mean return.

    A fixed seed offset (+100) is used so the eval environment never shares a
    seed with the training environment.
    """
    eval_env = gym.make(env_name)
    eval_env.seed(seed + 100)

    avg_reward = 0.
    for _ in range(eval_episodes):
        state, done = eval_env.reset(), False
        while not done:
            action = policy.select_action(np.array(state), test=True)
            state, reward, done, _ = eval_env.step(action)
            avg_reward += reward

    avg_reward /= eval_episodes

    print("---------------------------------------")
    print("Evaluation over {} episodes: {:.3f}".format(eval_episodes, avg_reward))
    print("---------------------------------------")
    return avg_reward


if __name__ == "__main__":

    parser = argparse.ArgumentParser()
    parser.add_argument("--policy", default="GRAC")                  # Policy name (GRAC)
    parser.add_argument("--env", default="Ant-v2")                   # OpenAI gym environment name
    parser.add_argument("--seed", default=0, type=int)               # Sets Gym, PyTorch and Numpy seeds
    parser.add_argument("--start_timesteps", default=1e4, type=int)  # Time steps initial random policy is used
    parser.add_argument("--eval_freq", default=5e3, type=int)        # How often (time steps) we evaluate
    parser.add_argument("--max_timesteps", default=3e6, type=int)    # Max time steps to run environment
    parser.add_argument("--expl_noise", default=0.1)                 # Std of Gaussian exploration noise
    parser.add_argument("--batch_size", default=256, type=int)       # Batch size for both actor and critic
    parser.add_argument("--discount", default=0.99)                  # Discount factor
    parser.add_argument("--noise_clip", default=0.5)                 # Range to clip target policy noise
    parser.add_argument("--save_model", action="store_true")         # Save model and optimizer parameters
    parser.add_argument("--load_model", default="")                  # Model load file name, "" doesn't load, "default" uses file_name
    parser.add_argument('--use_expl_noise', action="store_true")
    parser.add_argument('--log_freq', default=200, type=int)
    parser.add_argument('--cem_loss_coef', default=1.0, type=float)
    parser.add_argument('--loss_decay', default=0.95)
    parser.add_argument("--debug", action="store_true")
    parser.add_argument("--comment", default="")
    parser.add_argument("--exp_name", default="exp_ant")
    parser.add_argument("--which_cuda", default=0, type=int)
    args = parser.parse_args()

    device = torch.device('cuda:{}'.format(args.which_cuda))

    file_name = "{}_{}_{}".format(args.policy, args.env, args.seed)
    file_name += "_{}".format(args.comment) if args.comment != "" else ""
    folder_name = datetime.datetime.now().strftime('%b%d_%H-%M-%S_') + file_name

    # Result folder precedence: debug > exp_name > default "runs".
    # BUG FIX: was `args.exp_name is not ""` — an identity comparison against
    # a string literal; use value inequality instead.
    result_folder = 'runs/{}'.format(folder_name)
    if args.exp_name != "":
        result_folder = '{}/{}'.format(args.exp_name, folder_name)
    if args.debug:
        result_folder = 'debug/{}'.format(folder_name)
    if not os.path.exists('{}/models/'.format(result_folder)):
        os.makedirs('{}/models/'.format(result_folder))

    print("---------------------------------------")
    print("Policy: {}, Env: {}, Seed: {}".format(args.policy, args.env, args.seed))
    print("---------------------------------------")

    if not os.path.exists("./results"):
        os.makedirs("./results")
    if args.save_model and not os.path.exists("./models"):
        os.makedirs("./models")

    env = gym.make(args.env)

    # Set seeds
    env.seed(args.seed)
    torch.manual_seed(args.seed)
    np.random.seed(args.seed)

    state_dim = env.observation_space.shape[0]
    action_dim = env.action_space.shape[0]
    max_action = float(env.action_space.high[0])

    # Checkpointing is always forced on (see the periodic save below).
    if args.save_model is False:
        args.save_model = True

    kwargs = {
        "state_dim": state_dim,
        "action_dim": action_dim,
        "max_action": max_action,
        "batch_size": args.batch_size,
        "discount": args.discount,
        "log_freq": args.log_freq,
        "loss_decay": args.loss_decay,
        "cem_loss_coef": args.cem_loss_coef,
        "max_timesteps": args.max_timesteps,
        "device": device,
    }

    # Initialize policy. Every supported variant exposes a GRAC class in a
    # module named after the policy, so one branch covers all of them.
    if args.policy in ("GRAC", "GRAC_single", "GRAC_single_v2"):
        policy_module = __import__(args.policy)
        policy = policy_module.GRAC(**kwargs)

    if args.load_model != "":
        policy_file = 'model' if args.load_model == "default" else args.load_model
        policy.load("./{}/{}".format(result_folder, policy_file))

    replay_buffer = utils.ReplayBufferTorch(state_dim, action_dim, device=device, discount=args.discount)

    # Evaluate untrained policy
    evaluations = [eval_policy(policy, args.env, args.seed)]

    state, done = env.reset(), False
    episode_reward = 0
    episode_timesteps = 0
    episode_num = 0

    # Running statistics fed to policy.train().
    reward_max = 1.0
    episode_step_max = 1
    reward_min_buffer = SlidingMin(int(1e5))
    episode_step_min_buffer = SlidingMin(int(1e3))
    episode_step_min_buffer.insert(1000)

    writer = SummaryWriter(log_dir=result_folder, comment=file_name)

    # Record all parameter values.
    with open("{}/parameters.txt".format(result_folder), 'w') as file:
        for key, value in vars(args).items():
            file.write("{} = {}\n".format(key, value))

    for t in range(int(args.max_timesteps)):

        episode_timesteps += 1

        # Select action randomly or according to policy
        if t < args.start_timesteps:
            action = np.random.uniform(-max_action, max_action, action_dim)
        else:
            action = policy.select_action(np.array(state), writer=writer)
            if args.use_expl_noise:
                action = action + np.random.normal(0, max_action * args.expl_noise, size=action_dim)
            action = action.clip(-max_action, max_action)

        # Perform action
        next_state, reward, done, _ = env.step(action)
        if reward > reward_max:
            reward_max = reward
        reward_min_buffer.insert(reward)
        writer.add_scalar('test/reward', reward, t + 1)

        # Only mark the transition terminal when the episode did not end by
        # hitting the environment's time limit.
        done_bool = float(done) if episode_timesteps < env._max_episode_steps else 0

        # Store data in replay buffer
        replay_buffer.add(state, action, next_state, reward, done_bool)

        state = next_state
        episode_reward += reward

        # Train agent after collecting sufficient data
        if t >= args.start_timesteps:
            policy.train(replay_buffer, args.batch_size, writer, 20.0,
                         reward_max, episode_step_max,
                         reward_min_buffer.get_min(), episode_step_min_buffer.get_min())

        if done:
            # +1 to account for 0 indexing. +0 on ep_timesteps since it will increment +1 even if done=True
            print("Total T: {} Episode Num: {} Episode T: {} Reward: {:.3f}".format(
                t + 1, episode_num + 1, episode_timesteps, episode_reward))
            if episode_timesteps > episode_step_max:
                episode_step_max = episode_timesteps
            episode_step_min_buffer.insert(episode_timesteps)
            # Reset environment
            state, done = env.reset(), False
            episode_reward = 0
            episode_timesteps = 0
            episode_num += 1

        # Evaluate episode
        if (t + 1) % args.eval_freq == 0:
            evaluation = eval_policy(policy, args.env, args.seed)
            evaluations.append(evaluation)
            writer.add_scalar('test/avg_return', evaluation, t + 1)
            np.save("{}/evaluations".format(result_folder), evaluations)

        if (t + 1) % 5000 == 0:
            # BUG FIX: was `args.save_model: policy.save(...)` — an annotated
            # statement, so the "annotation" (the save call) ran
            # unconditionally. Make the intended guard explicit.
            if args.save_model:
                policy.save("./{}/models/iter_{}_model".format(result_folder, t + 1))
Triggering and guiding high-voltage discharge in air by single and multiple femtosecond filaments. The abilities to trigger and guide high-voltage discharge by using single and multiple filaments (MFs) are experimentally studied. It is shown that the discharge voltage threshold can be reduced significantly in both regimes of single and MF; however, the MF does not gain a larger reduction than a single filament. This behavior of the MF is attributed to the single discharge path rather than simultaneous multiple ones as one might expect during the discharge process.
// url: POST /uploadstrings?app=$appName&secret=$uploadSecret // POST data is in the format: /* AppTranslator strings string to translate 1 string to translate 2 ... */ func handleUploadStrings(w http.ResponseWriter, r *http.Request) { appName := strings.TrimSpace(r.FormValue("app")) app := findApp(appName) if app == nil { logger.Noticef("Someone tried to upload strings for non-existing app %s", appName) httpErrorf(w, "Application %q doesn't exist", appName) return } secret := strings.TrimSpace(r.FormValue("secret")) if secret != app.UploadSecret { logger.Noticef("Someone tried to upload strings for %s with invalid secret %s", appName, secret) httpErrorf(w, "Invalid secret for app %q", appName) return } s := r.FormValue("strings") if newStrings, err := parseUploadedStrings(s); err != nil { logger.Noticef("parseUploadedStrings() failed with %s", err) httpErrorf(w, "Error parsing uploaded strings") return } else { logger.Noticef("handleUploadString(): uploading %d strings for %s", len(newStrings), appName) added, deleted, undeleted, err := app.store.UpdateStringsList(newStrings) if err != nil { logger.Errorf("UpdateStringsList() failed with %s", err) } else { msg := "" if len(added) > 0 { msg += fmt.Sprintf("New strings: %v\n", added) } if len(deleted) > 0 { msg += fmt.Sprintf("Deleted strings: %v\n", deleted) } if len(undeleted) > 0 { msg += fmt.Sprintf("Undeleted strings: %v\n", undeleted) } if len(msg) > 0 { logger.Notice(msg) } w.Write([]byte(msg)) } } }
import java.io.*;
import java.util.StringTokenizer;

public class Main {

    /**
     * Reads an n x n permutation matrix from input.txt and writes the
     * permutation it encodes to output.txt: a 1 in row i, column j means
     * position j maps to value i+1 (1-based).
     *
     * FIX: the reader and writer were never closed when an exception was
     * thrown mid-way; both are now managed with try-with-resources.
     */
    public static void main(String[] args) throws IOException {
        int[] p;
        try (BufferedReader br = new BufferedReader(new FileReader("input.txt"))) {
            int n = Integer.parseInt(br.readLine().trim());
            p = new int[n];
            for (int i = 0; i < n; i++) {
                StringTokenizer st = new StringTokenizer(br.readLine());
                for (int j = 0; j < n; j++) {
                    if (Integer.parseInt(st.nextToken()) == 1) {
                        p[j] = i + 1;
                    }
                }
            }
        }
        try (BufferedWriter bw = new BufferedWriter(new FileWriter("output.txt"))) {
            for (int value : p) {
                bw.write(value + " ");
            }
        }
    }
}
package de.adorsys.opba.protocol.facade.services.scoped.consentaccess;

import de.adorsys.opba.db.domain.entity.Bank;
import de.adorsys.opba.db.domain.entity.Consent;
import de.adorsys.opba.db.domain.entity.psu.Psu;
import de.adorsys.opba.db.domain.entity.sessions.ServiceSession;
import de.adorsys.opba.db.repository.jpa.ConsentRepository;
import de.adorsys.opba.protocol.api.services.EncryptionService;
import de.adorsys.opba.protocol.api.services.scoped.consent.ConsentAccess;
import de.adorsys.opba.protocol.api.services.scoped.consent.ProtocolFacingConsent;
import de.adorsys.opba.protocol.facade.services.scoped.ConsentAccessUtil;
import lombok.RequiredArgsConstructor;

import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * {@link ConsentAccess} implementation scoped to a single PSU at a single ASPSP.
 */
@RequiredArgsConstructor
public class PsuConsentAccess implements ConsentAccess {

    private final Psu psu;
    private final Bank aspsp;
    private final EncryptionService encryptionService;
    private final ServiceSession serviceSession;
    private final ConsentRepository consentRepository;

    @Override
    public boolean isFinTechScope() {
        return false;
    }

    @Override
    public ProtocolFacingConsent createDoNotPersist() {
        // Build a transient consent; callers persist it later via save().
        return toProtocolFacing(
                Consent.builder()
                        .serviceSession(serviceSession)
                        .psu(psu)
                        .aspsp(aspsp)
                        .build()
        );
    }

    @Override
    public void save(ProtocolFacingConsent consent) {
        consentRepository.save(unwrap(consent));
    }

    @Override
    public void delete(ProtocolFacingConsent consent) {
        consentRepository.delete(unwrap(consent));
    }

    @Override
    public Optional<ProtocolFacingConsent> findSingleByCurrentServiceSession() {
        return ConsentAccessUtil.getProtocolFacingConsent(findByCurrentServiceSessionOrderByModifiedDesc());
    }

    @Override
    public List<ProtocolFacingConsent> findByCurrentServiceSessionOrderByModifiedDesc() {
        return consentRepository.findByServiceSessionIdOrderByModifiedAtDesc(serviceSession.getId())
                .stream()
                .map(this::toProtocolFacing)
                .collect(Collectors.toList());
    }

    @Override
    public Collection<ProtocolFacingConsent> getAvailableConsentsForCurrentPsu() {
        return consentRepository.findByPsu(psu).stream()
                .map(this::toProtocolFacing)
                .collect(Collectors.toList());
    }

    /** Wraps a persistence-layer consent into its protocol-facing view. */
    private ProtocolFacingConsent toProtocolFacing(Consent consent) {
        return new ProtocolFacingConsentImpl(consent, encryptionService);
    }

    /** Extracts the persistence-layer entity back out of the wrapper. */
    private Consent unwrap(ProtocolFacingConsent consent) {
        return ((ProtocolFacingConsentImpl) consent).getConsent();
    }
}
import { render } from '@testing-library/react-native'
import { TotalValueLocked } from './TotalValueLocked'

jest.mock('@shared-contexts/ThemeProvider')
jest.mock('@react-navigation/native', () => ({
  useNavigation: jest.fn()
}))

describe('Total Value Locked', () => {
  it('should match snapshot', async () => {
    // Render directly and compare the serialized tree with the stored snapshot.
    const tree = render(<TotalValueLocked tvl={123} />).toJSON()
    expect(tree).toMatchSnapshot()
  })
})
#!/usr/bin/env node
import * as fs from "fs"
import * as path from "path"
import Arguments from "yargs"

/**
 * Regex (string form) matched against path segments to find the parent
 * folder that renaming operates under.
 */
export const DIRECTORY_MATCH = "How to"

/**
 * Filename put beside original file to track action history
 */
export const NFO_NAME = "original.json"

/**
 * Exact amount of sub-dirs under DIRECTORY_MATCH
 */
export const SUBDIR_DEPTH = 3

/**
 * Force the --depth to be exact
 */
export const EXACT_DEPTH_DEFAULT = false

export const yargs = Arguments.option("directory", {
  required: true,
  type: "string",
  description: "folder containing torrent",
})
  .option("complete", {
    type: "number",
    description: "has torrent completed",
    default: 1,
  })
  .option("dir-match", {
    type: "string",
    required: true,
    default: DIRECTORY_MATCH,
    description: "Regex to match valid parent folder names for renaming",
  })
  .option("depth", {
    type: "number",
    required: true,
    default: SUBDIR_DEPTH,
    description: "Minimum amount of sub-dirs under --dir-match",
  })
  .option("exact-depth", {
    type: "boolean",
    default: EXACT_DEPTH_DEFAULT,
    description:
      "(un-wrap mode) if --depth is set, this will use sub-dir at exact depth for renaming and destination",
  })
  .option("nfo-name", {
    type: "string",
    required: true,
    description: "Filename put beside original file to track action history",
    default: NFO_NAME,
  })

import { logger as makeLogger } from "../logger"
import { CommandResponse } from "../command-response"

const logger = makeLogger()

export interface TorrentArgs {
  // required
  directory: string
  complete: number
  // optional
  name: string
  hash?: string
  tiedToFile?: number
  isMultiFile?: number
  // unused
  // basePath: string;
  // sessionPath: string;
  // hashing: number;
}

export interface CommandArgs {
  depth: number
  exactDepth: boolean
  dirMatch: string
  nfoName: string
}

/** Extracts the torrent-related fields from the raw CLI arguments. */
export const getTorrentFromArgs = (args: any): TorrentArgs => {
  const torrent = {
    name: args.name,
    directory: args.directory,
    complete: args.complete,
    hash: args.hash,
    tiedToFile: args.tiedToFile,
    isMultiFile: args.isMultiFile,
  }
  return torrent
}

/** Extracts rename options from the raw CLI arguments, applying defaults. */
export function getOptionsFromArgs(args: any): CommandArgs {
  logger.silly("getOptionsFromArgs", args)
  return {
    depth: typeof args.depth !== "undefined" ? parseInt(args.depth) : SUBDIR_DEPTH,
    exactDepth: typeof args.exactDepth !== "undefined" ? args.exactDepth : EXACT_DEPTH_DEFAULT,
    nfoName: typeof args.nfoName !== "undefined" ? (args.nfoName as string) : NFO_NAME,
    dirMatch: typeof args.dirMatch !== "undefined" ? (args.dirMatch as string) : DIRECTORY_MATCH,
  }
}

/**
 * Splits the torrent path at the first segment matching --dir-match and
 * returns the segments below it plus the destination folder at --depth.
 */
export const getTorrentSubdirs = (torrent: TorrentArgs, options: CommandArgs) => {
  const directoryMatchRegex = new RegExp(options.dirMatch, "i")
  const dirs = torrent.directory.split("/")
  // FIX: callback previously returned `true | 0`; return a plain boolean.
  const matchIndex = dirs.findIndex((row) => directoryMatchRegex.test(row))
  const subdirs = dirs.slice(matchIndex + 1)
  const subdirIndex = matchIndex + 1 + options.depth
  const destFolder = dirs.slice(0, subdirIndex).join("/")
  return {
    subdirs,
    destFolder,
  }
}

/** A torrent is renamable when complete, under --dir-match, at a valid depth. */
export const isTorrentRenamable = (torrent: TorrentArgs, options: CommandArgs): boolean => {
  const directoryMatchRegex = new RegExp(options.dirMatch, "i")
  const directoryPasses = directoryMatchRegex.test(torrent.directory)
  const completePasses = torrent.complete >= 1
  const subdirs = getTorrentSubdirs(torrent, options).subdirs

  let depthPasses = true
  if (options.depth > 0) {
    depthPasses = subdirs.length >= options.depth
    if (options.exactDepth === true) {
      logger.debug("exactDepth matching is turned on")
      depthPasses = subdirs.length === options.depth
    }
  }

  logger.silly("isTorrentRenamable", { completePasses, directoryPasses, depthPasses })
  if (!completePasses && directoryPasses && depthPasses) {
    logger.warn("isTorrentRenameable failed", { completePasses, directoryPasses, depthPasses })
  }
  return completePasses && directoryPasses && depthPasses
}

export const MovieTypes = [".mp4", ".wmv", ".avi"]

/**
 * Renames the single movie file in the torrent folder after its destination
 * folder name, and writes a .rename.nfo file recording the action.
 */
export const renameTorrent = async (torrent: TorrentArgs, options: CommandArgs) => {
  logger.debug("torrent.name " + torrent.name)
  let dest = torrent.directory.replace("/" + torrent.name, "").trim()
  if (path.basename(torrent.directory) !== torrent.name) {
    dest = torrent.directory
  }
  if (options.depth > 0) {
    const { destFolder } = getTorrentSubdirs(torrent, options)
    dest = destFolder
  }
  const folder_name = path.basename(dest)
  logger.debug("renameTorrent folder_name: " + folder_name)

  const movieFiles = fs
    .readdirSync(torrent.directory)
    .filter((row) => {
      // skip samples / previews / trailers
      return (
        path.basename(row).indexOf("sample") < 0 &&
        path.basename(row).indexOf("preview") < 0 &&
        path.basename(row).indexOf("trailer") < 0
      )
    })
    .filter((row) => {
      // only include movie MovieType extensions
      return MovieTypes.indexOf(path.extname(row)) >= 0
    })
    .map((row) => {
      return path.resolve(torrent.directory, row)
    })

  logger.debug("renameTorrent dest: " + dest)
  if (movieFiles.length === 1) {
    const from_file = movieFiles[0]
    const filename = path.basename(from_file)
    const ext = path.extname(from_file)
    const to_file = path.join(dest, folder_name + ext)
    logger.debug("renameTorrent to_file: " + to_file)
    const action = {
      from_file,
      to_file,
      filename,
      ext,
      date: new Date().toISOString(),
    }
    fs.renameSync(from_file, to_file)

    // write nfo log
    const nfoContent = JSON.stringify({ torrent, action }, null, 2)
    const nfoFile = path.join(dest, filename + ".rename.nfo")
    fs.writeFileSync(nfoFile, nfoContent)
    return { torrent, action }
  }
  return {
    torrent,
    reason: "Multiple movie files found, skipped",
  }
}

export const main = async (): Promise<CommandResponse> => {
  const argv = yargs.argv
  logger.silly("raw-arguments", argv)
  const torrent = getTorrentFromArgs(argv)
  const options = getOptionsFromArgs(argv)

  if (isTorrentRenamable(torrent, options)) {
    logger.info("Matched a Torrent!", torrent)
    // NOTE(review): isTorrentRenamable already requires complete >= 1, so
    // this branch is defensive only.
    if (torrent.complete <= 0) {
      logger.warn("Removed match but torrent incomplete!", torrent)
      return { exit_code: 1, message: `Removed match but ${torrent.name} is incomplete` }
    }
    return renameTorrent(torrent, options).then((nfo) => {
      return {
        exit_code: 0,
        // BUG FIX: the message previously repeated the torrent name twice.
        message: nfo.action ? `renamed ${torrent.name}` : "skipped",
      }
    })
  }

  logger.silly("ignored", argv.name)
  // write nfo log; FIX: fall back to the torrent name when the hash is
  // missing so the file is not named "undefined.rename-ignored.nfo".
  const nfoContent = JSON.stringify({ torrent }, null, 2)
  const nfoFile = path.resolve(torrent.directory, (torrent.hash ?? torrent.name) + ".rename-ignored.nfo")
  fs.writeFileSync(nfoFile, nfoContent)
  return { exit_code: 0, message: `${torrent.name}` }
}

if (require.main === module) {
  main()
    .then(() => {
      process.exit(0)
    })
    .catch((err) => {
      // BUG FIX: process.exit expects a numeric code; err was an Error
      // object. Log it and exit with a failure code instead.
      logger.error(err)
      process.exit(1)
    })
}

export default main
Mental health, self-esteem, and influences on sexual values and attitudes among black South African adolescent males This study investigated the relationships between sexual values and attitudes, self-esteem, and mental health among black South African adolescents. Adolescent males (n = 522) aged between 13 and 21 years participated in the study. They completed the Mathtech Sexuality Questionnaire for Adolescents: Attitude and Value Inventory, the Rosenberg Self-esteem Scale, and the Mental Health Continuum. Structural Equation Modelling (Mplus 7.31) was computed to identify an explanatory model for sexual values and attitudes as correlated to self-esteem and mental health of black adolescent males. Results suggest the youth's self-esteem significantly mediates the relationship between mental health and sexual values and attitudes. Specifically, mental health had indirect effects on values about sexuality and on healthy attitudes towards aspects of intimacy, through the teenagers' self-esteem.
In vitro and in vivo evaluation of benzathine foscarnet microcrystals as a potential intravitreal drug depot Sodium foscarnet is an antiviral drug against cytomegalovirus retinitis, and clinically it is used via frequent intravitreal injection which causes various ocular complications. Here we propose to use benzathine foscarnet in a new salt form with much lower aqueous solubility, and as a potential long-acting intravitreally injectable solid form for foscarnet. Benzathine foscarnet (1 : 1) microcrystals were synthesized and evaluated both in vitro and in vivo. The aqueous solubility of benzathine foscarnet was 14.2 mM, which is in between those of the currently-used sodium foscarnet and our previously-reported calcium foscarnet salt. In a rabbit model, the injected microcrystals last for about 3 weeks in the vitreous, suggesting its solubility and dissolution profile is appropriate for its intended use. However, the injected benzathine foscarnet microcrystals also caused adverse effects in vivo. Introduction Foscarnet is one of the few drugs being clinically used for the treatment of cytomegalovirus (CMV) retinitis, which is a common ocular infection for immune-suppressed populations such as AIDS (Acquired Immune Deficiency Syndrome) patients and organ transplant recipients. For those patients without immune recovery, CMV retinitis is still a devastating disease and eventually causes retina damage and blindness. 1,2 The currently approved form of foscarnet is its sodium salt solution, and it is administered through intravenous infusion. However, due to the existence of the blood-retina barrier, the retina bioavailability of intravenously-infused foscarnet is inadequate for its efficacy. 3 Consequently, it is a common clinical practice to use the foscarnet sodium solution off-label by direct injection into the vitreous cavity (i.e. intravitreal injection), 1-2 times per week, 2.4 mg in 0.1 mL saline per injection, and often lasting for the lifetime of the patients.
1,4 However, frequent intravitreal injections not only result in low patient compliance, but also cause ocular complications such as cornea damage, endophthalmitis and hemorrhage. 5,6 A long-lasting intravitreal injectable form of foscarnet may significantly reduce the injection frequency and is a clear unmet clinical need. Chemically, foscarnet has a phosphonoformate structure (Pfa, ⁻OOC–PO₃²⁻). In our previous study, 7 we evaluated foscarnet calcium microcrystals as a potential intravitreal drug depot. Compared with the highly soluble sodium salt (Na₃Pfa), foscarnet calcium (Ca₃(Pfa)₂) has a much lower solubility (the solubilities of the sodium and the calcium salts in deionized water are 260 mM and 0.3 mM, respectively). After intravitreal injection, the foscarnet calcium microcrystals with initial diameter around 10 µm slowly dissolved, and it was shown it took 3 months for their complete dissolution. Ideally, the gradual dissolution of foscarnet calcium should provide a continuous supply of the active Pfa ions and maintain its intravitreal concentration above the efficacious drug concentration. However, the reported half maximum inhibitory concentrations EC₅₀ for foscarnet against CMVs in the cell culture range from 0.05 to 0.8 mM. 8 Since the highest drug concentration that can be achieved after intravitreal injection of foscarnet calcium microcrystals is its equilibrium solubility (0.3 mM), the solubility of foscarnet calcium, though within the therapeutically relevant range, seems not high enough to maintain the efficacious drug concentration. The solubility of the drug crystal is a fundamental parameter for a drug depot. It should be low enough to slow down the drug crystal dissolution and last long in the vitreous cavity, while it must not be so low that the efficacious antiviral concentration cannot be reached.
7 In this study, we prepared and evaluated, both in vitro and in vivo in a rabbit model, the N,N 0 -dibenzyl ethylenediamine (benzathine, Scheme 1) salt of foscarnet as the drug depot solid with aqueous solubility in between that of the sodium and calcium salts. As a hydrophobic amine, benzathine has been clinically used as the cation in Penicillin G Benzathine (Bicillin L-A®) drug product, which is an intramuscularly injectable suspension of this low solubility penicillin salt to achieve a long lasting therapeutic effect. 9 The reported median lethal dose (LD 50 ) of benzathine in mouse is 388 mg kg À1 and 50 mg kg À1 via the oral and intraperitoneal route, respectively. 10 However, its ocular use has not been reported, and will be evaluated for the rst time in this study. Preparation of benzathine foscarnet Benzathine foscarnet microcrystals were synthesized via a simple salt metathesis reaction between foscarnet sodium and N,N 0 -dibenzyl ethylenediamine (benzathine) diacetate. In a typical experiment, 1.28 g (4 mmol) foscarnet sodium hexahydrate (Hubei Prosperity Galaxy Chemical Co., China) was dissolved in 50 mL deionized water, and 3.58 g (10 mmol) N,N 0dibenzyl ethylenediamine diacetate (Shanghai Sam Chemical Technology Co., China) was dissolved in 60 mL deionized water. Then the two solutions were mixed and stirred at room temperature for 24 hours. Aerwards, the stirring was stopped and the pH of the reaction system was measured (pH 6.2). The solid was separated by ltration and recrystallized once in the deionized water. Aer vacuum drying at room temperature for 12 hours, benzathine foscarnet product was collected (yield 41.7%). Crystal characterizations The benzathine foscarnet product was analyzed with Scanning Electron Microscope (SEM, JSM-7500F, JEOL Ltd., Japan) operated at an accelerating voltage of 3.0 kV. 
Polarized optical microscope (POM, BX41, Olympus) equipped with a digital camera (Moticam Pro 282A, Motic) was also used to observe the morphology of benzathine foscarnet. Thermogravimetric analysis (TGA) was used to check the existence of water or solvent in crystals and measure the degradation temperature. The sample was placed in an aluminum pan, and characterized using a thermogravimetric analyzer (TA Instruments TGA Q500IR). The samples were heated over the temperature range 30-500 C at a constant heating rate of 20 C min À1 , while purged with a stream of owing nitrogen at 50 mL min À1 throughout the experiment. Differential Scanning Calorimetry (DSC) tests were performed on a Shimadzu DSC-60 differential scanning calorimeter. 3-5 mg powder samples were placed in aluminum pans and heated from 30 C to 220 C at a rate of 10 C min À1 . 1 H-nuclear magnetic resonance ( 1 H-NMR) and 31 P-nuclear magnetic resonance ( 31 P-NMR) were used to conrm the chemical composition. 1 H-NMR and 31 P-NMR tests were performed on a JOEL JNM-ECA600 nuclear magnetic resonance spectrometer. Deuteroxide was used as the solvent. Analysis of the obtained spectra was performed using the MestReNova soware. Inductively coupled plasma optical emission spectrometry (ICP-OES) was used to conrm the chemical composition. Before testing, 100 mg benzathine foscarnet was fully dissolved in 2 mL nitric acid (65 wt%) and diluted with deionized water to make the nal solution volume 25 mL. The concentration of phosphorus was determined using inductively coupled plasma optical emission spectrometry (ICP-OES iCAP6300, Thermo Fisher). The plasma power is 1150 W and the speed of peristaltic pump is 50 rpm. The atomizing ow rate and the auxiliary ow rate were 0.6 L min À1 and 0.5 L min À1 , respectively. X-ray Photoelectron Spectroscopy (XPS) was also used to conrm the chemical composition. 
The sample was characterized using a PHI Quantera SXM X-ray photoelectron spectroscopy, by general rules for analysis of GB/T 19500-2004. Single crystal analysis was conducted to obtain the crystal structure of benzathine foscarnet. Single crystals of benzathine foscarnet were grown in aqueous solutions. A suitable crystal was selected and placed on a SuperNova AtlasS2 diffractometer. The crystal was kept at 100.00 (10) K during data collection. Using Olex2, the structure was solved with the ShelXT structure solution program using direct methods and rened with the ShelXL renement package using least squares minimization. X-ray diffraction (XRD) measurement was carried out with a Rigaku D/max-2500 X-ray diffractometer with Cu Ka radiation (1.54056Å). Samples were placed on the sample holder and continuous scans were performed at a speed of 4 2q per min and 0.02 2q per step in the range 2q ¼ 5-40 . Solubility determination Excess benzathine foscarnet were added into 2 mL different aqueous media: deionized water, 0.9% NaCl solutions and 0.1 mol L À1 Tris-HCl buffer (pH ¼ 7). The suspension was stirred at 37 C at 230 rpm for 48 h. Aerwards, the supernatant was withdrawn and ltered through MF-Millipore Membranes (220 nm). 1 mL supernatant was mixed with 1 mL potassium dihydrogen phosphite solution (20 mg mL À1 ). The concentration of foscarnet in the solution was determined by 31 P-NMR, using a JNM-ECA600 600M nuclear magnetic resonance spectrometer. Potassium dihydrogen phosphite was used as an internal calibration standard, whose P chemical shi in 31 P-NMR was around 1.6 ppm and 4.2 ppm. In contrast, the P chemical shi of foscarnet was around 0.5 ppm and clearly separated from the phosphite peak, and their peak area ratios can be used to quantitatively calculate the concentration of foscarnet in the solution. In vivo experiments Totally 6 healthy New Zealand white rabbits were used in this study. 
The Statement for the Use of Animals in Ophthalmic and Vision Research (ARVO) was followed, and local institutional approval was received from the review committee of the Beijing Chaoyang Hospital, Capital Medical University. Benzathine foscarnet suspension (100 mg mL À1 ) was prepared by adding 600 mg benzathine foscarnet into 6 mL normal saline, and sonicated to make it homogeneous. Each rabbit was given a single intravitreal injection of 0.2 mL benzathine foscarnet suspension in one eye and 0.2 mL saturated solution (260 mM) of foscarnet sodium in another eye. One rabbit was sacriced with overdose inhalation of general anesthesia (isourane) at 2 days aer intravitreal injection. Three rabbits were sacriced at 7 days and other two rabbits were sacriced at 21 days aer intravitreal injection. Aer extraction of the aqueous humor, the eyeballs of all rabbits were enucleated and the eyes were sectioned behind the lens, vitreous humor was obtained by dissecting it carefully from the retina. The vitreous samples of all six rabbits were observed under the polarized optical microscope (BX41, Olympus) equipped with a digital camera (Moticam Pro 282A, Motic). To conrm the chemical composition of remained solid, Energy Dispersive Spectroscopy (EDS) was used to characterize these residual crystals under the scanning electron microscope (JSM-7500F). Results and discussion Benzathine foscarnet was synthesized via a simple salt metathesis reaction between foscarnet sodium and benzathine acetate. The product was needle-shaped crystals with length around 100 mm and width in the order of 1-10 mm (Fig. 1A and B). Benzathine foscarnet crystals showed a single melting temperature at 168 C (Fig. 1D) and apparent degradation occurred above this temperature (Fig. 1C). No weight loss or thermal transition event was observed before the melting temperature, suggesting that the benzathine foscarnet crystal was not a hydrate or solvate. 
As a phosphonoformate compound, foscarnet can have up to 3 negative charges with pK a values of 0.48, 3.41 and 7.29. 11 The base benzathine (hereaer B for abbreviation) can have up to 2 positive charges with pK a values of 9.39 and 9.99. 10 Therefore, potentially multiple salt forms can be formed with different ratios between benzathine and foscarnet ions. The phosphorus element percentage in the benzathine foscarnet product was quantied by using ICP and XPS (Table 1), and both the phosphorus percentage value and the N/P ratio were consistent with those of a 1 : 1 benzathine foscarnet salt. This was also consistent with the fact that the reaction solution had a pH of 6.2, at which the dominating ion forms in the solution for both benzathine and foscarnet were divalent (i.e., B 2+ and Pfa 2À ), making it more likely to form a 1 : 1 salt as B 2+ Pfa 2À . In terms of the foscarnet content, 1 mg of benzathine foscarnet is equivalent to 0.82 mg of sodium benzathine hexahydrate, the actual drug form used in the current product. Therefore, as a potential foscarnet drug depot, the apparent drug loading of benzathine foscarnet microcrystals is about 82%, much higher than all the drug-encapsulated microsphere products in clinic, whose highest drug loading is 38% (i.e., Risperdal Consta®). 12 The ratio between benzathine and foscarnet in the product was further conrmed with single-crystal analysis (Fig. 2). Benzathine foscarnet crystal was determined to be a monoclinic system with one benzathine ion and one foscarnet ion in each unit cell. CCDC 1908677 contains the supplementary crystallographic data for this paper. More crystallography parameters of the benzathine foscarnet crystals are shown in Table 2. We then determined the aqueous solubility of benzathine foscarnet using 31 P-NMR with KH 2 PO 3 as the internal standard (Table 3). 
In the deionized water, the solubility of benzathine foscarnet was found to be 14.2 mM, whose order of magnitude is in between those of calcium foscarnet (0.3 mM) and sodium foscarnet (260 mM). 7 This solubility value (14.2 mM) is also one order of magnitude higher than the upper limit of the reported EC 50 range of foscarnet against cytomegalovirus (i.e., 0.8 mM), and likely to be high enough to maintain the intravitreal drug concentration above the efficacious level. The solubility of benzathine foscarnet in 0.9% NaCl solution and pH 7.0 Tris buffer (0.1 M) was even higher ( Table 3). The solubility difference among the three foscarnet salts was caused by the different interaction energy between the foscarnet anions and the cations (i.e., sodium, calcium and benzathine cations) in the solid states and in the solution. 10 Totally 6 rabbits were used in the in vivo evaluation study with two main objectives: one was to see how long it will take for the benzathine foscarnet microcrystals to completely dissolve in the vitreous cavity, and the other was to evaluate their intravitreal toxicity. Each rabbit received an intravitreal injection of On the other hand, needle-shaped crystals were clearly observed in the benzathine foscarnet-injected eye, from day 2 and up to day 21 (Fig. 3B-D). Energy Dispersive Spectroscopy (EDS) was used to characterize these residual crystals under the scanning electron microscope, and both phosphorus and nitrogen elements were observed (data not shown), conrming that these were benzathine foscarnet crystals. On the toxicity side, inammation (i.e., conjunctival congestion and pupil shrinkage) was observed at day 2 in all the animal eyes of both the benzathine foscarnet and the sodium foscarnet groups (Fig. 4A and B). At day 7, the inammation level was reduced ( Fig. 4C and D). Throughout the study period up to 21 days, the vitreous in the sodium foscarnet control group remained clear (Fig. 4E). 
However, in the benzathine foscarnet group, the vitreous was opaque, the light reflex disappeared, and cataract was observed (Fig. 4E). The adverse effect of the sodium foscarnet group was suspected to be due to the high foscarnet concentration of the injected solution (equivalent to 78 mg mL−1 sodium foscarnet hexahydrate vs. the normally used 24 mg mL−1). On the other hand, benzathine foscarnet microcrystals should gradually dissolve in the vitreous, and the foscarnet concentration in vivo should be lower than that of the sodium foscarnet group at each time point. The fact that the adverse effect was more severe in the benzathine foscarnet group may be due to the toxicity of the benzathine component, whose toxicity profile in the eye has not been previously reported in the literature. (Fig. 2: The ORTEP plot of the crystal structure of benzathine foscarnet. Displacement ellipsoids are drawn at the 30% probability level.) Conclusions In summary, we have synthesized the benzathine foscarnet (1 : 1) microcrystal, and evaluated it as a potential drug depot for the intravitreally injectable form of the anti-viral drug foscarnet. The aqueous solubility of benzathine foscarnet was found to be in between those of the previously-studied sodium and calcium salts of foscarnet. The preliminary in vivo tests in a rabbit model successfully demonstrated that the injected benzathine foscarnet crystals last for 3 weeks in the vitreous. However, the benzathine component in the salt, which has been used in the intravenous route but never been tested in the ocular route, caused significant adverse effects in the rabbit study. Therefore, we have demonstrated that foscarnet salts with lower solubility are indeed a valid approach to identify candidates for a foscarnet drug depot, but other bases may need to be screened to achieve a balanced combination of solubility and safety profile. Conflicts of interest There are no conflicts to declare.
package etcfs import "bazil.org/fuse/fs" type FS struct{} func (_ FS) Root() (fs.Node, error) { return Dir{}, nil }
def linear_problem(A,optimize_alpha,alpha):
    """Solve the regularized linear subproblem for the design matrix ``A``.

    Relies on names from the enclosing scope (``y``, ``ax``, ``L``,
    ``weights``, ``regtype``, ``regparam``, ``regorder``, ``linSolver``,
    ``parseResult``, ``dl``).

    Returns a tuple ``(linfit, alpha)``: the linear fit coerced to at least
    a 1-D array, and the (possibly re-optimized) regularization parameter.
    """
    if optimize_alpha:
        # Re-select the regularization parameter for this design matrix.
        alpha = dl.selregparam(y, A, ax, regtype, regparam, regorder=regorder)
    # Normal-equation components of the regularized least-squares problem.
    lhs, rhs = dl.lsqcomponents(y, A, L, alpha, weights, regtype=regtype)
    linfit = np.atleast_1d(parseResult(linSolver(lhs, rhs)))
    return linfit, alpha
<reponame>roopansh/Graphics-Lab<gh_stars>1-10
// Shared declarations for a Win32 (GDI) 3D wireframe / perspective-projection
// drawing demo: geometry types, drawing state, and rendering prototypes.

// NOTE(review): 3.142857 is 22/7, accurate to only two decimal places --
// confirm this approximation of pi is intentional.
const double PI = 3.142857;

// A point in 3D coordinates (world or eye space).
typedef struct {
    double x;
    double y;
    double z;
} POINT3D;

// A point in 2D (projected/screen) coordinates.
typedef struct {
    double x;
    double y;
} POINT2D;

// An RGB color triple.
typedef struct {
    int r, g, b;
} COLOR;

// Global drawing state: the off-screen GDI buffer plus viewing parameters
// (spherical eye position r/theta/phi and the projection-screen distance).
typedef struct {
    HDC hdcMem;       // memory device context for off-screen drawing
    HBITMAP hbmp;     // bitmap backing hdcMem
    SIZE windowSize;
    POINT origin;
    SIZE maxBoundary;
    double r, theta, phi, screen_dist;  // viewing parameters
} DRAWING_DATA;

// One polygonal face of a 3D shape: its vertices in world, eye, and
// projected 2D coordinates, a 2D bounding box, plane equation, and color.
typedef struct {
    int nPoints;
    POINT3D *vertex;   // world-space vertices
    POINT3D *eye3D;    // eye-space vertices
    POINT2D *proj2D;   // projected 2D vertices
    POINT2D min;
    POINT2D max;
    double A, B, C, D; // for plane eqn
    COLOR clr;
} SURFACE3D;

// A 3D shape as a collection of surfaces with an overall 2D bounding box.
typedef struct {
    int nSurfaces;
    SURFACE3D *surface;
    POINT2D min;
    POINT2D max;
} SHAPE3D;

const int WINDOW_WIDTH = 350;
const int WINDOW_HEIGHT = 250;

// Rendering-pipeline prototypes (implemented elsewhere in the project).
void initialize_shapeData();
void initialize(HWND hwnd, HDC hdc);
void compute3DCoefficients(double r, double theta, double phi);
void move3D(POINT3D ptWorld);
void line3D(POINT3D ptWorld);
void setShapeData();
void cleanup();
void drawImage(HDC hdc);
void get3DEyeFrom3DWorld(const POINT3D & pt3DWorld, POINT3D& pt3DEye);
void get2DPerspectiveFrom3DWorld(const POINT3D & pt3DWorld, double &x, double &y);
void crossProduct(const POINT3D pt[], int nPts, double& A, double& B, double& C);
void drawWireframe(COLOR);
void genImage(HWND hwnd);
void addSurface(const POINT3D& a, const POINT3D& b, const POINT3D& c, const POINT3D& d);
void addSurface(POINT3D pts[], const int &noOfPts);
void resetDataForShape();
void prepareShapeDataForDrawing();
import os
import sys

from emscripten_helpers import (
    run_closure_compiler,
    create_engine_file,
    add_js_libraries,
    add_js_pre,
    add_js_externs,
    create_template_zip,
)
from methods import get_compiler_version
from SCons.Util import WhereIs

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from SCons import Environment


def get_name():
    """Return the display name of this platform."""
    return "Web"


def can_build():
    """Return whether the Emscripten toolchain (emcc) is available on PATH."""
    return WhereIs("emcc") is not None


def get_opts():
    """Return the SCons build options specific to the Web platform."""
    from SCons.Variables import BoolVariable

    return [
        ("initial_memory", "Initial WASM memory (in MiB)", 32),
        BoolVariable("use_assertions", "Use Emscripten runtime assertions", False),
        BoolVariable("use_ubsan", "Use Emscripten undefined behavior sanitizer (UBSAN)", False),
        BoolVariable("use_asan", "Use Emscripten address sanitizer (ASAN)", False),
        BoolVariable("use_lsan", "Use Emscripten leak sanitizer (LSAN)", False),
        BoolVariable("use_safe_heap", "Use Emscripten SAFE_HEAP sanitizer", False),
        # eval() can be a security concern, so it can be disabled.
        BoolVariable("javascript_eval", "Enable JavaScript eval interface", True),
        BoolVariable(
            "dlink_enabled", "Enable WebAssembly dynamic linking (GDExtension support). Produces bigger binaries", False
        ),
        BoolVariable("use_closure_compiler", "Use closure compiler to minimize JavaScript code", False),
    ]


def get_doc_classes():
    """Return the editor documentation classes provided by this platform."""
    return [
        "EditorExportPlatformWeb",
    ]


def get_doc_path():
    """Return the path (relative to this platform) of the doc classes."""
    return "doc_classes"


def get_flags():
    """Return the default build-flag overrides for the Web platform."""
    return [
        ("arch", "wasm32"),
        ("target", "template_debug"),
        ("builtin_pcre2_with_jit", False),
        ("vulkan", False),
        # Use -Os to prioritize optimizing for reduced file size. This is
        # particularly valuable for the web platform because it directly
        # decreases download time.
        # -Os reduces file size by around 5 MiB over -O3. -Oz only saves about
        # 100 KiB over -Os, which does not justify the negative impact on
        # run-time performance.
        ("optimize", "size"),
    ]


def configure(env: "Environment"):
    """Configure the SCons environment for an Emscripten (WebAssembly) build."""
    # Validate arch.
    supported_arches = ["wasm32"]
    if env["arch"] not in supported_arches:
        # Fixed: the message previously said "for iOS" -- copy-paste from the
        # iOS detect script; this is the Web platform (see get_name()).
        print(
            'Unsupported CPU architecture "%s" for Web. Supported architectures are: %s.'
            % (env["arch"], ", ".join(supported_arches))
        )
        sys.exit()

    try:
        env["initial_memory"] = int(env["initial_memory"])
    except Exception:
        print("Initial memory must be a valid integer")
        sys.exit(255)

    ## Build type

    if env.debug_features:
        # Retain function names for backtraces at the cost of file size.
        env.Append(LINKFLAGS=["--profiling-funcs"])
    else:
        env["use_assertions"] = True

    if env["use_assertions"]:
        env.Append(LINKFLAGS=["-s", "ASSERTIONS=1"])

    if env.editor_build:
        if env["initial_memory"] < 64:
            print('Note: Forcing "initial_memory=64" as it is required for the web editor.')
            env["initial_memory"] = 64
    else:
        env.Append(CPPFLAGS=["-fno-exceptions"])

    env.Append(LINKFLAGS=["-s", "INITIAL_MEMORY=%sMB" % env["initial_memory"]])

    ## Copy env variables.
    env["ENV"] = os.environ

    # LTO
    if env["lto"] == "auto":
        # Full LTO for production.
        env["lto"] = "full"

    if env["lto"] != "none":
        if env["lto"] == "thin":
            env.Append(CCFLAGS=["-flto=thin"])
            env.Append(LINKFLAGS=["-flto=thin"])
        else:
            env.Append(CCFLAGS=["-flto"])
            env.Append(LINKFLAGS=["-flto"])

    # Sanitizers
    if env["use_ubsan"]:
        env.Append(CCFLAGS=["-fsanitize=undefined"])
        env.Append(LINKFLAGS=["-fsanitize=undefined"])
    if env["use_asan"]:
        env.Append(CCFLAGS=["-fsanitize=address"])
        env.Append(LINKFLAGS=["-fsanitize=address"])
    if env["use_lsan"]:
        env.Append(CCFLAGS=["-fsanitize=leak"])
        env.Append(LINKFLAGS=["-fsanitize=leak"])
    if env["use_safe_heap"]:
        env.Append(LINKFLAGS=["-s", "SAFE_HEAP=1"])

    # Closure compiler
    if env["use_closure_compiler"]:
        # For emscripten support code.
        env.Append(LINKFLAGS=["--closure", "1"])
        # Register builder for our Engine files
        jscc = env.Builder(generator=run_closure_compiler, suffix=".cc.js", src_suffix=".js")
        env.Append(BUILDERS={"BuildJS": jscc})

    # Add helper method for adding libraries, externs, pre-js.
    env["JS_LIBS"] = []
    env["JS_PRE"] = []
    env["JS_EXTERNS"] = []
    env.AddMethod(add_js_libraries, "AddJSLibraries")
    env.AddMethod(add_js_pre, "AddJSPre")
    env.AddMethod(add_js_externs, "AddJSExterns")

    # Add method that joins/compiles our Engine files.
    env.AddMethod(create_engine_file, "CreateEngineFile")

    # Add method for creating the final zip file
    env.AddMethod(create_template_zip, "CreateTemplateZip")

    # Closure compiler extern and support for ecmascript specs (const, let, etc).
    env["ENV"]["EMCC_CLOSURE_ARGS"] = "--language_in ECMASCRIPT6"

    env["CC"] = "emcc"
    env["CXX"] = "em++"
    env["AR"] = "emar"
    env["RANLIB"] = "emranlib"

    # Use TempFileMunge since some AR invocations are too long for cmd.exe.
    # Use POSIX-style paths, required with TempFileMunge.
    env["ARCOM_POSIX"] = env["ARCOM"].replace("$TARGET", "$TARGET.posix").replace("$SOURCES", "$SOURCES.posix")
    env["ARCOM"] = "${TEMPFILE(ARCOM_POSIX)}"

    # All intermediate files are just object files.
    env["OBJPREFIX"] = ""
    env["OBJSUFFIX"] = ".o"
    env["PROGPREFIX"] = ""
    # Program() output consists of multiple files, so specify suffixes manually at builder.
    env["PROGSUFFIX"] = ""
    env["LIBPREFIX"] = "lib"
    env["LIBSUFFIX"] = ".a"
    env["LIBPREFIXES"] = ["$LIBPREFIX"]
    env["LIBSUFFIXES"] = ["$LIBSUFFIX"]

    env.Prepend(CPPPATH=["#platform/web"])
    env.Append(CPPDEFINES=["WEB_ENABLED", "UNIX_ENABLED"])

    if env["opengl3"]:
        env.AppendUnique(CPPDEFINES=["GLES3_ENABLED"])
        # This setting just makes WebGL 2 APIs available, it does NOT disable WebGL 1.
        env.Append(LINKFLAGS=["-s", "USE_WEBGL2=1"])
        # Allow use to take control of swapping WebGL buffers.
        env.Append(LINKFLAGS=["-s", "OFFSCREEN_FRAMEBUFFER=1"])

    if env["javascript_eval"]:
        env.Append(CPPDEFINES=["JAVASCRIPT_EVAL_ENABLED"])

    # Thread support (via SharedArrayBuffer).
    env.Append(CPPDEFINES=["PTHREAD_NO_RENAME"])
    env.Append(CCFLAGS=["-s", "USE_PTHREADS=1"])
    env.Append(LINKFLAGS=["-s", "USE_PTHREADS=1"])
    env.Append(LINKFLAGS=["-s", "PTHREAD_POOL_SIZE=8"])
    env.Append(LINKFLAGS=["-s", "WASM_MEM_MAX=2048MB"])

    if env["dlink_enabled"]:
        cc_version = get_compiler_version(env)
        cc_semver = (int(cc_version["major"]), int(cc_version["minor"]), int(cc_version["patch"]))
        if cc_semver < (3, 1, 14):
            print("GDExtension support requires emscripten >= 3.1.14, detected: %s.%s.%s" % cc_semver)
            sys.exit(255)
        env.Append(CCFLAGS=["-s", "SIDE_MODULE=2"])
        env.Append(LINKFLAGS=["-s", "SIDE_MODULE=2"])
        env.extra_suffix = ".dlink" + env.extra_suffix

    # Reduce code size by generating less support code (e.g. skip NodeJS support).
    env.Append(LINKFLAGS=["-s", "ENVIRONMENT=web,worker"])

    # Wrap the JavaScript support code around a closure named Godot.
    env.Append(LINKFLAGS=["-s", "MODULARIZE=1", "-s", "EXPORT_NAME='Godot'"])

    # Allow increasing memory buffer size during runtime. This is efficient
    # when using WebAssembly (in comparison to asm.js) and works well for
    # us since we don't know requirements at compile-time.
    env.Append(LINKFLAGS=["-s", "ALLOW_MEMORY_GROWTH=1"])

    # Do not call main immediately when the support code is ready.
    env.Append(LINKFLAGS=["-s", "INVOKE_RUN=0"])

    # callMain for manual start, cwrap for the mono version.
    env.Append(LINKFLAGS=["-s", "EXPORTED_RUNTIME_METHODS=['callMain','cwrap']"])

    # Add code that allow exiting runtime.
    env.Append(LINKFLAGS=["-s", "EXIT_RUNTIME=1"])

    # This workaround creates a closure that prevents the garbage collector from freeing the WebGL context.
    # We also only use WebGL2, and changing context version is not widely supported anyway.
    env.Append(LINKFLAGS=["-s", "GL_WORKAROUND_SAFARI_GETCONTEXT_BUG=0"])
/* Clear AUX pointers of all edges. */

void
clear_aux_for_edges (void)
{
  basic_block bb;
  edge e;

  /* Walk the whole CFG from the entry block forward.  NOTE(review):
     assumes FOR_BB_BETWEEN excludes its end block (EXIT_BLOCK_PTR);
     the exit block has no successor edges to clear in any case.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      /* Reset the scratch AUX field of every outgoing edge of BB.  */
      for (e = bb->succ; e; e = e->succ_next)
	e->aux = NULL;
    }
}
/** * A wrapper class; this selection model allows only a single selection, * and does not allow the user to deselect. This model is useful when * the user should be required to have exactly one selection. * * @author Robert Futrell * * @version 1.0 */ public class RTreeSelectionModel extends DefaultTreeSelectionModel { @Serial private static final long serialVersionUID = 1L; // Constructor. public RTreeSelectionModel() { // Allow only a single element to be selected. setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION); } /** * Overrides <code>DefaultTreeSelectionModel</code>'s * <code>removeSelectionPath</code> to do nothing, so the user can't * "deselect". */ @Override public void removeSelectionPath(TreePath path) { } }
Plasmid Copy Number of pTRKH3 in Lactococcus lactis is Increased by Modification of the repDE Ribosome-Binding Site. Plasmids for DNA vaccination are exclusively produced in the Gram-negative Escherichia coli. One important drawback of this system is the presence of lipopolysaccharides. The generally recognized as safe Lactococcus lactis (L. lactis) would constitute a safer alternative for plasmid production. A key requirement for the establishment of a cost-effective L. lactis-based plasmid manufacturing is the availability of high-copy number plasmids. Unfortunately, the highest copy number reported in Gram-positive bacteria for the pAMβ1 replicon is around 100 copies. The purpose of this work is to engineer the repDE ribosome-binding site (RBS) of the pTRKH3 plasmid by site-directed mutagenesis in order to increase the plasmid copy number in L. lactis LMG19460 cells. The pTRKH3-b mutant is the most promising candidate, achieving 215 copies of plasmid per chromosome, a 3.5-fold increase when compared to the nonmodified pTRKH3, probably due to a stronger RBS sequence, a messenger RNA secondary structure that promotes the RepDE expression, an ideal intermediate amount of transcriptional repressors and the presence of a duplicated region that added an additional RBS sequence and one new in-frame start codon. pTRKH3-b is a promising high-copy number shuttle plasmid that will contribute to turn lactic acid bacteria into a safer and economically viable alternative as DNA vaccines producers.
Role of winds and tides in timing of beach strandings, occurrence, and significance of swarms of the jellyfish Crambione mastigophora Mass 1903 (Scyphozoa: Rhizostomeae: Catostylidae) in north-western Australia Very large swarms of the red jellyfish Crambione mastigophora in north-western Australia disrupt swimming on tourist beaches causing economic impacts. In October 2012, jellyfish stranding on Cable Beach (density 2.20 ± 0.43 ind. m−2) was estimated at 52.8 million individuals or 14,172 t wet weight along 15 km of beach. Reports of strandings after this period and up to 250 km south of this location indicate even larger swarm biomass. Strandings of jellyfish were significantly associated with a 2-day lag in conditions of small tidal ranges (<5 m). More than 90% of strandings occurred 2 days after winds were blowing onshore, but with the small number of days when satellite wind data were available during the study period, this result was not statistically significant. Dedicated instrument measurements of meteorological parameters, rather than the indirect measures used in this study (satellite winds and modelled currents) may improve the predictability of such events and help authorities to plan for and manage swimming activity on beaches. We also show a high incidence of predation by C. mastigophora on bivalve larvae which may have a significant impact on the reproductive output of pearl oyster broodstock in the region. Introduction Jellyfish are a ubiquitous and readily observed component of marine plankton. They may exist in high Electronic supplementary material The online version of this article (doi:10.1007/s10750-015-2525-5) contains supplementary material, which is available to authorized users. densities as swarms (aggregations from local or distant sources usually brought together by oceanographic or behavioural processes) or blooms (high density, locally derived population). 
The different manifestations of high densities of jellyfish and how they are formed have been reviewed previously (Graham et al., 2001;Hamner & Dawson, 2009). However, there has been recent interest in using and understanding oceanographic processes to develop predictive capacity to forecast when jellyfish might swarm near the shore bring them into contact with swimmers (Pontin et al., 2009;Gershwin et al., 2014). The extent to which jellyfish biology and behaviour can affect swarming and beach stranding (Fossette et al., 2015) has also gained recent attention. Jellyfish swarms at the coast cause other problems such as clogging power and desalination plant seawater intakes (Daryanabard & Dawson, 2008;Dong et al., 2010) and fish mortality in aquaculture farms (Doyle et al., 2008), and predictive capacity would assist in preparing for or preventing harmful and damaging effects of jellyfish swarms. Jellyfish swarms and blooms are natural phenomena, but their occurrence is increasingly being associated with anthropogenic disturbance and modification of the world's oceans and coastal seas. Climate change, overfishing and coastal development have all been suggested as contributing to jellyfish swarms and blooms, although the evidence is often equivocal (Mills, 2001;Purcell et al., 2007;Richardson et al., 2009;Dong et al., 2010;Lilley et al., 2011;Duarte et al., 2012;Purcell, 2012;Gibbons & Richardson, 2013), and the evidence of a world-wide trend in jellyfish swarms and blooms is debated (Condon et al., , 2013. The jellyfish swarms described in this study occurred in the remote northwestern Australia, where anthropogenic impacts are regarded as being very low on a global scale (Halpern et al., 2008) offering an opportunity to examine the drivers of swarms as well as their ecological, social and economic significance. 
The jellyfish Crambione mastigophora Maas 1903 is recorded from the eastern Indian Ocean and western Pacific (Kramp, 1961;Omori & Nakano, 2001;Kitamura & Omori, 2010). Despite its apparently common and sometimes abundant occurrence in north-western Australia (this study), Indonesia (Omori & Nakano, 2001), the ''Malayan Archipelago'', Sri Lanka and the ''Truk Islands'' (Micronesia) (Kramp, 1961), there is little published information on C. mastigophora beyond the taxonomic texts of Kramp (1961) and Stiansy (1929). C. mastigophora is capable of forming large swarms in north-western Australia where they are a nuisance due to stinging swimmers (Marsh & Slack-Smith, 2010). Hamner & Dawson (2009) categorised it as a putative bloomer on the basis that if they are fished commercially (Omori & Nakano, 2001), they must be very abundant, at least seasonally. However, there have been no previous studies anywhere describing the nature, extent, and time course of C. mastigophora swarms. Similarly, there are no published studies on any aspect of the biology of C. mastigophora. This study resulted from an unplanned opportunity which arose when very large swarms of C. mastigophora washed ashore in a small population centre on the only regularly lifeguard patrolled beach in what is a very remote part of Australia in 2012. The purpose of the paper is to record the incidence and scale of these, and earlier C. mastigophora swarms off the western coast of Australia, determine whether wind, currents and tides play an influential and predictable role in beach stranding events of these jellyfish and outline the likely socioeconomic and ecological implications of swarms of this species. Occurrence of swarms Details on the occurrence of C. 
mastigophora swarms in 1976, 2000, 2006, 2007, 2010, 2011, 2012, and 2013 were collated from a series of eye witness accounts by the authors and their associates: JK, TT, DB, JS, LG in 2010, 2012, TT, KO in 2011and TT in 2006 and others as well as reports to the Western Australian Museum (Marsh & Slack-Smith, 2010), the Western Australian Department of the Environment, the Broome Shire Council (Cable Beach life guard logbooks), the Eighty Mile Beach Caravan Park and from news reports. For the period of detailed investigation on Cable Beach (Fig. 1iii) in 2012, daily records of jellyfish strandings were made by TT and other Cable Beach life guards, for the period from 12 August to 31 October. Biomass of stranded jellyfish To determine the biomass of jellyfish washed up on Cable Beach, we first sampled jellyfish offshore to determine the size and weight distribution of the jellyfish population. We then compared the size distribution of jellyfish on the beach with those offshore using a Mann-Whitney U test (see Results section), and finding no difference, we applied the average weight of the jellyfish sampled offshore to counts of jellyfish washed ashore. A total of 159 individuals of C. mastigophora were collected with a dip net offshore of Gantheaume Point on 19 September 2012 at 17.977614°S, 122.173300°E from a 6-m boat taking care to collect all individuals that could be reached from the boat in the top 500 cm of water to ensure no bias in size selection. These were collected into buckets and then measured on shore for maximum bell width (diameter) and weighed (whole wet weight) to the nearest 1 gramme on an Accura ACC2070RD electronic scale. A further 186 individuals washed up on Cable Beach were measured for maximum bell width on 5 October 2012 by collecting all jellyfish from an area 4 by 8 m quadrat at 17.933556°S, 122.208500°E. 
Biomass in terms of wet weight to size was determined as above, and dry weight and ash-free dry weight (AFDW) were determined by drying samples of jellyfish tissue (principally from the bell) of known wet weight in a Labconco Freezone 2.5 freeze dryer at -40°C until constant weight was achieved and then reweighing before ashing at 450°C for 4.5 h and reweighing (Larson, 1986). Jellyfish tissue was not rinsed in freshwater before drying, so some residual salt may have remained when dried. The mean ratios of wet to dry and wet to AFDW were then used to calculate biomass for the full-size range of Fig. 1 jellyfish sampled. Total organic carbon, total nitrogen content and stable isotopes ( 13 C and 15 N) were assessed using freeze-dried tissue. Samples were analysed for d 15 N and d 13 C, using a continuous flow system consisting of a Delta V Plus mass spectrometer connected with a Thermo Flush 1112 via Conflo IV (Thermo-Finnigan/Germany) following the methods of Paul et al. (2007) and Skrzypek et al. (2010). To determine the biomass of jellyfish washed up on the Cable Beach (Fig. 1iii), we counted all jellyfish in twelve 20-by-1 m transects at six sites on 8 October 2012. The transects were set out in pairs along 2.5 km of beach heading south from a site seaward of the Broome Surf Life Saving Club (BSLSC). The location of the first transect was at 17.932639°S, 122.208833°E. The sites were 500 m apart along the beach, and at each site, two transects were conducted parallel to the shoreline. One transect was near the water's edge, and one transect was near the high-water mark so as to capture the range of densities at each site across the width of the beach. Figure 2iii shows the apparently even distribution of jellyfish across the beach on the day of sampling. The density of jellyfish from both high and low transects was calculated. 
To calculate total average biomass on the beach, the mean density was calculated from all 12 transects, and this density was converted to wet weight based on the size distribution measured on the same beach 3 days earlier and the relationship between size and wet weight. To enable the density of jellyfish on the full width of beach to be estimated, we used a conservative estimate of 100-m width based in two measurements made from aerial photographs (averaging 115.5 m) of the width of the beach adjacent to the BSLSC (127.5 m) and 3 km to the south (103.5 m). These measurements were made between the high (6.85 m) and low (3.32 m) tide points for 8 October 2012. A more extensive visual survey of jellyfish on the beach was made by driving a 4WD vehicle south from the BSLSC for 5 km to Gantheaume Point (17.970083°S, 122.191000°E) and north for 10 km to Coconut Wells (17.838611°S, 122.209028°E) (Fig. 1ii). This 15 km survey recorded the southern extent of the strandings, but they may have extended further north as the beach extends a further 7 km to a creek and then a further 13 km north after that. At one-km intervals along with this 15 km section of beach, a visual assessment and photograph along with the GPS position were recorded. This survey confirmed that densities measured over the 2.5 km stretch south of the BSLSC were representative of the entire beach. Diet To examine for diet, samples of live C. mastigophora were collected (as per the description above) from two locations on 19 September 2012: Offshore of Gantheaume Point at 17.966944°S, 122.178989°E and near Dampier Creek in Roebuck Bay at 17.967536°S, 122.243281°E. The jellyfish were measured (as per above) and placed in plastic bags and then on ice. 
The method of determining prey items followed that of Browne & Kingsford (2005) except it was necessary to freeze the samples in order to transport them to the laboratory where 14 individuals were thawed over 1-2 h in seawater filtered through Whatman Ò glass microfiber filters (0.6-0.8 lm) and then washed in the container to dislodge any prey items adhering to oral arms. Much of the jellyfish disintegrated during this process but otherwise large pieces of intact jellyfish were removed and washed with a wash bottle containing filtered seawater before all the remaining liquid was filtered through 1 mm and 63 lm sieves. The filtrate was fixed in 5% formalin before later being searched under a dissection microscope Leica M 205 C, magnification 6.3, objective 10x. Prey items were identified to the lowest taxonomic unit possible by an experienced zooplankton biologist (JS). The method used does not permit gelatinous prey items to be identified. Wind, tide, sea-surface temperature, chlorophyll-a, and modelled surface currents The satellite-derived sea-surface chlorophyll-a (Chla) concentration data are 8-day composited Level-3 global standard mapped images (SMI) derived from the moderate resolution imaging spectroradiometer (MODIS) aboard the Aqua satellite. The sea-surface temperature (SST) data were derived from long-wave (11 lm) SST algorithm (Franz, 2006). Only nighttime data were used to avoid any surface skin created by diurnal warming. The satellite-derived sea-surface wind data are from a daily near-real-time (NRT) product derived from WindSat Polarimetric Radiometer instrument. These satellite images were downloaded from the Remote Sensing Systems under the sponsorship of NASA (http://www.remss.com/). 
Additional wind speed and direction data (daily averages of anemometer and wind vane) along with tide data were obtained from the land-based Broome station of the Australian Bureau of Meteorology (BOM) http://www.bom.gov.au (which is within 10 km of all our study sites). The land-based wind data are more readily available than satellite data (which have data gaps) but are less relevant to sea conditions in the offshore location of the jellyfish swarms. In addition, the land-based wind data can be influenced by factors such as the daily land/sea breeze cycle, which sometimes occurs. Modelled surface current data were derived from BOM's OceanMAPS http://wp.csiro.au/bluelink/global/oceanmaps/ model using the analysis phase data. Model data are available on a 0.1° × 0.1° grid with 51 layers; the surface layer represents the top 5 m and does not take into account tides. Daily surface velocities for the region bounded by 17.5°S-18.5°S and 121.0°E-122.0°E were extracted and spatially averaged to produce a daily time series of current velocity and direction relative to north and magnitude in m s⁻¹ for 24 August 2012 to 20 October 2012 inclusive. Comparisons of swarm occurrence and the Southern Oscillation Index were made using data reported by the National Oceanic and Atmospheric Administration at https://www.ncdc.noaa.gov/teleconnections/enso/indicators/soi/. Predictors of beach stranding events In order to test the hypothesis that the incidence of beach strandings was independent of local wind, current and tide conditions, we sought to compare the incidence of jellyfish strandings (19 out of 58 days) with the prevailing offshore oceanographic and weather conditions. We compared the conditions on days with strandings to conditions on days without strandings for the 58-day period from 24 August to 20 October (3 days before the first and 3 days after the last major stranding) using the χ² statistic. 
As Cable Beach is oriented north-south, we were interested in the influence of onshore (north-west through to southwest) and offshore winds and currents (northeast through to south-east). Thus for wind and current, we used the satellite wind/current direction to determine for each day whether the wind/current was predominantly blowing/flowing from each of the NW (e.g., 270°-360°), SW, NE, or SE quarters. On days where both a morning (around 6 a.m.) and an evening (around 6 p.m.) wind reading was available and they were not the same (onshore vs offshore), we used the wind direction from the observation with the highest wind speed. This only occurred on three occasions, and it made no difference to the result of the χ² hypothesis testing whether we chose the wind direction with the slowest speed or omitted these three data points altogether. When the satellite did not provide any wind data (23 of 58 days), these days were not included in the analyses. For tides, we compared tidal ranges of <5 or ≥5 m (approximately half the maximum range of 9.82 m observed in this period). To calculate the tidal range, we used the difference between the highest and lowest of the semi-diurnal tides for that day. The reason we chose to do the likelihood test on wind direction and tide as categorical variables, in addition to the more dynamical multivariate approach described below, was to attempt to have an easy-to-use decision tool for lifeguards patrolling what is a popular tourist beach. Logistic regression analysis has previously been used (Decker et al., 2007) to relate jellyfish presence/absence data to environmental variables. 
We used a similar approach using the LOGIT module in SYSTAT (Systat Software Inc., San Jose, USA) to determine which predictor variable contributed the best fit to the regression, and which combination of environmental variables best described the relationship between the occurrence of jellyfish strandings and the potential environmental predictor variables of tide range, model current direction and speed, wind speed and wind direction at Broome. We used only environmental variables for which we had data over all the jellyfish sampling dates. For this reason, chlorophylla, sea-surface temperature and satellite wind data obtained were not used because of gaps resulting from intermittent cloud cover. A total dataset of 58 days including 19 days when jellyfish strandings took place (stranding events) was analysed. We used a twostaged approach, first modelling the presence/absence of jellyfish strandings with individual environmental variables, and then we applied a backward and forward stepwise regression in SYSTAT to choose the best set of predictive variables. Descriptive measures of goodness of fit provided from the analysis for each variable (log likelihood, receiver operating characteristic (ROC) analysis, McFadden's q 2 , Cox and Snell R 2 and Nagelkerke R 2 ) were used to determine the variables which provided the highest level of prediction. The models with multiple predictors were compared using log likelihood, Aikaike's information criterion (AIC) and Bayesian information criterion (BIC) to determine the best model selection. The use of AIC and BIC is considered more rigorous that likelihood analysis alone in evaluating logistic regression models and their use is described by Neath & Cavanaugh (2012). We have had to ignore the possibility of autocorrelation in the daily observations of stranding events, and we did not account for the circular nature of wind and current direction data (e.g., 5°is closer to 355°than is 340°). 
However, we do not think this would have had a large bearing on the analyses, at least for the wind direction as just two values were from the northeast direction (i.e., between 0°and 90°). Current directions were more uniformly distributed around the compass. Results Location, occurrence and timing of swarms and beach strandings Observations and locations of known swarms and beach strandings of C. mastigophora are shown in Fig. 1ii Fig. 1ii) was forced into an emergency shutdown when the intake pipes became clogged with red jellyfish (Marsh & Slack-Smith, 2010) (see also ESM Table 1). The largest swarm is likely to have been in April 2000 when high densities of C. mastigophora were observed from the air stretching over 1,200 km from Ningaloo ( Fig. 1ii) south to Rottnest Island (Marsh & Slack-Smith, 2010, see also ESM Table 1). Photographs of the recent swarms, at Cable Beach (Fig. 2iii) and 80 Mile Beach (Fig. 2iv) in 2012 and in the water column at Ningaloo in 2013 (Fig. 3), are shown. With the exception of the 1976 swarm, the swarms were recorded from two regions: near Broome on the Dampier Peninsula and elsewhere in the southern Kimberley region in 2006 (September/October), 2010 (April), 2011 (September to November) and 2012 (late August to November) and further south of Exmouth and Ningaloo in 1987, 2000 (always in April/May for these large swarms). Except for the observation of high densities of C. mastigophora offshore of the Dampier Peninsula in April 2010, all swarms in the northern region have been in the late August/September to November period and those in the more southern region occur in April and May. Some non-serious stings to swimmers on Cable Beach in 2011 and 2012 were treated by one of us (TT; see ESM Table 1). 
It is difficult to compare the size and significance of swarms between years; however, observations recorded in daily log books by life guards at Cable Beach show 2006 and 2012 as the largest strandings, with swarms in the intervening years not of comparable size. During 2012, numbers of C. mastigophora built up from 12 August with the first significant strandings on 27-29 August, becoming more dense from 12 to 15 September (Fig. 2i). The beach was closed to swimmers on 12 September. On subsequent days, when jellyfish were present in large numbers, although the beach was not closed, life guards continued to patrol and advised visitors of the hazard of swimming with the jellyfish. Life guards treated 396 minor stings in September and 336 in October. The last significant stranding was on 17 October, and C. mastigophora were uncommon by 23 October. The jellyfish were present throughout this period, but the very large strandings of many thousands of individuals occurred on 12, 13, 14, 15, 28, 29 September and 1, 4, 5, 8, 9, 10, 12, 15, 16, 17 October (Fig. 4). On days in-between, there were fewer jellyfish washed up but very high densities were being seen offshore. Log book records of the Broome Shire Life Guards and our own observations confirmed very high densities offshore of Cable Beach on 3 September and at the mouth of Dampier Creek in Roebuck Bay on 17 September; jellyfish were still very abundant offshore of Gantheaume Point on 19 September and another large influx was observed offshore of Cable Beach on 28 September. In the days following 12 September and as late as 20 September, when we (JK, DB) used a vehicle to visit beaches along the coastline to about 60 km north of Broome, small numbers of jellyfish could be observed along the Dampier Peninsula but campers and tourists we interviewed reported no large strandings. We (TT) found jellyfish stranded in large numbers from Gantheaume Point as far north as Coconut Wells on 8 October. 
They may have occurred further north as this was as far as observations were made at that time. Oceanographic information in relation to jellyfish strandings in 2012 Tide range varied from 1.25 m to 9.82 m during the period 24 August to 20 October. Of the 19 days with significant jellyfish strandings, there were 9 days (47.4%) that occurred when the tidal range was <5 m although only 18 days (31.0%) of all days during the same period had tidal ranges <5 m. This association was not significant (P = 0.061, χ² statistic = 3.522, 1 d.f.). However, tide range <5 m that occurred 1 and 2 days prior to the stranding days were both significantly associated with stranding events (P = 0.013, χ² statistic = 6.158, 1 d.f. for both 1 and 2 day lags). This result indicates that strandings were significantly more likely to follow days of small tidal range than those of larger tidal ranges. However, as shown in Fig. 4, not all stranding events occurred on days which followed smaller than average tide ranges. Water temperature increased from 20.5 to 28.5°C over the two-month period from early August until late October (ESM Fig. 1). The most significant change was during September, with water temperatures off Cable Beach increasing from 24°C to 27°C between 28 August and 29 September. Chlorophyll-a levels off Cable Beach were low (0.4-0.5 mg m⁻³) from 4 August to 4 September and then increased to 1.3-1.5 mg m⁻³ as chlorophyll-a levels increased first in Roebuck Bay and south and north to Cable Beach and beyond (ESM Fig. 2). Winds were easterly on average from 4 to 19 August before shifting south-westerly to westerly between 20 August and 4 September (Fig. 5). Winds were again easterly from 4 to 12 September before shifting westerly to south-westerly between 13 September and 6 October. From 7 October, winds were variable but predominantly southerly and south-westerly before turning westerly again from 15 to 22 October. 
From 23 to 30 October, winds were again variable from the south and west (Fig. 5). Among the 35 days that satellite wind data were available during the 58-day period analysed (24 August to 20 October), 7 of 11 (63.6%) of strandings occurred when winds were blowing onshore (north-west through to southwest). However, the direction of the wind (all four quadrants compared) was not significantly associated with jellyfish strandings on the days they occurred (P = 0.432, v 2 statistic = 2.751, 3 d.f.). Neither was it significantly associated with wind direction one or 2 days prior to strandings (P = 0.921, v 2 statistic = 0.491, 3 d.f. and (P = 0.413, v 2 statistic = 2.867, 3 d.f. respectively) despite 90.9% of strandings occurring when winds were blowing onshore (north-west through to southwest) with a 2-day time lag. It is likely that the small sample size which was affected by the absence of wind data from 23 of the days during the period of observation during which time there were 8 stranding events. During the 58 days, 12 of 19 (63.2%) of strandings occurred when currents flowed onshore. However, current direction (all four quadrants compared) was not significantly associated with jellyfish strandings on the days they occurred (P = 0.863, v 2 statistic = 0.741, 3 d.f.) or one or 2 days prior to strandings (P = 0.290, v 2 statistic = 3.748, 3 d.f. and (P = 0.501, v 2 statistic = 2.363, 3 d.f. respectively). Logistic regression analysis reinforced the importance of tide range as a predictor of strandings. The strongest individual predictor was tide range 2 days before strandings (P = 0.009, see Table 1), and the next best was current direction, also with a 2-day lag. Wind direction was a worse predictor than either tide or current. 
The best logistic regression model which combined the two predictors of tide range (2 day lag) and wind speed (2 day lag) provided only slight improvement in the fit over tide range alone according to Akaike's Information Criterion (AIC) ( Table 1). The Bayesian Information Criterion (Schwarz's BIC) did not show any improvement in the model fit when tide range and wind speed were combined (Table 1). This difference in model selection is easily explained by the way that Schwarz's BIC penalises attempts to improve model fit simply by an exhaustive exercise of adding more parameters and in analyses with small sample sizes such as ours. For this reason, Schwarz's BIC is favoured by some statisticians (Neath & Cavanaugh, 2012). Nevertheless, the improvement gained by the inclusion of current direction (P = 0.018) or wind speed with tide range in the model (P = 0.008) was slight, and the difference in wind speed between jellyfish stranding days (with a 2 day lag) and other days was negligible (means of 11.7 km h⁻¹, SD = 3.6 and 12.8 km h⁻¹, SD = 3.2, respectively). Both the univariate and multivariate approaches reinforced the importance of tide range as an important predictor of strandings. Neither of the two approaches found wind or current direction as a significant predictor of strandings. Size frequency and individual biomass Diameter of C. mastigophora collected offshore of Gantheaume Point on 19 September ranged from 3.5 to 18.0 cm in diameter with size frequency greatest at 5.0-5.9 cm and 11.0-11.9 cm (Fig. 6). Individuals between 10.0 and 12.9 cm made up 34.6% of the population and those <6.0 cm made up 17.0%. Diameter of jellyfish collected from Cable Beach on 5 October ranged from 4.3 to 21.5 cm in diameter with a peak in abundance at 10.0-10.9 cm. 
This sample had fewer small jellyfish (7.5% <6.0 cm), but there was no significant difference in mean size of medusae collected on 19 September and 5 October between the two samples (9.9 cm (SD = 3.5) and 10.1 cm (SD = 3.0) respectively), Mann-Whitney U test, U = 14,076, P = 0.442. Despite having equal variances (F test: F (158,185) = 1.298, P = 0.087), the samples were non-normally distributed (Shapiro-Wilk test, W = 0.979, P = 0.015 and W = 0.983, P = 0.025 respectively) and could not be satisfactorily transformed, so the nonparametric test was applied instead of a t test. Biomass of C. mastigophora collected offshore of Gantheaume Point increased exponentially with diameter ( Fig. 7), as estimated from the equation W = 0.2665 × D^2.8943 (Fig. 7) where W is whole wet weight in grammes and D is bell diameter in cm. The linear regression of log₁₀W against D was highly significant with R² = 0.844, P < 0.0001. In the population sampled, individuals ≥13 cm made up 46.6% of the biomass but only 17.6% of the abundance. The mean water content of the jellyfish after drying was 95.8% (SE = 1.25, n = 21), and after ashing, the mean percentage of dry weight that was found to be organic matter was 34.0% (SE = 1.26, n = 10). The mean organic carbon and total nitrogen content of the dried tissue were 7.5% (SE = 0.4, n = 21) and 2.0% (SE = 0.1, n = 21), respectively. The tissue of the jellyfish had carbon (δ¹³C) and nitrogen (δ¹⁵N) isotopic signatures of -20.79 (SE = 0.06, n = 21) and 8.14 (SE = 0.08, n = 21), respectively. Mean C:N ratio was 3.9 (± 0.02 SE, n = 21). Abundance and biomass of C. mastigophora stranded on Cable Beach The average wet weight of 168 C. mastigophora washed ashore on Cable Beach on 5 October 2012 (as predicted from mean bell diameter) was 268.4 g. The mean density (ind. per m 2 ± 1 standard error) of C. 
mastigophora washed ashore on Cable Beach on 8 October 2012 was 2.65 ± 0.64 (range 0.6-5.4) for the six high-water transects and 1.74 ± 0.58 (range 0.5-3.9) for the six low-water transects. The average was 2.20 ± 0.43 ind. per m 2 for all 12 transects along the whole beach. This was used as an estimate of density Table 1 Summary of logistic regression analysis using predictor variables of wind speed, wind direction and tide range (J = the day of a mass jellyfish stranding event, J-1 = 1 day before stranding, J-2 = 2 days before stranding) Descriptive measures of goodness of fit are provided for each variable by log likelihood, McFadden's q 2 , Cox and Snell R 2 , Nagelkerke R 2 and receiver operating characteristic (ROC) analysis. Models with combined variables were evaluated using Aikaike's information criterion (AIC) and Bayesian information criterion (BIC). The best parameter or model determined by each statistic is underlined and those that were significant (p \ 0.05) are bolded Hydrobiologia (2016) 768:19-36 29 for the whole beach and corresponded to a wet weight of 590.6 ± 115.4 g per m 2 . Using a conservative estimate of beach width of 100 m (measured average was 115.5 m, see Materials and Methods section) along the 2.5 km of beach where the density data were collected, means that the area of beach surveyed was 250,000 m 2 . Accordingly it can be calculated that along this stretch of beach, 147.6 ± 28.9 tonnes of jellyfish was deposited Power curve fitted by leastsquare best fit by the tide. This is equivalent to 59.0 ± 11.5 tonnes per linear kilometre of beach. The survey by vehicle of the extent of the swarm south and north of Cable Beach revealed that the jellyfish were washed up in similar numbers between Gantheaume Point and Coconut Wells, a distance of 15 km (Fig. 1iii). If the density and size distribution of C. 
mastigophora along this section of coastline were similar to that measured on Cable Beach as suggested by the photographic record obtained in our survey, then this would equate to a biomass of 886 ± 173 tonnes (3.30 million ± 0.64 million jellyfish) being deposited along the coast on 8 October 2012. Strandings of similar scale were observed on a total of 16 days between 12 September and 17 October 2012 meaning a total of 52.8 million ± 10.3 million (14,172 ± 2,770 tonnes) jellyfish may have washed ashore during this time. The estimate is conservative given on each of these 16 days that there were two tidal cycles each depositing jellyfish on the beach and there were smaller (but significant) numbers washed up on each of the other 57 days between 12 August and 23 October (see ESM Table 1). Diet and commensal organisms The prey items recovered from the tentacles of 14 C. mastigophora are shown in Fig. 8. On average, 220 organisms were recovered from tentacles (±68 SE). The taxonomic composition included small copepods, copepod fragments and other crustaceans, bivalves, invertebrate eggs, phytoplankton and microzooplankton (Fig. 8). All medusae had invertebrate eggs, and the majority had bivalves, pteropods, amphipods and copepods (Fig. 8). Proportionately bivalves, Fig. 8 Relative frequency of organisms recovered from arms of 14 Crambione mastigophora as a proportion of all items on arms. Percentages given after each prey type in the legend are the percentage of jellyfish that had captured that prey item pteropods and invertebrate eggs dominated the prey captured (Fig. 8). There was no difference in organisms found in tentacles of medusae collected from Dampier Creek or Gantheaume Point (Permanova Pseudo-F = 079, P = 0.6). Two jellyfish from Gantheaume Point harboured small (2 cm) carangid fishes as associate/commensal organisms. 
Distribution and occurrence of swarms This study found that significant jellyfish stranding events were associated with smaller than average tide range, with a time lag of 2 days. Using the same time lag, more than 90% of all strandings occurred when winds (as measured by satellite out over the oceans to the west of the beach) were blowing onshore, but this result was not statistically significant. Modelled currents flowing onshore were associated with 63-68% of strandings depending on the time lag used. Combining these three factors in a logistic regression model failed to provide a better prediction of stranding events than tide range on its own (but see comment on use of circular data in methods section). The influence of tide alone on meso-scale physical processes affecting jellyfish distribution would explain why days with significant beach strandings were interspersed with days of absent or minimal stranding. However, we expect that some combination of influence of wind or currents acting to bring the jellyfish close to the shore and the small tides preventing significant advection away from the beach is taking place. Tides, winds and currents are known to influence distribution of jellyfish and cause them to concentrate or ''swarm'' near to the shore (Zavodnik, 1987;Graham et al., 2001). The time lags account for the period between when physical processes are enacted and when their consequences (jellyfish on beach) are observed. The use of time lags in wind data has previously been shown (Pontin et al., 2009) to be important in predicting jellyfish distribution in Physalia (which have ''sails''). Our study shows that the use of time lags in tide range may be a useful predictor of rhizostome jellyfish distribution, at least, in areas where large tide ranges exist, such as in the north-west of Australia (Short, 2011). Tidal influence has been shown to be the dominant onshore-offshore current transport mechanism in this region (Condie et al., 2006). 
The element of predictability of jellyfish strandings may provide beach management authorities to plan for and manage beach use and swimming activity when jellyfish swarms are brought close to shore by tides and onshore winds. This has previously been emphasised by Gershwin et al. (2014) who show how changes in wind patterns can be responsible for onshore transport of cubozoan jellyfish and can be used to predict when irukandji jellyfish stings are most likely to occur. Although our results show tide as a significant predictor of strandings, our results did not reveal wind or modelled current direction as significant predictors. It is possible that for the v 2 analyses which relied on wind measurements from satellite made out to sea, we had an insufficient sample size to find an effect because the satellite did not provide any data during 23 of the 58 days over which the strandings occurred. For the logistic regression, we used wind measurements taken at a weather station on land to avoid missing values; however, these data are less relevant to where the jellyfish are located offshore before stranding, and thus, those winds will have less influence on the onshore transport of jellyfish. It is possible that direct measurements of wind and current related to jellyfish occurrence would yield better results than the satellite wind and modelled current data used here. Thus for the purposes of prediction and to be routinely applied, a method of more reliably obtaining current speed and direction and wind direction, from out over the ocean, west of the beach where the jellyfish are swarming, needs to be found. This could be in the form of moored weather stations and current metres. 
Another factor that could be important in confounding attempts to match stranding events to environmental conditions is that some scyphozoan jellyfish have been shown to exhibit swimming behaviour including prey searching and diel vertical migrations Moriarty et al., 2012) which will influence the extent to which ocean conditions and currents will influence onshore directional movement of jellyfish. Moreover, recent novel research (Fossette et al., 2015) using jellyfish tagged with accelerometers has shown how jellyfish can actively orient themselves to swim with or against a tidal current and that this behaviour can act to both maintain swarms and avoid beach strandings. The observations of Marsh & Slack-Smith (2010) and those from our study suggest that C. mastigophora can be seasonally abundant in tropical north-western Australia. However, the observed strandings may represent a small fraction of those that have occurred due to the remoteness and sparse human population (\100,000) in this region. The areas where it has been observed to be washed up in large numbers are popular tourist areas and strandings in more remote areas are unlikely to be recorded, especially given our observations of rapid disintegration of stranded jellyfish and their subsequent removal by large tidal ranges in the region. Any link between interannual variability in the timing and scale of observed swarms in relation to ocean climate variability is not evident. The swarm in 2012 coincided with an increase in both water temperature and chlorophyll-a, but it is not possible to say the swarm event is related to either as these are likely to be seasonal events. Economic and ecological significance of C. mastigophora swarms Large swarms, especially those that occur over protracted periods such as was the 2012 swarm, result in numerous stings on swimming beaches and also deter swimmers from beaches and from areas where tourism is of great economic importance (e.g., Cable Beach, Broome). 
Other significant impacts of the swarms have included blocking the water cooling intakes of the power station in Karratha in 1976 (Marsh & Slack-Smith, 2010) (see also ESM Table 1). C. mastigophora is an edible jellyfish species, and a fishery has been established in Indonesia (Java south coast) based on export to Japan (Omori & Nakano, 2001;Kitamura & Omori, 2010). To avoid confusion, it is worth noting here that Rumpet (1991) provides an excellent description of another south-east Asian fishery for ''red'' jellyfish, but it is unlikely that this is C. mastigophora. Rumpet's (1991) report from the South China Sea (Sarawak, Malaysia) almost certainly refers to Rhopilema esculentum Kishinouye 1891 based on the description of fisheries for this species and Rhopilema hispidum (Vanhöffen 1888) (white) elsewhere in the South China Sea (Vietnam) by Nishikawa et al. (2008). The economic feasibility of a fishery for C. mastigophora has not been evaluated in Australia and may be worthy of examination especially if swarms continue to have detrimental economic impacts. Bivalve larvae, pteropods and invertebrate eggs dominated the prey items we found on C. mastigophora arms and 26% of all observed prey items were bivalve larvae. Nothing is known of the feeding rates of C. mastigophora; however, at the population sizes we recorded, the potential for them to deplete plankton from the water column must be considered. Some jellyfish species feed predominantly and selectively on bivalve mollusc larvae including other rhizostome jellyfish. For example, Larson (1991) found very high feeding rates by Stomolophus meleagris Agassiz 1862 on oyster larvae which made up 56% of prey items and Ballard & Myers (1997) found bivalve larvae made up more than 80% of the diet of Proboscidactyla stellata (Forbes 1846). 
In the southern Kimberley region where we observed significant swarms in 2006, 2010, 2011, and 2012, the silver lipped pearl oyster Pinctada maxima (Jameson 1901) forms the basis of Australia's pearl industry valued at A$120 million per year (Hart & Joll, 2006). P. maxima spawn in the spring beginning in September when the peak spawning occurs although the spawning season can extend through to April (Rose et al., 1990). The peak spawning period coincides with the same period that C. mastigophora swarms have occurred near Broome and Eighty Mile Beach which is the most important area for pearl oyster broodstock in north-western Australia (Condie et al., 2006), meaning that when they are dense, C. mastigophora might be a significant predator of pearl oyster larvae. Condie et al. (2006) showed how tidal currents dominate pearl oyster larval transport in the Eighty Mile Beach region meaning larvae are retained in the area. This would make them vulnerable to large swarms of C. mastigophora which would be subject to the same transport patterns. Larson (1991) measured the feeding rate of the closely related S. meleagris and found a 325 g jellyfish would consume 6,000 prey items per day over half of which were oyster larvae. Based on these feeding rates, the population numbers of C. mastigophora estimated in this study (
<filename>GoExamplesReadOnly/go_pointer.go /* Go 语言中指针是很容易学习的,Go 语言中使用指针可以更简单的执行一些任务。 接下来让我们来一步步学习 Go 语言指针。 我们都知道,变量是一种使用方便的占位符,用于引用计算机内存地址。 Go 语言的取地址符是 &,放到一个变量前使用就会返回相应变量的内存地址。 以下实例演示了变量在内存中地址: */ package GoExamplesReadOnly import "fmt" func main() { var a int = 10 /* 声明实际变量 */ fmt.Printf("a变量的地址: %x\n", &a) var ip *int /* 声明指针变量 */ var ptr *int ptr = &a if ptr == nil { fmt.Printf("Nil ptr value is %x\n", ptr) } else { fmt.Printf("指针存储的变量地址: %x\n", ptr) } ip = &a /* 指针变量的存储地址 */ fmt.Printf("ip 变量储存的指针地址: %x\n", ip) /* 使用指针访问值 */ fmt.Printf("*ip变量值:%d\n", *ip) } /* 如何使用指针 指针使用流程: 定义指针变量。 为指针变量赋值。 访问指针变量中指向地址的值。 在指针类型前面加上 * 号(前缀)来获取指针所指向的内容。 */
There's a lot of debate surrounding Christopher Nolan's Interstellar, and one particular point isn't just about this film, but about the way we see storytelling. SPOILERS for Interstellar follow. I know, I know, another Interstellar think piece. You're sick of these by now, right? Well, I promise, I'm largely using Christopher Nolan's sci-fi epic here as a jumping-off point to get to something bigger, something that bugs me about the way we look at stories that just happened to resurface as reactions to this film began to populate the Internet. For the record, I quite liked Interstellar. I've seen it twice now and that feeling holds up. It dazzled me, thrilled me, and made me care about the characters, but I do acknowledge it's not a movie without flaws. Like a lot of Nolan's films, it feels a bit too self-serious, a bit too pleased with its own cleverness, and a bit too heavy-handed with its themes (how many times can you hear the same Dylan Thomas poem in one movie, I ask you?). I acknowledge these flaws, I'm willing to hear about potential others, and I've read some great pieces over the last week that were both highly critical of Nolan's film and solid pieces of thinking in their own right. I mention that because Nolan is one of those directors (I'd argue David Fincher is also one, and Paul Thomas Anderson is another) who inspires an almost battle-line way of thinking when it comes to the reaction to their films. For some people, Nolan's been a genius since The Dark Knight, and he always will be. For others, he's a popcorn director masquerading as an auteur. For me, he's a guy who makes good movies that can sometimes be great and sometimes be a little crumbly around the edges. I want you to know that upfront, because this is neither a defense nor a condemnation of Christopher Nolan and his work. I'm after something else, and it just so happens that Interstellar is what sparked it. 
In the days after Interstellar was released, the inevitable "look at all the plot holes" pieces started to pop up on the Internet, and while some of those pieces have legitimate gripes about the film, they're also filled with gems like this, from Entertainment Weekly: "Wouldn’t it have been way better for Professor Brand (Michael Caine) to just send the super-robots?" And then there's this one, from Vulture: "Who would have been the spaceship pilot if Cooper hadn't shown up? Would NASA have called him eventually? It seems weird to entrust the future of humanity with a semi-random trespasser." And another, also from EW: "Did anybody else get the impression if Mann just would have opened with, 'Sorry about the pings, I was crazy lonely and going nuts,' the other astronauts would have thought he was super unprofessional, but still let him tag along to the next planet?" There are even smaller nits picked in both of those pieces -- one of them complains that the aged Michael Caine simply doesn't look old enough -- but these questions stuck with me, because they're not actually "plot holes," and at least two of them are readily and easily explained by the characters and the story itself. A "plot hole" is exactly what it sounds like: Something missing from the plot that helps the story make sense. Imagine, for example, that Romilly hadn't explained time-stretching relativity to Brand and Cooper before they went to the water planet, and then when they got back he was 23 years older and they just went on with the movie. That's a plot hole. These complaints, on the other hand, are "Why didn't the character(s) just do this? It's what I would have done if I were placed in the same totally crazy and unreasonable situation. I know exactly how I'd behave in a spaceship that's through a wormhole in another galaxy and the future of the human race is at stake." I see this kind of thinking all the time. 
I hear it when I walk out of crowded movie theaters, read about it in Internet thinkpieces, see it on popular YouTube channels, and hell, I've probably been guilty of it a few times myself, but it infuriates me. It infuriates me because it comes from a way of thinking that's less about experiencing a story as its creators tell it to you and more about proving you're smarter than the movie. It's playing backseat driver to characters who are, in the best case scenario, supposed to feel like living, breathing, fallible humans in their own right. It's looking at stories like they're equations. Stories -- the good ones, anyway -- aren't logic puzzles that you're supposed to unravel after you've seen or read or heard them. Characters aren't therapy patients you're supposed to fix by reverse-engineering their decision-making. Dr. Mann, it's made quite clear, is not reasonable by the time he wakes up from hibernation, so why expect him to make reasonable choices? Why expect Professor Brand to send robots to do a job he has faith in his own daughter to do better? Characters, like us, make choices in the context of a story, and they're not always the right ones. If they were, the stories would be uninteresting tributes to rationalism, bloodless icons of agreeability. If you're perplexed or even infuriated by a choice a character makes, follow that feeling. It's part of the story. You're not supposed to always agree with them. You're supposed to find out what happens next. So, instead of spending the length of a movie trying to make the case for why you'd be a far better pilot for an interstellar mission, just let the story be itself. You'll have more fun, I promise.
/**
 * Register a new subscription to consume events.
 *
 * @param subscription the subscription to register; must not be null
 * @throws IllegalArgumentException if {@code subscription} is null
 */
public void register(Subscription subscription) {
    if (subscription == null) {
        throw new IllegalArgumentException("subscription must not be null");
    }
    LOG.info("Register subscription for mapping : {}", subscription.getName());
    // computeIfAbsent does the lookup-or-create in one step; unlike the
    // previous putIfAbsent/get pair it is also atomic when the backing map
    // is a ConcurrentMap, so two concurrent registrations for the same
    // query cannot race on list creation.
    subscriptionsByQuery
            .computeIfAbsent(subscription.getQuery(), query -> new ArrayList<>())
            .add(subscription);
}
/**
 * The persistent class for the "WEIGHT" database table.
 *
 * Each row describes one household measure (e.g. "1 cup") for a food item,
 * identified by the composite key (food, seq) declared in {@link WeightId}.
 */
@Entity
@IdClass(WeightId.class)
public class Weight implements Serializable {
	private static final long serialVersionUID = 1L;

	// Composite-key part 1: the food this measure belongs to.
	@Id
	@ManyToOne
	private Food food;

	// Composite-key part 2: sequence number of the measure within the food.
	@Id
	private Integer seq;

	// Unit quantity of the measure (e.g. 1, 0.5); precision 7, scale 3.
	@Column(nullable = false, precision = 7, scale = 3)
	private BigDecimal amount;

	// Weight in grams of the measure; precision 7, scale 1.
	@Column(nullable = false, precision = 7, scale = 1)
	private BigDecimal gm_wgt;

	// Human-readable description of the measure (e.g. "cup, diced").
	@Column(nullable = false, length = 84)
	private String description;

	// Number of data points behind the measurement; optional.
	@Column
	private Integer dataPts;

	// Standard deviation of the measurement; optional.
	@Column(precision = 7, scale = 3)
	private BigDecimal std_Dev;

	// JPA requires a public no-argument constructor.
	public Weight() {
	}
}
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_capture/video_capture_impl.h"

#include <stdlib.h>
#include <string.h>

#include "api/video/i420_buffer.h"
#include "api/video/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_capture/video_capture_config.h"
#include "rtc_base/logging.h"
#include "rtc_base/time_utils.h"
#include "rtc_base/trace_event.h"
#include "third_party/libyuv/include/libyuv.h"

namespace webrtc {
namespace videocapturemodule {

// Returns the unique id of the device backing this capturer, as set by the
// platform-specific subclass.
const char* VideoCaptureImpl::CurrentDeviceName() const {
  return _deviceUniqueId;
}

// static
// Maps a rotation angle in degrees to the VideoRotation enum.
// Returns 0 on success, -1 for an unsupported angle.
int32_t VideoCaptureImpl::RotationFromDegrees(int degrees,
                                              VideoRotation* rotation) {
  switch (degrees) {
    case 0:
      *rotation = kVideoRotation_0;
      return 0;
    case 90:
      *rotation = kVideoRotation_90;
      return 0;
    case 180:
      *rotation = kVideoRotation_180;
      return 0;
    case 270:
      *rotation = kVideoRotation_270;
      return 0;
    default:
      return -1;
      ;
  }
}

// static
// Inverse of RotationFromDegrees: maps a VideoRotation enum to degrees.
// Returns 0 on success, -1 if the enum value is unknown.
int32_t VideoCaptureImpl::RotationInDegrees(VideoRotation rotation,
                                            int* degrees) {
  switch (rotation) {
    case kVideoRotation_0:
      *degrees = 0;
      return 0;
    case kVideoRotation_90:
      *degrees = 90;
      return 0;
    case kVideoRotation_180:
      *degrees = 180;
      return 0;
    case kVideoRotation_270:
      *degrees = 270;
      return 0;
  }
  return -1;
}

// Initializes the capturer with a default I420 capability and no sinks.
VideoCaptureImpl::VideoCaptureImpl()
    : _deviceUniqueId(NULL),
      _requestedCapability(),
      _lastProcessTimeNanos(rtc::TimeNanos()),
      _lastFrameRateCallbackTimeNanos(rtc::TimeNanos()),
      _dataCallBack(NULL),
      _rawDataCallBack(NULL),
      _lastProcessFrameTimeNanos(rtc::TimeNanos()),
      _rotateFrame(kVideoRotation_0),
      apply_rotation_(false) {
  _requestedCapability.width = kDefaultWidth;
  _requestedCapability.height = kDefaultHeight;
  _requestedCapability.maxFPS = 30;
  _requestedCapability.videoType = VideoType::kI420;
  memset(_incomingFrameTimesNanos, 0, sizeof(_incomingFrameTimesNanos));
}

VideoCaptureImpl::~VideoCaptureImpl() {
  DeRegisterCaptureDataCallback();
  if (_deviceUniqueId)
    delete[] _deviceUniqueId;
}

// Registers a sink for decoded VideoFrames. Mutually exclusive with the raw
// sink (the DCHECK enforces that only one kind is registered at a time).
void VideoCaptureImpl::RegisterCaptureDataCallback(
    rtc::VideoSinkInterface<VideoFrame>* dataCallBack) {
  MutexLock lock(&api_lock_);
  RTC_DCHECK(!_rawDataCallBack);
  _dataCallBack = dataCallBack;
}

// Registers a sink for raw (unconverted) frames. Mutually exclusive with the
// VideoFrame sink above.
void VideoCaptureImpl::RegisterCaptureDataCallback(
    RawVideoSinkInterface* dataCallBack) {
  MutexLock lock(&api_lock_);
  RTC_DCHECK(!_dataCallBack);
  _rawDataCallBack = dataCallBack;
}

// Clears both sinks.
void VideoCaptureImpl::DeRegisterCaptureDataCallback() {
  MutexLock lock(&api_lock_);
  _dataCallBack = NULL;
  _rawDataCallBack = NULL;
}

// Forwards a converted frame to the registered VideoFrame sink, if any.
int32_t VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame) {
  UpdateFrameCount();  // frame count used for local frame rate callback.

  if (_dataCallBack) {
    _dataCallBack->OnFrame(captureFrame);
  }

  return 0;
}

// Forwards an unconverted buffer to the raw sink. Caller must have checked
// that _rawDataCallBack is set.
void VideoCaptureImpl::DeliverRawFrame(uint8_t* videoFrame,
                                       size_t videoFrameLength,
                                       const VideoCaptureCapability& frameInfo,
                                       int64_t captureTime) {
  UpdateFrameCount();
  _rawDataCallBack->OnRawFrame(videoFrame, videoFrameLength, frameInfo,
                               _rotateFrame, captureTime);
}

// Entry point for platform capture modules delivering a frame. Hands the raw
// buffer straight to a raw sink if one is registered; otherwise converts to
// I420 (applying the configured rotation if enabled) and delivers a
// VideoFrame. Returns 0 on success, -1 on failure.
int32_t VideoCaptureImpl::IncomingFrame(uint8_t* videoFrame,
                                        size_t videoFrameLength,
                                        const VideoCaptureCapability& frameInfo,
                                        int64_t captureTime /*=0*/) {
  MutexLock lock(&api_lock_);

  const int32_t width = frameInfo.width;
  const int32_t height = frameInfo.height;

  TRACE_EVENT1("webrtc", "VC::IncomingFrame", "capture_time", captureTime);

  if (_rawDataCallBack) {
    DeliverRawFrame(videoFrame, videoFrameLength, frameInfo, captureTime);
    return 0;
  }

  // Not encoded, convert to I420.
  if (frameInfo.videoType != VideoType::kMJPEG) {
    // Allow buffers larger than expected. On linux gstreamer allocates buffers
    // page-aligned and v4l2loopback passes us the buffer size verbatim which
    // for most cases is larger than expected.
    // See https://github.com/umlaeute/v4l2loopback/issues/190.
    if (auto size = CalcBufferSize(frameInfo.videoType, width, abs(height));
        videoFrameLength < size) {
      RTC_LOG(LS_ERROR) << "Wrong incoming frame length. Expected " << size
                        << ", Got " << videoFrameLength << ".";
      return -1;
    }
  }

  int stride_y = width;
  int stride_uv = (width + 1) / 2;
  int target_width = width;
  int target_height = abs(height);

  // SetApplyRotation doesn't take any lock. Make a local copy here.
  bool apply_rotation = apply_rotation_;

  if (apply_rotation) {
    // Rotating resolution when for 90/270 degree rotations.
    if (_rotateFrame == kVideoRotation_90 ||
        _rotateFrame == kVideoRotation_270) {
      target_width = abs(height);
      target_height = width;
    }
  }

  // Setting absolute height (in case it was negative).
  // In Windows, the image starts bottom left, instead of top left.
  // Setting a negative source height, inverts the image (within LibYuv).
  rtc::scoped_refptr<I420Buffer> buffer = I420Buffer::Create(
      target_width, target_height, stride_y, stride_uv, stride_uv);

  libyuv::RotationMode rotation_mode = libyuv::kRotate0;
  if (apply_rotation) {
    switch (_rotateFrame) {
      case kVideoRotation_0:
        rotation_mode = libyuv::kRotate0;
        break;
      case kVideoRotation_90:
        rotation_mode = libyuv::kRotate90;
        break;
      case kVideoRotation_180:
        rotation_mode = libyuv::kRotate180;
        break;
      case kVideoRotation_270:
        rotation_mode = libyuv::kRotate270;
        break;
    }
  }

  const int conversionResult = libyuv::ConvertToI420(
      videoFrame, videoFrameLength, buffer.get()->MutableDataY(),
      buffer.get()->StrideY(), buffer.get()->MutableDataU(),
      buffer.get()->StrideU(), buffer.get()->MutableDataV(),
      buffer.get()->StrideV(), 0, 0,  // No Cropping
      width, height, target_width, target_height, rotation_mode,
      ConvertVideoType(frameInfo.videoType));
  if (conversionResult < 0) {
    RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type "
                      << static_cast<int>(frameInfo.videoType) << "to I420.";
    return -1;
  }

  VideoFrame captureFrame =
      VideoFrame::Builder()
          .set_video_frame_buffer(buffer)
          .set_timestamp_rtp(0)
          .set_timestamp_ms(rtc::TimeMillis())
          // When rotation has already been applied to the pixels, report the
          // frame as unrotated.
          .set_rotation(!apply_rotation ? _rotateFrame : kVideoRotation_0)
          .build();
  captureFrame.set_ntp_time_ms(captureTime);

  DeliverCapturedFrame(captureFrame);

  return 0;
}

// Base-class stub; platform subclasses override. Records the requested
// capability and reports failure.
int32_t VideoCaptureImpl::StartCapture(
    const VideoCaptureCapability& capability) {
  _requestedCapability = capability;
  return -1;
}

// Base-class stub; platform subclasses override.
int32_t VideoCaptureImpl::StopCapture() {
  return -1;
}

// Base-class stub; platform subclasses override.
bool VideoCaptureImpl::CaptureStarted() {
  return false;
}

// Base-class stub; platform subclasses override.
int32_t VideoCaptureImpl::CaptureSettings(
    VideoCaptureCapability& /*settings*/) {
  return -1;
}

// Stores the rotation to apply (or report) for subsequent frames.
int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
  MutexLock lock(&api_lock_);
  _rotateFrame = rotation;
  return 0;
}

bool VideoCaptureImpl::SetApplyRotation(bool enable) {
  // We can't take any lock here as it'll cause deadlock with IncomingFrame.
  // The effect of this is the last caller wins.
  apply_rotation_ = enable;
  return true;
}

bool VideoCaptureImpl::GetApplyRotation() {
  return apply_rotation_;
}

// Pushes the current timestamp into the frame-time history used by
// CalculateFrameRate.
void VideoCaptureImpl::UpdateFrameCount() {
  if (_incomingFrameTimesNanos[0] / rtc::kNumNanosecsPerMicrosec == 0) {
    // first no shift
  } else {
    // shift
    for (int i = (kFrameRateCountHistorySize - 2); i >= 0; --i) {
      _incomingFrameTimesNanos[i + 1] = _incomingFrameTimesNanos[i];
    }
  }
  _incomingFrameTimesNanos[0] = rtc::TimeNanos();
}

// Estimates the recent frame rate (frames per second) from the timestamp
// history, considering only frames inside the history window.
uint32_t VideoCaptureImpl::CalculateFrameRate(int64_t now_ns) {
  int32_t num = 0;
  int32_t nrOfFrames = 0;
  for (num = 1; num < (kFrameRateCountHistorySize - 1); ++num) {
    if (_incomingFrameTimesNanos[num] <= 0 ||
        (now_ns - _incomingFrameTimesNanos[num]) /
                rtc::kNumNanosecsPerMillisec >
            kFrameRateHistoryWindowMs) {  // don't use data older than 2sec
      break;
    } else {
      nrOfFrames++;
    }
  }
  if (num > 1) {
    int64_t diff = (now_ns - _incomingFrameTimesNanos[num - 1]) /
                   rtc::kNumNanosecsPerMillisec;
    if (diff > 0) {
      return uint32_t((nrOfFrames * 1000.0f / diff) + 0.5f);
    }
  }

  return nrOfFrames;
}
}  // namespace videocapturemodule
}  // namespace webrtc
COLUMBUS, Ohio -- Already crossed out on signs around Ohio State's campus, citizens around the rest of the state are being asked to follow suit and boycott a certain letter of the alphabet in honor of the intense rivalry with Michigan. Ohio governor John Kasich posted a resolution on his Twitter account on Friday that recognizes "Scarlet Letter Saturday" and encourages people to avoid using the letter 'M' as the No. 3 Buckeyes travel to take on the Wolverines at Michigan Stadium. Ohio State took their rivalry with Michigan to another level, boycotting use of the letter 'M' ahead of Saturday's game. Austin Ward/ESPN Referencing other famous feuds, the fact that wolverines were added to the threatened species list and mentioning the only war between two states while crossing out every 'M' in the document, Kasich added yet another element to a series that already captures the attention of fans of both storied programs all year round. "We do hereby recognize Saturday, November 30, 2013 as 'Scarlet Letter Saturday' throughout Ohio," the resolution said. "[We] encourage all Ohioans to avoid using the letter 'M' when possible." The practice facility at Ohio State was already in compliance with the resolution earlier in the week, with scarlet tape crossing out all the outlawed letters as coach Urban Meyer's team prepared for its regular-season finale. Meyer had also covered up every other game on the schedule with white paper in the team meeting room, driving home the importance of beating "That Team Up North" above all else -- even a Big Ten championship game appearance next week or a potential spot in the BCS. "It is different," Meyer said. "It's not just another game. It's not, and our players know that. Does that mean we put less value on Indiana or less value on the following week coming up? I didn't say that. But there is an extra pep in the step. "Do we make a big deal out of this game? Absolutely. ... That's the way I was brought up. 
We kind of go over the top here, and we always have." That approach goes all the way up to the governor's office.
package cn.z201.delayed;

import java.util.List;

/**
 * Facade over a delay queue of items of type {@code T}.
 *
 * @author <EMAIL>
 **/
public interface DelayOrderI<T> {

    /**
     * Add an already-wrapped delayed item to the delay queue.
     *
     * @param itemDelayed the delayed wrapper to enqueue
     * @return boolean true if the item was accepted by the queue
     */
    boolean addToOrderDelayQueue(ItemDelayedI<T> itemDelayed);

    /**
     * Wrap the given data object and add it to the delay queue.
     *
     * @param data the payload to enqueue
     * @return boolean true if the item was accepted by the queue
     */
    boolean addToDelayQueue(T data);

    /**
     * Remove the delayed entry corresponding to the given data object
     * from the delay queue.
     *
     * @param data the payload whose queue entry should be removed
     */
    void removeToOrderDelayQueue(T data);

    /**
     * Return all payloads currently held in the queue.
     *
     * @return the queued payloads
     */
    List<T> all();
}
/**
 * Attempts to edit the account specified by the login form.
 * If there are form errors (invalid password, missing fields, etc.), the
 * errors are presented and no actual edit attempt is made.
 */
private void attemptEdit() {
    // Clear stale validation errors from a previous attempt.
    passwordView1.setError(null);
    passwordView2.setError(null);

    String password1 = passwordView1.getText().toString();
    String password2 = passwordView2.getText().toString();
    String description = descriptionView.getText().toString();
    String email = emailView.getText().toString();

    boolean cancel = false;
    View focusView = null;

    // Passwords must match.
    // NOTE(review): the error is attached to passwordView1 but focus goes to
    // passwordView2 — confirm this mismatch is intentional.
    if (!password1.equals(password2)) {
        passwordView1.setError(getString(R.string.error_invalid_password2));
        focusView = passwordView2;
        cancel = true;
    }
    // An empty password is allowed here — presumably "leave blank to keep
    // the current password"; verify against the update API.
    if (!isPasswordValid(password1) && !password1.isEmpty()) {
        passwordView1.setError(getString(R.string.error_invalid_password));
        focusView = passwordView1;
        cancel = true;
    }
    if (!isEmailValid(email)) {
        emailView.setError("Not a valid email");
        focusView = emailView;
        cancel = true;
    }

    if (cancel) {
        // cancel is only ever set together with focusView, so this is safe.
        focusView.requestFocus();
    } else {
        KeyboardUtil.hideKeyboard(this);
        showProgress(true);
        String username = PreferencesUtil.getUsername(getApplicationContext());
        User user = new User(username, password1, description);
        UserExtra userExtra = new UserExtra(username, password1, email);
        userController.update(user, userExtra, path, editCallback);
    }
}
/**
 * Return a string representation of this length using its short label.
 *
 * @param width The width for the label.
 * @param precision The precision for the label, if the length were in
 * meters. If the length is not in meters, this value will be
 * adjusted accordingly. This will only affect the fractional
 * portion of the number, and negative numbers are allowed.
 * @return The string.
 */
public String toShortLabelString(int width, int precision) {
    // A display magnitude other than exactly 1 selects the plural label form.
    String formattedValue = format(width, precision);
    String shortLabel = getShortLabel(getDisplayMagnitude() != 1.);
    return formattedValue + ' ' + shortLabel;
}
def _read_truth_file(i, flatHMF,
                     truthPath='./../data/buzzard_v1.0/allbands/truth/'):
    """Read one Buzzard truth file and append g/r magnitudes.

    Args:
        i (int): Index of the truth file (00-19).
        flatHMF (bool): When True, also read the VX/VY/VZ velocity columns.
        truthPath (str): Directory containing the truth*_Oii.hdf5 files.

    Returns:
        numpy structured array with HALOID, RA, DEC, Z, Oii (plus VX/VY/VZ
        when ``flatHMF``) and the g/r magnitudes taken from OMAG columns
        1 and 2.
    """
    tag = 'truth' + str(i).zfill(2) + '_Oii'
    with hdf.File('{}{}.hdf5'.format(truthPath, tag), 'r') as f:
        dset = f[tag]
        print(dset.file)
        if flatHMF:
            part = dset['HALOID', 'RA', 'DEC', 'Z', 'Oii', 'VX', 'VY', 'VZ']
        else:
            part = dset['HALOID', 'RA', 'DEC', 'Z', 'Oii']
        # OMAG columns 1 and 2 hold the g and r observed magnitudes.
        part = rfns.append_fields(
            part, ['g', 'r'], [dset['OMAG'][:, 1], dset['OMAG'][:, 2]],
            usemask=False)
    return part


def mkTruth(i=-1, flatHMF=False):
    """Load the Buzzard truth catalog(s) with Oii information.

    Args:
        i (int): Index of a single truth file (00-19) to load. With the
            default of -1, all 20 files are loaded and concatenated.
        flatHMF (bool): When True, also read the VX/VY/VZ velocity columns.

    Returns:
        numpy structured array covering the requested file(s). Fixes the
        original behavior where the all-files branch could return inside
        the loop after the first file, and replaces the quadratic
        np.append/NameError accumulation with a single concatenation.
    """
    indices = range(20) if i == -1 else [i]
    parts = [_read_truth_file(j, flatHMF) for j in indices]
    # A single part is returned as-is; multiple parts are concatenated once
    # instead of growing an array with np.append per file.
    return parts[0] if len(parts) == 1 else np.concatenate(parts)
package main

import (
	"fmt"
	"os"
	"path"
	"path/filepath"
	"strings"

	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
	"github.com/crowdsecurity/crowdsec/pkg/cwversion"
	"github.com/crowdsecurity/crowdsec/pkg/database"
	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

// Logging verbosity flags set from the persistent command-line flags;
// initConfig checks them from most to least verbose.
var trace_lvl, dbg_lvl, nfo_lvl, wrn_lvl, err_lvl bool

var ConfigFilePath string
var csConfig *csconfig.Config
var dbClient *database.Client
var OutputFormat string
var downloadOnly bool
var forceAction bool
var purge bool
var all bool
var restoreOldBackup bool
var prometheusURL string

// initConfig loads the crowdsec/cscli configuration and applies the logging
// and output settings derived from the global flags. It is installed as a
// cobra initializer in main() and terminates the process (log.Fatalf) on any
// configuration error.
func initConfig() {
	var err error

	// Pick the log level from the verbosity flags, most verbose first.
	if trace_lvl {
		log.SetLevel(log.TraceLevel)
	} else if dbg_lvl {
		log.SetLevel(log.DebugLevel)
	} else if nfo_lvl {
		log.SetLevel(log.InfoLevel)
	} else if wrn_lvl {
		log.SetLevel(log.WarnLevel)
	} else if err_lvl {
		log.SetLevel(log.ErrorLevel)
	}

	logFormatter := &log.TextFormatter{TimestampFormat: "02-01-2006 03:04:05 PM", FullTimestamp: true}
	log.SetFormatter(logFormatter)

	csConfig, err = csconfig.NewConfig(ConfigFilePath, false, false)
	if err != nil {
		log.Fatalf(err.Error())
	}
	log.Debugf("Using %s as configuration file", ConfigFilePath)
	if err := csConfig.LoadCSCLI(); err != nil {
		log.Fatalf(err.Error())
	}

	if csConfig.Cscli == nil {
		log.Fatalf("missing 'cscli' configuration in '%s', exiting", ConfigFilePath)
	}

	// The --branch flag (written straight into cwhub.HubBranch) takes
	// precedence over the branch configured in the config file.
	if cwhub.HubBranch == "" && csConfig.Cscli.HubBranch != "" {
		cwhub.HubBranch = csConfig.Cscli.HubBranch
	}

	if OutputFormat != "" {
		csConfig.Cscli.Output = OutputFormat

		if OutputFormat != "json" && OutputFormat != "raw" && OutputFormat != "human" {
			log.Fatalf("output format %s unknown", OutputFormat)
		}
	}
	if csConfig.Cscli.Output == "" {
		csConfig.Cscli.Output = "human"
	}

	// For machine-readable output, silence normal logging so it does not
	// pollute the stream.
	if csConfig.Cscli.Output == "json" {
		log.SetFormatter(&log.JSONFormatter{})
		log.SetLevel(log.ErrorLevel)
	} else if csConfig.Cscli.Output == "raw" {
		log.SetLevel(log.ErrorLevel)
	}
}

// validArgs lists the sub-command names accepted by the root command; cobra
// uses it for shell completion and argument validation.
var validArgs = []string{
	"scenarios", "parsers", "collections", "capi", "lapi", "postoverflows",
	"machines", "metrics", "bouncers", "alerts", "decisions", "simulation",
	"hub", "dashboard", "config", "completion", "version", "console",
	"notifications",
}

// prepender builds the front-matter header prepended to each generated
// markdown doc page, deriving id/title from the file name.
func prepender(filename string) string {
	const header = `---
id: %s
title: %s
---
`
	name := filepath.Base(filename)
	base := strings.TrimSuffix(name, path.Ext(name))
	return fmt.Sprintf(header, base, strings.Replace(base, "_", " ", -1))
}

// linkHandler rewrites generated doc cross-links to the hosted /cscli/ path.
func linkHandler(name string) string {
	return fmt.Sprintf("/cscli/%s", name)
}

func main() {
	var rootCmd = &cobra.Command{
		Use:   "cscli",
		Short: "cscli allows you to manage crowdsec",
		Long: `cscli is the main command to interact with your crowdsec service, scenarios & db.
It is meant to allow you to manage bans, parsers/scenarios/etc, api and generally manage you crowdsec setup.`,
		ValidArgs:         validArgs,
		DisableAutoGenTag: true,
		SilenceErrors:     true,
		SilenceUsage:      true,
		/*TBD examples*/
	}

	// Hidden command used to regenerate the markdown documentation tree.
	var cmdDocGen = &cobra.Command{
		Use:               "doc",
		Short:             "Generate the documentation in `./doc/`. Directory must exist.",
		Args:              cobra.ExactArgs(0),
		Hidden:            true,
		DisableAutoGenTag: true,
		Run: func(cmd *cobra.Command, args []string) {
			if err := doc.GenMarkdownTreeCustom(rootCmd, "./doc/", prepender, linkHandler); err != nil {
				log.Fatalf("Failed to generate cobra doc: %s", err.Error())
			}
		},
	}
	rootCmd.AddCommand(cmdDocGen)
	/*usage*/
	var cmdVersion = &cobra.Command{
		Use:               "version",
		Short:             "Display version and exit.",
		Args:              cobra.ExactArgs(0),
		DisableAutoGenTag: true,
		Run: func(cmd *cobra.Command, args []string) {
			cwversion.Show()
		},
	}
	rootCmd.AddCommand(cmdVersion)

	rootCmd.PersistentFlags().StringVarP(&ConfigFilePath, "config", "c", csconfig.DefaultConfigPath("config.yaml"), "path to crowdsec config file")
	rootCmd.PersistentFlags().StringVarP(&OutputFormat, "output", "o", "", "Output format : human, json, raw.")
	rootCmd.PersistentFlags().BoolVar(&dbg_lvl, "debug", false, "Set logging to debug.")
	rootCmd.PersistentFlags().BoolVar(&nfo_lvl, "info", false, "Set logging to info.")
	rootCmd.PersistentFlags().BoolVar(&wrn_lvl, "warning", false, "Set logging to warning.")
	rootCmd.PersistentFlags().BoolVar(&err_lvl, "error", false, "Set logging to error.")
	rootCmd.PersistentFlags().BoolVar(&trace_lvl, "trace", false, "Set logging to trace.")

	rootCmd.PersistentFlags().StringVar(&cwhub.HubBranch, "branch", "", "Override hub branch on github")
	if err := rootCmd.PersistentFlags().MarkHidden("branch"); err != nil {
		log.Fatalf("failed to make branch hidden : %s", err)
	}

	// Skip configuration loading for commands that must work without a
	// config file (completion, version, help).
	if len(os.Args) > 1 && os.Args[1] != "completion" && os.Args[1] != "version" && os.Args[1] != "help" {
		cobra.OnInitialize(initConfig)
	}

	/*don't sort flags so we can enforce order*/
	rootCmd.Flags().SortFlags = false
	rootCmd.PersistentFlags().SortFlags = false

	rootCmd.AddCommand(NewConfigCmd())
	rootCmd.AddCommand(NewHubCmd())
	rootCmd.AddCommand(NewMetricsCmd())
	rootCmd.AddCommand(NewDashboardCmd())
	rootCmd.AddCommand(NewDecisionsCmd())
	rootCmd.AddCommand(NewAlertsCmd())
	// rootCmd.AddCommand(NewInspectCmd())
	rootCmd.AddCommand(NewSimulationCmds())
	rootCmd.AddCommand(NewBouncersCmd())
	rootCmd.AddCommand(NewMachinesCmd())
	rootCmd.AddCommand(NewParsersCmd())
	rootCmd.AddCommand(NewScenariosCmd())
	rootCmd.AddCommand(NewCollectionsCmd())
	rootCmd.AddCommand(NewPostOverflowsCmd())
	rootCmd.AddCommand(NewCapiCmd())
	rootCmd.AddCommand(NewLapiCmd())
	rootCmd.AddCommand(NewCompletionCmd())
	rootCmd.AddCommand(NewConsoleCmd())
	rootCmd.AddCommand(NewExplainCmd())
	rootCmd.AddCommand(NewHubTestCmd())
	rootCmd.AddCommand(NewNotificationsCmd())

	if err := rootCmd.Execute(); err != nil {
		log.Fatal(err)
	}
}
/** * Static helper methods and classes for sets. */ public abstract class Sets { private Sets() {} public static ColumnSpecification valueSpecOf(ColumnSpecification column) { return new ColumnSpecification(column.ksName, column.cfName, new ColumnIdentifier("value(" + column.name + ")", true), elementsType(column.type)); } private static AbstractType<?> unwrap(AbstractType<?> type) { return type.isReversed() ? unwrap(((ReversedType<?>) type).baseType) : type; } private static AbstractType<?> elementsType(AbstractType<?> type) { return ((SetType) unwrap(type)).getElementsType(); } /** * Tests that the set with the specified elements can be assigned to the specified column. * * @param receiver the receiving column * @param elements the set elements */ public static AssignmentTestable.TestResult testSetAssignment(ColumnSpecification receiver, List<? extends AssignmentTestable> elements) { if (!(receiver.type instanceof SetType)) { // We've parsed empty maps as a set literal to break the ambiguity so handle that case now if (receiver.type instanceof MapType && elements.isEmpty()) return AssignmentTestable.TestResult.WEAKLY_ASSIGNABLE; return AssignmentTestable.TestResult.NOT_ASSIGNABLE; } // If there is no elements, we can't say it's an exact match (an empty set if fundamentally polymorphic). if (elements.isEmpty()) return AssignmentTestable.TestResult.WEAKLY_ASSIGNABLE; ColumnSpecification valueSpec = valueSpecOf(receiver); return AssignmentTestable.TestResult.testAll(receiver.ksName, valueSpec, elements); } /** * Create a <code>String</code> representation of the set containing the specified elements. * * @param elements the set elements * @return a <code>String</code> representation of the set */ public static String setToString(List<?> elements) { return setToString(elements, Object::toString); } /** * Create a <code>String</code> representation of the set from the specified items associated to * the set elements. 
* * @param items items associated to the set elements * @param mapper the mapper used to map the items to the <code>String</code> representation of the set elements * @return a <code>String</code> representation of the set */ public static <T> String setToString(Iterable<T> items, java.util.function.Function<T, String> mapper) { return StreamSupport.stream(items.spliterator(), false) .map(e -> mapper.apply(e)) .collect(Collectors.joining(", ", "{", "}")); } /** * Returns the exact SetType from the items if it can be known. * * @param items the items mapped to the set elements * @param mapper the mapper used to retrieve the element types from the items * @return the exact SetType from the items if it can be known or <code>null</code> */ public static <T> AbstractType<?> getExactSetTypeIfKnown(List<T> items, java.util.function.Function<T, AbstractType<?>> mapper) { Optional<AbstractType<?>> type = items.stream().map(mapper).filter(Objects::nonNull).findFirst(); return type.isPresent() ? SetType.getInstance(type.get(), false) : null; } public static class Literal extends Term.Raw { private final List<Term.Raw> elements; public Literal(List<Term.Raw> elements) { this.elements = elements; } public Term prepare(String keyspace, ColumnSpecification receiver) throws InvalidRequestException { validateAssignableTo(keyspace, receiver); // We've parsed empty maps as a set literal to break the ambiguity so // handle that case now if (receiver.type instanceof MapType && elements.isEmpty()) return new Maps.Value(Collections.<ByteBuffer, ByteBuffer>emptyMap()); ColumnSpecification valueSpec = Sets.valueSpecOf(receiver); Set<Term> values = new HashSet<>(elements.size()); boolean allTerminal = true; for (Term.Raw rt : elements) { Term t = rt.prepare(keyspace, valueSpec); if (t.containsBindMarker()) throw new InvalidRequestException(String.format("Invalid set literal for %s: bind variables are not supported inside collection literals", receiver.name)); if (t instanceof 
Term.NonTerminal) allTerminal = false; values.add(t); } DelayedValue value = new DelayedValue(elementsType(receiver.type), values); return allTerminal ? value.bind(QueryOptions.DEFAULT) : value; } private void validateAssignableTo(String keyspace, ColumnSpecification receiver) throws InvalidRequestException { AbstractType<?> type = unwrap(receiver.type); if (!(type instanceof SetType)) { // We've parsed empty maps as a set literal to break the ambiguity so // handle that case now if ((type instanceof MapType) && elements.isEmpty()) return; throw new InvalidRequestException(String.format("Invalid set literal for %s of type %s", receiver.name, receiver.type.asCQL3Type())); } ColumnSpecification valueSpec = Sets.valueSpecOf(receiver); for (Term.Raw rt : elements) { if (!rt.testAssignment(keyspace, valueSpec).isAssignable()) throw new InvalidRequestException(String.format("Invalid set literal for %s: value %s is not of type %s", receiver.name, rt, valueSpec.type.asCQL3Type())); } } public AssignmentTestable.TestResult testAssignment(String keyspace, ColumnSpecification receiver) { return testSetAssignment(receiver, elements); } @Override public AbstractType<?> getExactTypeIfKnown(String keyspace) { return getExactSetTypeIfKnown(elements, p -> p.getExactTypeIfKnown(keyspace)); } public String getText() { return setToString(elements, Term.Raw::getText); } } public static class Value extends Term.Terminal { public final SortedSet<ByteBuffer> elements; public Value(SortedSet<ByteBuffer> elements) { this.elements = elements; } public static Value fromSerialized(ByteBuffer value, SetType type, ProtocolVersion version) throws InvalidRequestException { try { // Collections have this small hack that validate cannot be called on a serialized object, // but compose does the validation (so we're fine). 
Set<?> s = type.getSerializer().deserializeForNativeProtocol(value, ByteBufferAccessor.instance, version); SortedSet<ByteBuffer> elements = new TreeSet<>(type.getElementsType()); for (Object element : s) elements.add(type.getElementsType().decompose(element)); return new Value(elements); } catch (MarshalException e) { throw new InvalidRequestException(e.getMessage()); } } public ByteBuffer get(ProtocolVersion protocolVersion) { return CollectionSerializer.pack(elements, elements.size(), protocolVersion); } public boolean equals(SetType st, Value v) { if (elements.size() != v.elements.size()) return false; Iterator<ByteBuffer> thisIter = elements.iterator(); Iterator<ByteBuffer> thatIter = v.elements.iterator(); AbstractType elementsType = st.getElementsType(); while (thisIter.hasNext()) if (elementsType.compare(thisIter.next(), thatIter.next()) != 0) return false; return true; } } // See Lists.DelayedValue public static class DelayedValue extends Term.NonTerminal { private final Comparator<ByteBuffer> comparator; private final Set<Term> elements; public DelayedValue(Comparator<ByteBuffer> comparator, Set<Term> elements) { this.comparator = comparator; this.elements = elements; } public boolean containsBindMarker() { // False since we don't support them in collection return false; } public void collectMarkerSpecification(VariableSpecifications boundNames) { } public Terminal bind(QueryOptions options) throws InvalidRequestException { SortedSet<ByteBuffer> buffers = new TreeSet<>(comparator); for (Term t : elements) { ByteBuffer bytes = t.bindAndGet(options); if (bytes == null) throw new InvalidRequestException("null is not supported inside collections"); if (bytes == ByteBufferUtil.UNSET_BYTE_BUFFER) return UNSET_VALUE; buffers.add(bytes); } return new Value(buffers); } public void addFunctionsTo(List<Function> functions) { Terms.addFunctions(elements, functions); } } public static class Marker extends AbstractMarker { protected Marker(int bindIndex, 
ColumnSpecification receiver) { super(bindIndex, receiver); assert receiver.type instanceof SetType; } public Terminal bind(QueryOptions options) throws InvalidRequestException { ByteBuffer value = options.getValues().get(bindIndex); if (value == null) return null; if (value == ByteBufferUtil.UNSET_BYTE_BUFFER) return UNSET_VALUE; return Value.fromSerialized(value, (SetType)receiver.type, options.getProtocolVersion()); } } public static class Setter extends Operation { public Setter(ColumnMetadata column, Term t) { super(column, t); } public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException { Term.Terminal value = t.bind(params.options); if (value == UNSET_VALUE) return; // delete + add if (column.type.isMultiCell()) params.setComplexDeletionTimeForOverwrite(column); Adder.doAdd(value, column, params); } } public static class Adder extends Operation { public Adder(ColumnMetadata column, Term t) { super(column, t); } @Override public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException { assert column.type.isMultiCell() : "Attempted to add items to a frozen set"; Term.Terminal value = t.bind(params.options); if (value != UNSET_VALUE) doAdd(value, column, params); } static void doAdd(Term.Terminal value, ColumnMetadata column, UpdateParameters params) throws InvalidRequestException { if (value == null) { // for frozen sets, we're overwriting the whole cell if (!column.type.isMultiCell()) params.addTombstone(column); return; } Set<ByteBuffer> elements = ((Value) value).elements; if (column.type.isMultiCell()) { // Guardrails about collection size are only checked for the added elements without considering // already existent elements. This is done so to avoid read-before-write, having additional checks // during SSTable write. 
Guardrails.itemsPerCollection.guard(elements.size(), column.name.toString(), false, params.state); int dataSize = 0; for (ByteBuffer bb : elements) { if (bb == ByteBufferUtil.UNSET_BYTE_BUFFER) continue; Cell cell = params.addCell(column, CellPath.create(bb), ByteBufferUtil.EMPTY_BYTE_BUFFER); dataSize += cell.dataSize(); } Guardrails.collectionSize.guard(dataSize, column.name.toString(), false, params.state); } else { Guardrails.itemsPerCollection.guard(elements.size(), column.name.toString(), false, params.state); Cell cell = params.addCell(column, value.get(ProtocolVersion.CURRENT)); Guardrails.collectionSize.guard(cell.dataSize(), column.name.toString(), false, params.state); } } } // Note that this is reused for Map subtraction too (we subtract a set from a map) public static class Discarder extends Operation { public Discarder(ColumnMetadata column, Term t) { super(column, t); } @Override public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException { assert column.type.isMultiCell() : "Attempted to remove items from a frozen set"; Term.Terminal value = t.bind(params.options); if (value == null || value == UNSET_VALUE) return; // This can be either a set or a single element Set<ByteBuffer> toDiscard = value instanceof Sets.Value ? 
((Sets.Value)value).elements : Collections.singleton(value.get(params.options.getProtocolVersion())); for (ByteBuffer bb : toDiscard) params.addTombstone(column, CellPath.create(bb)); } } public static class ElementDiscarder extends Operation { public ElementDiscarder(ColumnMetadata column, Term k) { super(column, k); } @Override public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException { assert column.type.isMultiCell() : "Attempted to delete a single element in a frozen set"; Term.Terminal elt = t.bind(params.options); if (elt == null) throw new InvalidRequestException("Invalid null set element"); params.addTombstone(column, CellPath.create(elt.get(params.options.getProtocolVersion()))); } } }
package me.sumwu.heartbeat; import com.loopj.android.http.AsyncHttpClient; import com.loopj.android.http.RequestParams; import com.loopj.android.http.ResponseHandlerInterface; import org.json.JSONException; import org.json.JSONObject; /** * Created by justin on 10/18/14. */ public class MisfitApi { private static final String BASE_URL = "https://api.misfitwearables.com"; private static AsyncHttpClient client = new AsyncHttpClient(); public static void get(java.lang.String url, RequestParams params, ResponseHandlerInterface responseHandler) { // url should be "/move/resource/v1/user/me/activity/sessions" client.get(getAbsoluteUrl(url), params, responseHandler); } public static void post(java.lang.String url, RequestParams params, ResponseHandlerInterface responseHandler) { client.post(getAbsoluteUrl(url), params, responseHandler); } private static String getAbsoluteUrl(String relativeUrl) { return BASE_URL + relativeUrl;} }
use std::{collections::HashMap, str::Split};

use crate::input;

/// Solve both parts of AoC 2021 day 5 and print the answers.
pub fn run() {
    let contents = input::get_lines("day05");

    let part1 = h_v_overlaps(&contents);
    println!("part1: {:?}", part1);

    let part2 = h_v_d_overlaps(&contents);
    println!("part2: {:?}", part2);
}

/// Number of vent lines covering each grid point.
type Coordinates = HashMap<(u32, u32), i32>;

/// Parse lines of the form `"x1,y1 -> x2,y2"` into endpoint pairs.
fn parse(lines: &[String]) -> Vec<((u32, u32), (u32, u32))> {
    lines
        .iter()
        .map(|line| {
            let mut endpoints = line.split(" -> ");

            let mut endpoint1 = endpoints.next().unwrap().split(',');
            let x1 = parse_coordinate(&mut endpoint1);
            let y1 = parse_coordinate(&mut endpoint1);

            let mut endpoint2 = endpoints.next().unwrap().split(',');
            let x2 = parse_coordinate(&mut endpoint2);
            let y2 = parse_coordinate(&mut endpoint2);

            ((x1, y1), (x2, y2))
        })
        .collect::<Vec<_>>()
}

/// Pull the next `u32` component out of a comma-split coordinate.
fn parse_coordinate(coordinate: &mut Split<char>) -> u32 {
    coordinate.next().unwrap().parse::<u32>().unwrap()
}

/// Part 1: count grid points covered by two or more horizontal or
/// vertical vent lines (diagonals are ignored).
fn h_v_overlaps(lines: &[String]) -> usize {
    let segments = parse(lines);
    let mut all_coordinates = HashMap::new();
    segments.iter().for_each(|((x1, y1), (x2, y2))| {
        if x1 == x2 {
            // Constant x, varying y: this is a VERTICAL segment.
            // (The old helper was misleadingly named `add_h_lines`.)
            add_vertical_line(&mut all_coordinates, *x1, *y1, *y2)
        }
        if y1 == y2 {
            // Constant y, varying x: this is a HORIZONTAL segment.
            add_horizontal_line(&mut all_coordinates, *x1, *x2, *y1)
        }
    });
    all_coordinates.into_iter().filter(|e| e.1 > 1).count()
}

/// Part 2: like part 1, but 45-degree diagonal lines also count.
fn h_v_d_overlaps(lines: &[String]) -> usize {
    let segments = parse(lines);
    let mut all_coordinates = HashMap::new();
    segments.iter().for_each(|((x1, y1), (x2, y2))| {
        if x1 == x2 {
            add_vertical_line(&mut all_coordinates, *x1, *y1, *y2)
        } else if y1 == y2 {
            add_horizontal_line(&mut all_coordinates, *x1, *x2, *y1)
        } else {
            // Puzzle input guarantees non-axis-aligned lines are exactly 45°.
            add_diagonal_line(&mut all_coordinates, *x1, *x2, *y1, *y2)
        }
    });
    all_coordinates.into_iter().filter(|e| e.1 > 1).count()
}

/// Mark every point of the vertical segment at column `x` from `y1` to `y2`.
fn add_vertical_line(coordinates: &mut Coordinates, x: u32, y1: u32, y2: u32) {
    let range = if y1 < y2 { y1..=y2 } else { y2..=y1 };
    for y in range {
        add_coordinate(coordinates, (x, y))
    }
}

/// Mark every point of the horizontal segment at row `y` from `x1` to `x2`.
fn add_horizontal_line(coordinates: &mut Coordinates, x1: u32, x2: u32, y: u32) {
    let range = if x1 < x2 { x1..=x2 } else { x2..=x1 };
    for x in range {
        add_coordinate(coordinates, (x, y))
    }
}

/// Mark every point of a 45° diagonal segment; x and y advance in lockstep,
/// each in its own direction.
fn add_diagonal_line(coordinates: &mut Coordinates, x1: u32, x2: u32, y1: u32, y2: u32) {
    let xrange: Box<dyn Iterator<Item = _>> = if x1 < x2 {
        Box::new(x1..=x2)
    } else {
        Box::new((x2..=x1).rev())
    };
    let yrange: Box<dyn Iterator<Item = _>> = if y1 < y2 {
        Box::new(y1..=y2)
    } else {
        Box::new((y2..=y1).rev())
    };
    let range = xrange.zip(yrange);
    for (x, y) in range {
        add_coordinate(coordinates, (x, y))
    }
}

/// Increment the coverage count for a single grid point.
fn add_coordinate(coordinates: &mut Coordinates, coordinate: (u32, u32)) {
    let entry = coordinates.entry(coordinate).or_insert(0);
    *entry += 1;
}

/// Debug helper: render the coverage grid like the puzzle examples.
#[allow(dead_code)]
fn print_grid(coordinates: &Coordinates, width: usize, height: usize) {
    for row in 0..height {
        for column in 0..width {
            if let Some(n) = coordinates.get(&(column as u32, row as u32)) {
                print!("{}", n);
            } else {
                print!(".")
            }
        }
        println!();
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    const INPUT: [&str; 10] = [
        "0,9 -> 5,9",
        "8,0 -> 0,8",
        "9,4 -> 3,4",
        "2,2 -> 2,1",
        "7,0 -> 7,4",
        "6,4 -> 2,0",
        "0,9 -> 2,9",
        "3,4 -> 1,4",
        "0,0 -> 8,8",
        "5,5 -> 8,2",
    ];

    #[test]
    fn parsing() {
        let lines = INPUT.map(|s| s.to_string());
        assert_eq!(
            parse(&lines),
            vec![
                ((0, 9), (5, 9)),
                ((8, 0), (0, 8)),
                ((9, 4), (3, 4)),
                ((2, 2), (2, 1)),
                ((7, 0), (7, 4)),
                ((6, 4), (2, 0)),
                ((0, 9), (2, 9)),
                ((3, 4), (1, 4)),
                ((0, 0), (8, 8)),
                ((5, 5), (8, 2)),
            ]
        )
    }

    #[test]
    fn example1() {
        let lines = INPUT.map(|s| s.to_string());
        assert_eq!(h_v_overlaps(&lines), 5);
    }

    #[test]
    fn example2() {
        let lines = INPUT.map(|s| s.to_string());
        assert_eq!(h_v_d_overlaps(&lines), 12);
    }
}
<gh_stars>0 export default class Checksum { static calculate(input: string, part: number): number { var nextDigit = (i: number) => i % input.length, next = part === 1 ? (i: number) => nextDigit(i + 1) : (i: number) => nextDigit(i + input.length / 2); return [...input].reduce((acc, c, i, arr) => c == input[next(i)] ? acc + parseInt(c) : acc, 0); } }
def del_strings(df: pd.DataFrame, cols: "List[str] | str | None" = None) -> pd.DataFrame:
    """Coerce the given columns to numeric and drop rows that fail to convert.

    Each listed column that exists in ``df`` is converted with
    ``pandas.to_numeric`` (non-numeric entries become ``NaN``), then any row
    holding ``NaN`` in a converted column is dropped. Column names absent
    from ``df`` are silently ignored.

    Bug fix: the old default was the bare string ``"Value"``; iterating it
    yielded the characters ``'V', 'a', 'l', 'u', 'e'``, so the default never
    matched a normal column name. The intended default ``["Value"]`` is used
    instead, and a single string argument is now wrapped into a list.

    Note: the numeric conversion mutates ``df``'s columns in place (as the
    original did); the returned frame is the row-filtered result.

    Args:
        df: Input data frame.
        cols: Column name, or list of column names, to convert.
            Defaults to ``["Value"]``.

    Returns:
        ``df`` with the listed columns numeric and non-convertible rows removed.
    """
    if cols is None:
        cols = ["Value"]
    elif isinstance(cols, str):
        # Accept a lone column name for convenience.
        cols = [cols]

    converted = [title for title in cols if title in df.columns]
    for title in converted:
        df[title] = pd.to_numeric(df[title], errors="coerce")
    return df.dropna(subset=converted)
#! /usr/bin/env -S stack runhaskell

import Data.List

-- | Count strictly increasing adjacent pairs: pair each depth with its
-- successor and tally the comparisons that hold.
part1 :: [Int] -> Int
part1 xs = length (filter id (zipWith (<) xs (drop 1 xs)))

-- | Count increases between sums of consecutive three-element windows.
-- The windows are built by zipping the list against itself shifted by
-- one and two positions, then part1 counts the increases.
part2 :: [Int] -> Int
part2 xs = part1 (zipWith3 (\a b c -> a + b + c) xs (drop 1 xs) (drop 2 xs))

main :: IO()
main = do
    contents <- readFile "1.in"
    let depths = map read (lines contents)
    putStr "Solution for part 1: "
    print (part1 depths)
    putStr "Solution for part 2: "
    print (part2 depths)

-- Solution part 1: 1121
-- Solution part 2: 1065
<gh_stars>0
# -*- coding:utf-8 -*-

import json
import time

from flask import current_app

import api.lib.cmdb.ci
from api.extensions import celery
from api.extensions import db
from api.extensions import es
from api.extensions import rd
from api.lib.cmdb.const import CMDB_QUEUE
from api.lib.cmdb.const import REDIS_PREFIX_CI
from api.lib.cmdb.const import REDIS_PREFIX_CI_RELATION
from api.models.cmdb import CIRelation


@celery.task(name="cmdb.ci_cache", queue=CMDB_QUEUE)
def ci_cache(ci_id):
    """Rebuild the cache entry for a single CI.

    Reads the CI from the database and writes it to Elasticsearch when the
    ``USE_ES`` config flag is set, otherwise to Redis under
    ``REDIS_PREFIX_CI`` as a JSON string.
    """
    # Brief pause before reading — presumably to let the triggering DB
    # transaction commit first; TODO confirm this is still needed.
    time.sleep(0.01)
    # Close any lingering session so the read below gets a fresh connection.
    db.session.close()

    m = api.lib.cmdb.ci.CIManager()
    ci = m.get_ci_by_id_from_db(ci_id, need_children=False, use_master=False)

    if current_app.config.get("USE_ES"):
        es.create_or_update(ci_id, ci)
    else:
        rd.create_or_update({ci_id: json.dumps(ci)}, REDIS_PREFIX_CI)

    current_app.logger.info("{0} flush..........".format(ci_id))


@celery.task(name="cmdb.ci_delete", queue=CMDB_QUEUE)
def ci_delete(ci_id):
    """Remove a CI's cache entry from Elasticsearch or Redis."""
    current_app.logger.info(ci_id)

    if current_app.config.get("USE_ES"):
        es.delete(ci_id)
    else:
        rd.delete(ci_id, REDIS_PREFIX_CI)

    current_app.logger.info("{0} delete..........".format(ci_id))


@celery.task(name="cmdb.ci_relation_cache", queue=CMDB_QUEUE)
def ci_relation_cache(parent_id, child_id):
    """Record a parent -> child CI relation in the Redis relation cache.

    The cached value for ``parent_id`` is a JSON object mapping child CI ids
    (stringified) to the child CI's type id.
    """
    db.session.close()

    # Current relation map for this parent; missing key means no relations yet.
    children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
    children = json.loads(children) if children is not None else {}

    cr = CIRelation.get_by(first_ci_id=parent_id, second_ci_id=child_id, first=True, to_dict=False)
    if str(child_id) not in children:
        children[str(child_id)] = cr.second_ci.type_id

    rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)

    current_app.logger.info("ADD ci relation cache: {0} -> {1}".format(parent_id, child_id))


@celery.task(name="cmdb.ci_relation_delete", queue=CMDB_QUEUE)
def ci_relation_delete(parent_id, child_id):
    """Drop a parent -> child CI relation from the Redis relation cache.

    Writes the (possibly now empty) relation map back under ``parent_id``.
    """
    children = rd.get([parent_id], REDIS_PREFIX_CI_RELATION)[0]
    children = json.loads(children) if children is not None else {}

    if str(child_id) in children:
        children.pop(str(child_id))

    rd.create_or_update({parent_id: json.dumps(children)}, REDIS_PREFIX_CI_RELATION)

    current_app.logger.info("DELETE ci relation cache: {0} -> {1}".format(parent_id, child_id))
MOSCOW, Aug. 29 -- Russian Prime Minister Vladimir Putin said Thursday that he had reason to think U.S. personnel were in the combat zone during the recent war in Georgia, adding that if confirmed, their presence suggested "someone in the United States" provoked the conflict to help one of the candidates in the American presidential race. Georgian President Mikheil Saakashvili called the claim "ridiculous," likening it to Putin saying that "extraterrestrials were also there." In Putin's first extended remarks defending Russia's military intervention in Georgia, which has drawn international condemnation, he blamed the Bush administration for failing to stop Georgian leaders from launching the Aug. 7 attack on the breakaway province of South Ossetia that sparked the war. Speaking on CNN, Putin argued that the U.S. policy of training and supplying weapons to the Georgian army had emboldened the country to abandon long-standing negotiations over the future of South Ossetia and to try instead to seize the region by force, an assault that resulted in the deaths of Russian soldiers stationed there as peacekeepers. Putin suggested that U.S. military advisers were working with Georgian forces that clashed with the Russian army, a prospect he described as "very dangerous." "Even during the Cold War, during the harsh confrontation between the Soviet Union and the United States, we always avoided direct clashes between our civilians, even more so between our military personnel," he said in the interview, portions of which were also broadcast on Russian national television. "Ordinary experts, even if they teach military affairs, should not do so in combat zones, but in training areas and training centers," he added. Putin said he based his assertions on information provided to him by the Russian military, but he offered no evidence and cautioned that his "suspicions" required further confirmation. 
Earlier in the day, a senior Russian military official said at a news briefing that Russian troops had recovered an American passport in the rubble of a village near the South Ossetian capital of Tskhinvali, where a Georgian special forces unit had been based during the war. "What was the purpose of that gentleman being among the special forces, and what is he doing today, I so far cannot answer," said Col. Gen. Anatoly Nogovitsyn, deputy chief of the general staff, holding up an enlarged color photocopy of the passport. He identified its owner as Michael Lee White, a resident of Houston, born in 1967, state-owned Vesti television reported. Saakashvili, in an interview Friday morning with The Washington Post, dismissed the passport report as "typical tricks." "I wish we had Americans and American weapons, but it's not the case," he said. "They are living in a parallel world, with a parallel perception. If you say a lie in Russia, it becomes the truth the next day on TV." In Washington, White House spokeswoman Dana Perino said Putin's allegations were "patently false" and sounded "not rational." She added: "It also sounds like his defense officials who said they believe this to be true are giving him really bad advice."
<gh_stars>100-1000
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.olingo.fit.tecsvc.http;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.commons.io.IOUtils;
import org.apache.olingo.client.api.ODataClient;
import org.apache.olingo.commons.api.format.ContentType;
import org.apache.olingo.commons.api.http.HttpHeader;
import org.apache.olingo.commons.api.http.HttpMethod;
import org.apache.olingo.commons.api.http.HttpStatusCode;
import org.apache.olingo.fit.AbstractBaseTestITCase;
import org.apache.olingo.fit.tecsvc.TecSvcConst;
import org.junit.Test;

/**
 * Integration tests for derived and mixed entity/complex types against the
 * technical service: each test issues a raw HTTP GET and asserts the status
 * code, content type, and exact payload fragments for JSON (none/minimal/full
 * metadata) and XML representations.
 */
public class DerivedAndMixedTypeTestITCase extends AbstractBaseTestITCase {

  private static final String SERVICE_URI = TecSvcConst.BASE_URI + "/";

  // Derived complex types in a collection, default JSON metadata.
  @Test
  public void queryESCompCollDerivedJson() throws Exception {
    URL url = new URL(SERVICE_URI + "ESCompCollDerived?$format=json");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    // Derived instances must carry an @odata.type annotation; base instances must not.
    assertTrue(content.contains(
        "[{\"PropertyInt16\":32767,\"PropertyCompAno\":null,\"CollPropertyCompAno\":[{\"PropertyString\":"
        + "\"TEST9876\"}]},{\"PropertyInt16\":12345,\"PropertyCompAno\":{\"@odata.type\":"
        + "\"#olingo.odata.test1.CTBaseAno\",\"PropertyString\":\"Num111\",\"AdditionalPropString\":"
        + "\"Test123\"},\"CollPropertyCompAno\":[{\"@odata.type\":\"#olingo.odata.test1.CTBaseAno\","
        + "\"PropertyString\":\"TEST12345\",\"AdditionalPropString\":\"Additional12345\"},"
        + "{\"PropertyString\":\"TESTabcd\"}]}]}"
        ));
  }

  // Same entity set rendered as Atom/XML: derived types appear via m:type.
  @Test
  public void queryESCompCollDerivedXml() throws Exception {
    URL url = new URL(SERVICE_URI + "ESCompCollDerived?$format=xml");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.APPLICATION_XML,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("<d:PropertyCompAno m:type=\"#olingo.odata.test1.CTBaseAno\">"
        + "<d:PropertyString>Num111</d:PropertyString>"
        + "<d:AdditionalPropString>Test123</d:AdditionalPropString>"
        + "</d:PropertyCompAno>"
        + "<d:CollPropertyCompAno m:type=\"#Collection(olingo.odata.test1.CTTwoPrimAno)\">"
        + "<m:element m:type=\"olingo.odata.test1.CTBaseAno\">"
        + "<d:PropertyString>TEST12345</d:PropertyString>"
        + "<d:AdditionalPropString>Additional12345</d:AdditionalPropString>"
        ));
  }

  // Derived ENTITY type inside an $expand, JSON.
  @Test
  public void queryESAllPrimDerivedJson() throws Exception {
    URL url = new URL(SERVICE_URI + "ESAllPrimDerived(0)?$expand=NavPropertyETTwoPrimMany&$format=json");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("\"@odata.type\":\"#olingo.odata.test1.ETBase\","
        + "\"PropertyInt16\":32766,"
        + "\"PropertyString\":\"Test String1\","
        + "\"AdditionalPropertyString_5\":\"Additional String1\""
        ));
  }

  // Derived entity type inside an $expand, XML: type is conveyed by a category term.
  @Test
  public void queryESAllPrimDerivedXml() throws Exception {
    URL url = new URL(SERVICE_URI + "ESAllPrimDerived(0)?$expand=NavPropertyETTwoPrimMany&$format=xml");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("term=\"#olingo.odata.test1.ETBase\"/>"));
    assertTrue(content.contains(
        "<d:PropertyInt16 m:type=\"Int16\">32766</d:PropertyInt16>"
        + "<d:PropertyString>Test String1</d:PropertyString>"
        + "<d:AdditionalPropertyString_5>Additional String1</d:AdditionalPropertyString_5>"));
  }

  // odata.metadata=none: no @odata.type annotations at all in the payload.
  @Test
  public void queryESCompCollDerivedJsonNone() throws Exception {
    URL url = new URL(SERVICE_URI + "ESCompCollDerived");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=none");
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON_NO_METADATA,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains(
        "[{\"PropertyInt16\":32767,\"PropertyCompAno\":null,\"CollPropertyCompAno\":[{\"PropertyString\":"
        + "\"TEST9876\"}]},{\"PropertyInt16\":12345,\"PropertyCompAno\":{"+
        "\"PropertyString\":\"Num111\",\"AdditionalPropString\":"
        + "\"Test123\"},\"CollPropertyCompAno\":[{"
        + "\"PropertyString\":\"TEST12345\",\"AdditionalPropString\":\"Additional12345\"},"
        + "{\"PropertyString\":\"TESTabcd\"}]}]}"
        ));
  }

  // odata.metadata=full: every instance and property carries type annotations.
  @Test
  public void queryESCompCollDerivedJsonFull() throws Exception {
    URL url = new URL(SERVICE_URI + "ESCompCollDerived");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=full");
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON_FULL_METADATA,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("\"PropertyInt16\":32767,\"PropertyCompAno\":null,"
        + "\"[email protected]\":\"#Collection(olingo.odata.test1.CTTwoPrimAno)\","
        + "\"CollPropertyCompAno\":[{\"@odata.type\":"
        + "\"#olingo.odata.test1.CTTwoPrimAno\",\"PropertyString\":\"TEST9876\"}]},"
        + "{\"@odata.type\":\"#olingo.odata.test1.ETDeriveCollComp\",\"@odata.id\":\"ESCompCollDerived(12345)\","
        + "\"[email protected]\":\"#Int16\",\"PropertyInt16\":12345,\"PropertyCompAno\":"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBaseAno\","
        + "\"PropertyString\":\"Num111\",\"AdditionalPropString\":\"Test123\"},"
        + "\"[email protected]\":\"#Collection(olingo.odata.test1.CTTwoPrimAno)\",\"CollPropertyCompAno\":"
        + "[{\"@odata.type\":\"#olingo.odata.test1.CTBaseAno\","
        + "\"PropertyString\":\"TEST12345\",\"AdditionalPropString\":\"Additional12345\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrimAno\",\"PropertyString\":\"TESTabcd\"}]}]}"
        ));
  }

  // Not used by these raw-HTTP tests.
  @Override
  protected ODataClient getClient() {
    return null;
  }

  // Regression test for OLINGO-1122: lambda filter with a derived-type cast,
  // full metadata.
  @Test
  public void queryESMixPrimWithLambdaDerived_JsonFull_Olingo1122() throws Exception {
    URL url = new URL(SERVICE_URI + "ESMixPrimCollComp?$filter=CollPropertyComp/any"
        + "(f:f/olingo.odata.test1.CTBase/AdditionalPropString%20eq%20%27ADD%20TEST%27)");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=full");
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON_FULL_METADATA,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    final String actualContent = "\"value\":[{\"@odata.type\":\"#olingo.odata.test1.ETMixPrimCollComp\","
        + "\"@odata.id\":\"ESMixPrimCollComp(32767)\","
        + "\"<EMAIL>Int<EMAIL>\":\"#Int16\",\"PropertyInt16\":32767,"
        + "\"<EMAIL>String<EMAIL>\":\"#Collection(String)\","
        + "\"CollPropertyString\":[\"<EMAIL>\",\"<EMAIL>\","
        + "\"<EMAIL>\"],"
        + "\"PropertyComp\":{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"[email protected]\":\"#Int16\",\"PropertyInt16\":111,"
        + "\"PropertyString\":\"TEST A\","
        + "\"Nav<EMAIL>\":\"ESTwoKeyNav(PropertyInt16=1,PropertyString='1')\"},"
        + "\"CollPropertyComp@<EMAIL>\":\"#Collection(olingo.odata.test1.CTTwoPrim)\","
        + "\"CollPropertyComp\":[{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16@<EMAIL>\":\"#Int16\",\"PropertyInt16\":123,"
        + "\"PropertyString\":\"TEST 1\","
        + "\"<EMAIL>\":\"ESTwoKeyNav(PropertyInt16=1,PropertyString='2')\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16@<EMAIL>\":\"#Int16\",\"PropertyInt16\":456,"
        + "\"PropertyString\":\"TEST 2\","
        + "\"<EMAIL>\":\"ESTwoKeyNav(PropertyInt16=1,PropertyString='2')\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBase\",\"[email protected]\":\"#Int16\","
        + "\"PropertyInt16\":789,\"PropertyString\":\"TEST 3\","
        + "\"AdditionalPropString\":\"ADD TEST\","
        + "\"<EMAIL>\":\"ESTwoKeyNav(PropertyInt16=1,PropertyString='2')\"}],"
        + "\"#olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\":"
        + "{\"title\":\"olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\","
        + "\"target\":\"ESMixPrimCollComp(32767)/olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\"}},"
        + "{\"@odata.type\":\"#olingo.odata.test1.ETMixPrimCollComp\","
        + "\"@odata.id\":\"ESMixPrimCollComp(7)\","
        + "\"[email protected]\":\"#Int16\",\"PropertyInt16\":7,"
        + "\"CollPropertyString@<EMAIL>\":\"#Collection(String)\","
        + "\"CollPropertyString\":[\"<EMAIL>\",\"<EMAIL>\","
        + "\"<EMAIL>\"],"
        + "\"PropertyComp\":{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"<EMAIL>\":\"#Int16\","
        + "\"PropertyInt16\":222,\"PropertyString\":\"TEST B\"},"
        + "\"CollPropertyComp@<EMAIL>\":\"#Collection(olingo.odata.test1.CTTwoPrim)\","
        + "\"CollPropertyComp\":[{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16<EMAIL>\":\"#Int16\",\"PropertyInt16\":123,"
        + "\"PropertyString\":\"TEST 1\"},{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16@<EMAIL>\":\"#Int16\",\"PropertyInt16\":456,\"PropertyString\":\"TEST 2\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBase\",\"PropertyInt16@<EMAIL>\":\"#Int16\","
        + "\"PropertyInt16\":789,\"PropertyString\":\"TEST 3\",\"AdditionalPropString\":\"ADD TEST\"}],"
        + "\"#olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\":"
        + "{\"title\":\"olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\","
        + "\"target\":\"ESMixPrimCollComp(7)/olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\"}},"
        + "{\"@odata.type\":\"#olingo.odata.test1.ETMixPrimCollComp\",\"@odata.id\":\"ESMixPrimCollComp(0)\","
        + "\"PropertyInt<EMAIL>\":\"#Int16\",\"PropertyInt16\":0,"
        + "\"CollPropertyString@<EMAIL>\":\"#Collection(String)\","
        + "\"CollPropertyString\":[\"<EMAIL>\",\"<EMAIL>\","
        + "\"<EMAIL>\"],"
        + "\"PropertyComp\":{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16@<EMAIL>\":\"#Int16\",\"PropertyInt16\":333,\"PropertyString\":\"TEST C\"},"
        + "\"CollPropertyComp@odata.<EMAIL>\":\"#Collection(olingo.odata.test1.CTTwoPrim)\","
        + "\"CollPropertyComp\":[{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16@<EMAIL>\":\"#Int16\",\"PropertyInt16\":123,"
        + "\"PropertyString\":\"TEST 1\"},{\"@odata.type\":\"#olingo.odata.test1.CTTwoPrim\","
        + "\"PropertyInt16@<EMAIL>\":\"#Int16\",\"PropertyInt16\":456,\"PropertyString\":\"TEST 2\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBase\",\"PropertyInt16@<EMAIL>\":\"#Int16\","
        + "\"PropertyInt16\":789,\"PropertyString\":\"TEST 3\",\"AdditionalPropString\":\"ADD TEST\"}],"
        + "\"#olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\":"
        + "{\"title\":\"olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\","
        + "\"target\":\"ESMixPrimCollComp(0)/olingo.odata.test1.BAETMixPrimCollCompRTCTTwoPrim\"}}]";
    assertTrue(content.contains(actualContent));
  }

  // Same OLINGO-1122 lambda filter, minimal metadata: only the derived
  // instances keep an @odata.type annotation.
  @Test
  public void queryESMixPrimWithLambdaDerived_JsonMinimal_Olingo1122() throws Exception {
    URL url = new URL(SERVICE_URI + "ESMixPrimCollComp?$filter=CollPropertyComp/any"
        + "(f:f/olingo.odata.test1.CTBase/AdditionalPropString%20eq%20%27ADD%20TEST%27)");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=minimal");
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("\"value\":[{\"PropertyInt16\":32767,"
        + "\"CollPropertyString\":[\"<EMAIL>\","
        + "\"<EMAIL>\","
        + "\"<EMAIL>\"],"
        + "\"PropertyComp\":{\"PropertyInt16\":111,\"PropertyString\":\"TEST A\"},"
        + "\"CollPropertyComp\":["
        + "{\"PropertyInt16\":123,\"PropertyString\":\"TEST 1\"},"
        + "{\"PropertyInt16\":456,\"PropertyString\":\"TEST 2\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBase\","
        + "\"PropertyInt16\":789,\"PropertyString\":\"TEST 3\","
        + "\"AdditionalPropString\":\"ADD TEST\"}]},"
        + "{\"PropertyInt16\":7,\"CollPropertyString\":"
        + "[\"[email protected]\","
        + "\"Employee2@<EMAIL>.example\","
        + "\"<EMAIL>\"],"
        + "\"PropertyComp\":{\"PropertyInt16\":222,\"PropertyString\":\"TEST B\"},"
        + "\"CollPropertyComp\":[{\"PropertyInt16\":123,\"PropertyString\":\"TEST 1\"},"
        + "{\"PropertyInt16\":456,\"PropertyString\":\"TEST 2\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBase\","
        + "\"PropertyInt16\":789,\"PropertyString\":\"TEST 3\","
        + "\"AdditionalPropString\":\"ADD TEST\"}]},"
        + "{\"PropertyInt16\":0,\"CollPropertyString\":["
        + "\"<EMAIL>1@<EMAIL>\","
        + "\"<EMAIL>\","
        + "\"<EMAIL>\"],"
        + "\"PropertyComp\":{\"PropertyInt16\":333,\"PropertyString\":\"TEST C\"},"
        + "\"CollPropertyComp\":["
        + "{\"PropertyInt16\":123,\"PropertyString\":\"TEST 1\"},"
        + "{\"PropertyInt16\":456,\"PropertyString\":\"TEST 2\"},"
        + "{\"@odata.type\":\"#olingo.odata.test1.CTBase\","
        + "\"PropertyInt16\":789,\"PropertyString\":\"TEST 3\","
        + "\"AdditionalPropString\":\"ADD TEST\"}]}]"));
  }

  // Address a single entity through a type-cast path segment.
  @Test
  public void queryESTwoPrimWithEntityTypeCast() throws Exception {
    URL url = new URL(SERVICE_URI + "ESTwoPrim(111)/olingo.odata.test1.ETBase");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=full");
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON_FULL_METADATA,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("\"@odata.type\":\"#olingo.odata.test1.ETBase\","
        + "\"@odata.id\":\"ESBase(111)\","
        + "\"[email protected]\":\"#Int16\","
        + "\"PropertyInt16\":111,"
        + "\"PropertyString\":\"TEST A\","
        + "\"AdditionalPropertyString_5\":\"TEST A 0815\""));
  }

  // Entity-type cast used inside a $filter expression.
  @Test
  public void queryESTwoPrimWithEntityTypeCastInFilter() throws Exception {
    URL url = new URL(SERVICE_URI + "ESTwoPrim?$filter=olingo.odata.test1.ETBase/"
        + "AdditionalPropertyString_5%20eq%20%27TEST%20A%200815%27");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=full");
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON_FULL_METADATA,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("\"value\":[{\"@odata.type\":\"#olingo.odata.test1.ETBase\","
        + "\"@odata.id\":\"ESBase(111)\","
        + "\"[email protected]\":\"#Int16\","
        + "\"PropertyInt16\":111,"
        + "\"PropertyString\":\"TEST A\","
        + "\"AdditionalPropertyString_5\":\"TEST A 0815\""
        + ",\"#olingo.odata.test1.BAETBaseETTwoBaseRTETTwoBase\":"
        + "{\"title\":\"olingo.odata.test1.BAETBaseETTwoBaseRTETTwoBase\","
        + "\"target\":\"ESBase(111)/olingo.odata.test1.BAETBaseETTwoBaseRTETTwoBase\"}}]"));
  }

  // Entity-type cast used inside an $expand path.
  @Test
  public void queryESAllPrimWithEntityTypeCastInExpand() throws Exception {
    URL url = new URL(SERVICE_URI + "ESAllPrim(0)?$expand=NavPropertyETTwoPrimOne/olingo.odata.test1.ETBase");

    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
    connection.setRequestMethod(HttpMethod.GET.name());
    connection.setRequestProperty(HttpHeader.ACCEPT, "application/json;odata.metadata=minimal");
    connection.connect();

    assertEquals(HttpStatusCode.OK.getStatusCode(), connection.getResponseCode());
    assertEquals(ContentType.JSON,
        ContentType.create(connection.getHeaderField(HttpHeader.CONTENT_TYPE)));

    final String content = IOUtils.toString(connection.getInputStream());
    assertTrue(content.contains("\"PropertyInt16\":0,"
        + "\"PropertyString\":\"\","
        + "\"PropertyBoolean\":false,"
        + "\"PropertyByte\":0,"
        + "\"PropertySByte\":0,"
        + "\"PropertyInt32\":0,"
        + "\"PropertyInt64\":0,"
        + "\"PropertySingle\":0.0,"
        + "\"PropertyDouble\":0.0,"
        + "\"PropertyDecimal\":0,"
        + "\"PropertyBinary\":\"\","
        + "\"PropertyDate\":\"1970-01-01\","
        + "\"PropertyDateTimeOffset\":\"2005-12-03T00:00:00Z\","
        + "\"PropertyDuration\":\"PT0S\","
        + "\"PropertyGuid\":\"76543201-23ab-cdef-0123-456789cccddd\","
        + "\"PropertyTimeOfDay\":\"00:01:01\",\"NavPropertyETTwoPrimOne\":{"
        + "\"@odata.type\":\"#olingo.odata.test1.ETBase\","
        + "\"PropertyInt16\":111,\"PropertyString\":\"TEST A\","
        + "\"AdditionalPropertyString_5\":\"TEST A 0815\"}"));
  }
}
<filename>src/goStatus.ts
import * as vscode from "vscode";

// Shared output channel used to surface Go coverage messages in the VS Code UI.
// NOTE(review): created eagerly at module load — confirm the extension disposes
// it on deactivation.
export const outputChannel = vscode.window.createOutputChannel("Go Coverage");
/**
 * Compare the two database tables, treating unnamed tables as not different.
 * <p>
 * If either table lacks a name there is nothing meaningful to compare, so
 * {@code false} is returned; otherwise the decision is deferred to the
 * superclass comparison.
 *
 * @return {@code false} when either table name is empty, otherwise the
 *         result of the superclass {@code different()}
 */
@Override
public boolean different() {
    DatabaseTable table1 = (DatabaseTable) this.getObject1();
    DatabaseTable table2 = (DatabaseTable) this.getObject2();
    // isEmpty() states the intent more directly than length() == 0.
    if (table1.getName().isEmpty() || table2.getName().isEmpty()) {
        return false;
    }
    return super.different();
}
/**
 * Build a {@link SyncMapPermissionFetcher} ready to execute a fetch.
 *
 * @param pathServiceSid The SID of the Sync Service with the Sync Map
 *                       Permission resource to fetch
 * @param pathMapSid The SID of the Sync Map with the Sync Map Permission
 *                   resource to fetch
 * @param pathIdentity The application-defined string that uniquely identifies
 *                     the User's Sync Map Permission resource to fetch
 * @return SyncMapPermissionFetcher capable of executing the fetch
 */
public static SyncMapPermissionFetcher fetcher(final String pathServiceSid,
                                               final String pathMapSid,
                                               final String pathIdentity) {
    final SyncMapPermissionFetcher fetcher =
        new SyncMapPermissionFetcher(pathServiceSid, pathMapSid, pathIdentity);
    return fetcher;
}
<gh_stars>1-10
/// <reference path="../typescript/boilerplate.d.ts" />

// Ambient option types consumed by the magicspace boilerplate.
declare namespace Magicspace {
  interface BoilerplateOptions {
    // Options forwarded to the power-app boilerplate.
    powerApp: BoilerplateOptions.PowerAppOptions;
  }

  namespace BoilerplateOptions {
    interface PowerAppOptions {
      // Port the generated app listens on.
      port: number;
      // Optional list of image references.
      images?: string[];
      /**
       * Client page templates are generated from these `pages` entries.
       */
      pages?: string[];
    }
  }
}
#include <stdio.h>

/*
 * Reads n departure times (hh mm pairs, ascending) and a duration m, then
 * prints the earliest start time (as "hh mm") of a free slot of length m:
 *   - before the first departure if it fits ([0, t0) must hold m+1 minutes),
 *   - otherwise in the first gap between consecutive departures that is at
 *     least 2*m+2 minutes wide (slot starts m+1 minutes after the earlier
 *     departure),
 *   - otherwise m+1 minutes after the last departure.
 * Rewritten without the original `goto`; the unused third column of the
 * times array was also dropped (only hh and mm were ever stored).
 */
int main(void)
{
    int n, m, i;
    int t[101]; /* departure times in minutes since midnight (n <= 101 assumed) */

    if (scanf("%d%d", &n, &m) != 2) {
        return 0; /* malformed input: nothing sensible to print */
    }
    for (i = 0; i < n; i++) {
        int hh, mm;
        if (scanf("%d%d", &hh, &mm) != 2) {
            return 0;
        }
        t[i] = hh * 60 + mm;
    }

    int answer;
    if (t[0] >= m + 1) {
        /* The slot fits before the first departure: start at midnight. */
        answer = 0;
    } else {
        int found = 0;
        for (i = 0; i < n - 1; i++) {
            /* A gap must hold the slot plus one guard minute on each side. */
            if (t[i + 1] - t[i] >= 2 * m + 2) {
                answer = t[i] + m + 1;
                found = 1;
                break;
            }
        }
        if (!found) {
            /* No interior gap: start after the last departure. */
            answer = t[n - 1] + m + 1;
        }
    }

    printf("%d %d", answer / 60, answer % 60);
    return 0;
}
/* Dump a buffer which includes all operations, qbits states and a print buffer to a json format file. */ func (q *QBitsCircuit) FileDumpAll(path string) { s := q.DumpAll() file, err := os.Create(path) if err != nil { log.Fatal(err) } defer file.Close() file.Write(([]byte)(s)) }
// Add used to add an EnvVar to the EnvVarBuilder. func (b *EnvVarBuilder) Add(iEnvVar *corev1.EnvVar) *EnvVarBuilder { found := false for id, envVar := range b.envVars { if envVar.Name == iEnvVar.Name { found = true if b.options.AllowOverride { b.envVars[id] = *iEnvVar } } } if !found { b.envVars = append(b.envVars, *iEnvVar) } return b }
package edu.uc.action;

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

import com.liuvei.common.PagerItem;
import com.liuvei.common.SysFun;

import edu.uc.bean.Manager;
import edu.uc.service.AdminService;
import edu.uc.util.MD5util;

/**
 * Struts CRUD action for administrator (Manager) accounts: paged listing,
 * search by user name, creation, deletion, and password change / reset.
 */
@Component("ManagerAction")
@Scope("prototype")
public class ManagerAction extends CrudAction{
	/**
	 *
	 */
	private static final long serialVersionUID = -1011748844560817334L;

	@Autowired
	private AdminService adminService;

	// Request parameters bound by Struts.
	private String userName;
	private String userId;
	private String id;
	// Distinguishes a password reset ("0") from a password change (anything else).
	private String status;
	private String newPass;
	private String checkPass;

	public String getNewPass() {
		return newPass;
	}
	public void setNewPass(String newPass) {
		this.newPass = newPass;
	}
	public String getCheckPass() {
		return checkPass;
	}
	public void setCheckPass(String checkPass) {
		this.checkPass = checkPass;
	}
	public String getStatus() {
		return status;
	}
	public void setStatus(String status) {
		this.status = status;
	}
	public String getId() {
		return id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getUserId() {
		return userId;
	}
	public void setUserId(String userId) {
		this.userId = userId;
	}
	public String getUserName() {
		return userName;
	}
	public void setUserName(String userName) {
		this.userName = userName;
	}

	/**
	 * Show the full administrator list, one page at a time.
	 */
	@Override
	public String list() {
		List<Manager> vDataList = null;
		// Paging: create the pager object (pre-packaged helper).
		PagerItem pagerItem = new PagerItem();
		pagerItem.parsePageSize(pageSize);
		pagerItem.parsePageNum(pageNum);
		// Paging step 1: define the total count, initialized to 0.
		Long rowCount=0L;
		// 2. Query the total number of records.
		rowCount = adminService.count();
		// 3. Hand the count to pagerItem so it can derive the paging values.
		pagerItem.changeRowCount(rowCount);
		// 4. Fetch the data for the requested page.
		vDataList = adminService.pager(pagerItem.getPageNum(), pagerItem.getPageSize());
		// 5. Set the navigation URL used by the paging links.
		pagerItem.changeUrl(SysFun.generalUrl(requestURI,queryString));
		request.put("pagerItem", pagerItem);
		request.put("DataList", vDataList);
		return "list";
	}

	/**
	 * Show the administrator list filtered by user name when one is given,
	 * otherwise the unfiltered list.
	 */
	@Override
	public String listDeal() {
		request.put("userName", userName);
		List<Manager> vDataList =null;
		// Create the pager object.
		PagerItem pagerItem = new PagerItem();
		pagerItem.parsePageNum(pageNum);
		pagerItem.parsePageSize(pageSize);
		// Record-count variable.
		Long rowCount = 0L;
		if(!SysFun.isNullOrEmpty(userName))
		{
			rowCount = adminService.countByName(userName);
			pagerItem.changeRowCount(rowCount);
			vDataList = adminService.pagerByName(userName, pagerItem.getPageNum(), pagerItem.getPageSize());
		}
		else
		{
			rowCount = adminService.count();
			pagerItem.changeRowCount(rowCount);
			vDataList = adminService.pager(pagerItem.getPageNum(), pagerItem.getPageSize());
		}
		pagerItem.changeUrl(SysFun.generalUrl(requestURI,queryString));
		request.put("pagerItem", pagerItem);
		request.put("DataList", vDataList);
		return "list";
	}

	@Override
	public String insert() {
		return "insert";
	}

	/**
	 * Validate the submitted fields and create a new administrator with
	 * priority 2 and a salted-MD5 initial password.
	 */
	@Override
	public String insertDeal() {
		request.put("userName",userName);
		request.put("userId",userId);
		//System.out.println(selectedCategoryName);
		String vMsg="";
		if(SysFun.isNullOrEmpty(userId))
		{
			vMsg+="管理员账号不能为空";
		}
		if(SysFun.isNullOrEmpty(userName))
		{
			vMsg+="管理员姓名不能为空";
		}
		if(!SysFun.isNullOrEmpty(vMsg))
		{
			request.put("msg", vMsg);
			System.out.println(vMsg);
			return "insert";
		}
		Manager bean = new Manager();
		bean.setUserId(userId);
		bean.setUserName(userName);
		bean.setPriority(2);
		bean.setUserPass(MD5util.getMD5BySalt(userId, "12<PASSWORD>6"));
		Long result = 0l;
		try {
			result = adminService.insert(bean);
		} catch(Exception e) {
			vMsg+="添加失败,原因"+e.getMessage();
		}
		if(result>0)
		{
			return "go_preload";
		}
		else
		{
			request.put("msg", vMsg);
			return "insert";
		}
	}

	@Override
	public String delete() {
		return "delete";
	}

	@Override
	public String deleteDeal() {
		if(!SysFun.isNullOrEmpty(id))
		{
			Long vId = SysFun.parseLong(id);
			Long result = adminService.delete(vId);
			if(result>0)
			{
				return "go_ok";
			}
		}
		return "go_no";
	}

	/**
	 * status "0": reset the password to the salted default and reload;
	 * otherwise go to the password-change form.
	 */
	@Override
	public String update() {
		request.put("id", id);
		request.put("status",status);
		Long vId = SysFun.parseLong(id);
		// NOTE(review): status.equals("0") throws NPE when status is absent
		// from the request; confirm callers always supply it.
		if(status.equals("0"))
		{
			Manager bean = adminService.load(vId);
			bean.setUserPass(MD5util.getMD5BySalt(bean.getUserId(), "<PASSWORD>6"));
			adminService.update(bean);
			return "go_reload";
		}
		else
		{
			return "update";
		}
	}

	/**
	 * Validate the new password (and its confirmation) and store it
	 * salted-MD5 hashed on the loaded administrator record.
	 */
	@Override
	public String updateDeal() {
		request.put("id", id);
		request.put("newPass",newPass);
		request.put("checkPass", checkPass);
		String vMsg="";
		if(SysFun.isNullOrEmpty(id))
		{
			vMsg+="用户主键不能为空";
		}
		if(SysFun.isNullOrEmpty(newPass))
		{
			vMsg+="新密码不能为空";
		}
		if(SysFun.isNullOrEmpty(checkPass))
		{
			vMsg+="确认密码不能为空";
		}
		// NOTE(review): the empty checks above do not return early, so
		// newPass.equals(checkPass) can NPE when newPass is null — verify.
		if(!newPass.equals(checkPass))
		{
			vMsg+="两次输入不一致";
		}
		if(!SysFun.isNullOrEmpty(vMsg))
		{
			request.put("msg", vMsg);
			System.out.println(vMsg);
			return "update";
		}
		Long vId = SysFun.parseLong(id);
		Manager bean = adminService.load(vId);
		if(bean==null)
		{
			vMsg+="记录不存在";
		}
		if(!SysFun.isNullOrEmpty(vMsg))
		{
			request.put("msg", vMsg);
			System.out.println(vMsg);
			return "update";
		}
		bean.setUserPass(MD5util.getMD5BySalt(bean.getUserId(), newPass));
		Long result = 0l;
		try {
			result = adminService.update(bean);
		} catch(Exception e) {
			vMsg+="修改失败,原因"+e.getMessage();
		}
		if(result>0)
		{
			return "go_preload";
		}
		else
		{
			request.put("msg", vMsg);
			return "update";
		}
	}

	@Override
	public String detail() {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public String detailDeal() {
		// TODO Auto-generated method stub
		return null;
	}
}
<reponame>pgollucci/p6-casino-craps<gh_stars>0
// Unit tests for the pass-line bet: construction, make()/addOdds(),
// point handling, and win/loss resolution against fixed dice rolls.
import { BetStateType } from '../../src/bet';
import { PassLineBet } from '../../src/bet/line';
import * as roll from '../../src/dice/roll';

// XXX: Missing odds, oddsPayout in constructor setting

test('PassLineBet create', () => {
  // GIVEN
  const passLineBet = new PassLineBet();

  // WHEN

  // THEN
  expect(passLineBet.bet).toBe(0);
  expect(passLineBet.odds).toBe(0);
  expect(passLineBet.betState).toBe(BetStateType.ON);
});

test('PassLineBet create w/ Off state', () => {
  // GIVEN
  const passLineBet = new PassLineBet();

  // WHEN
  passLineBet.make({ bet: 10, betState: BetStateType.OFF });

  // THEN
  expect(passLineBet.bet).toBe(10);
  expect(passLineBet.odds).toBe(0);
  expect(passLineBet.betState).toBe(BetStateType.OFF);
});

test('PassLineBet make', () => {
  // GIVEN
  const passLineBet = new PassLineBet();

  // WHEN
  passLineBet.make();

  // THEN
  expect(passLineBet.bet).toBe(0);
  expect(passLineBet.odds).toBe(0);
});

test('PassLineBet make w/ bet', () => {
  // GIVEN
  const passLineBet = new PassLineBet();

  // WHEN
  passLineBet.make({ bet: 10 });

  // THEN
  expect(passLineBet.bet).toBe(10);
  expect(passLineBet.odds).toBe(0);
  expect(passLineBet.betState).toBe(BetStateType.ON);
});

test('PassLineBet w/ bet', () => {
  // GIVEN
  const passLineBet = new PassLineBet({ bet: 10 });

  // WHEN

  // THEN
  expect(passLineBet.bet).toBe(10);
  expect(passLineBet.odds).toBe(0);
});

// NOTE(review): duplicate test name with the case above ('PassLineBet make
// w/ bet') — this one overwrites a constructor bet via make(); consider
// renaming for clearer reporting.
test('PassLineBet make w/ bet', () => {
  // GIVEN
  const passLineBet = new PassLineBet({ bet: 10 });

  // WHEN
  passLineBet.make({ bet: 20 }); // overwrite

  // THEN
  expect(passLineBet.bet).toBe(20);
});

test('PassLineBet make w/ bet and odds', () => {
  // GIVEN
  const passLineBet = new PassLineBet({ bet: 10, odds: 30 });

  // WHEN
  passLineBet.make({ bet: 40 }); // overwrite
  passLineBet.addOdds({ odds: 120 }); // overwrite

  // THEN
  expect(passLineBet.bet).toBe(40);
  expect(passLineBet.odds).toBe(120);
});

test('PassLineBet setPoint()', () => {
  // GIVEN
  const passLineBet = new PassLineBet({ bet: 10, odds: 30 });

  // WHEN
  passLineBet.make({ bet: 40 }); // overwrite
  passLineBet.setPoint(roll.R4_22);
  passLineBet.addOdds({ odds: 120 }); // overwrite

  // THEN
  expect(passLineBet.bet).toBe(40);
  expect(passLineBet.point).toBe(roll.R4_22);
  expect(passLineBet.odds).toBe(120);
});

test('PassLineBet isWin()', () => {
  // GIVEN
  const passLineBet = new PassLineBet({ bet: 10, odds: 30 });

  // WHEN
  passLineBet.make({ bet: 40 }); // overwrite
  passLineBet.addOdds({ odds: 120 }); // overwrite

  // THEN
  expect(passLineBet.bet).toBe(40);
  expect(passLineBet.isWin(roll.R10_55)).toBeFalsy(); // no point no win
  expect(passLineBet.isWin(roll.R7_34)).toBeTruthy(); // no point 7/11 = win
  expect(passLineBet.isWin(roll.R11_56)).toBeTruthy(); // no point 7/11 = win
  expect(passLineBet.odds).toBe(120);

  // WHEN
  passLineBet.setPoint(roll.R4_22);

  // THEN (4)
  expect(passLineBet.point).toBe(roll.R4_22);
  expect(passLineBet.isWin(roll.R4_22)).toBeTruthy();

  // 5
  passLineBet.setPoint(roll.R5_23);
  expect(passLineBet.isWin(roll.R5_23)).toBeTruthy();

  // 6
  passLineBet.setPoint(roll.R6_24);
  expect(passLineBet.isWin(roll.R6_24)).toBeTruthy();

  // any when 7 is not a win
  passLineBet.setPoint(roll.R6_24);
  expect(passLineBet.isWin(roll.R7_34)).toBeFalsy();
});

test('PassLineBet isLoss()', () => {
  // GIVEN
  const passLineBet = new PassLineBet({ bet: 10, odds: 30 });

  // WHEN
  passLineBet.make({ bet: 40 }); // overwrite
  passLineBet.addOdds({ odds: 120 }); // overwrite

  // THEN
  expect(passLineBet.bet).toBe(40);
  expect(passLineBet.isLoss(roll.R10_55)).toBeFalsy(); // no point no Loss
  expect(passLineBet.isLoss(roll.R7_34)).toBeFalsy(); // no point 7 is a win
  expect(passLineBet.isLoss(roll.R11_56)).toBeFalsy(); // no point 11 is a win
  expect(passLineBet.odds).toBe(120);

  // WHEN
  passLineBet.setPoint(roll.R4_22);

  // THEN (4)
  expect(passLineBet.point).toBe(roll.R4_22);
  expect(passLineBet.isLoss(roll.R4_22)).toBeFalsy();

  // 5
  passLineBet.setPoint(roll.R5_23);
  expect(passLineBet.isLoss(roll.R5_23)).toBeFalsy();

  // 6
  passLineBet.setPoint(roll.R6_24);
  expect(passLineBet.isLoss(roll.R6_24)).toBeFalsy();

  // any when 7 is a loss
  passLineBet.setPoint(roll.R6_24);
  expect(passLineBet.isLoss(roll.R7_34)).toBeTruthy();
});
import flask
import json
import argparse
import base64
import sqlite3
import sys

TEST_DB = 'example_app_test_db.db'

TEST_DB_SCHEMA = '''\
create table if not exists events (
    _id integer primary key autoincrement,
    date_created datetime default current_timestamp,
    token varchar,
    data blob
);'''

# FIX: the original queries interpolated request values with str.format,
# which broke on any quote character in the JSON payload and was an SQL
# injection vector. All value positions now use sqlite3 `?` placeholders.
INSERT_EVENT_QUERY = 'insert into events (token, data) values (?, ?);'
GET_EVENTS_QUERY = 'select _id, date_created, token, data from events;'
GET_EVENTS_BY_TOKEN_QUERY_TPL = 'select _id, date_created, token, data ' \
                                'from events where token=?'
COMMIT = 'commit;'
COUNT_EVENTS_QUERY = 'select count(1) from events;'
COUNT_EVENTS_BY_TOKEN_QUERY_TPL = 'select count(1) from events where token=?'
DELETE_EVENTS_QUERY = 'delete from events;'
DELETE_EVENTS_BY_TOKEN_QUERY_TPL = 'delete from events where token=?'

app = flask.Flask('alooma-iossdk-test-server')


@app.route('/kill', methods=['POST'])
def kill_app():
    """Shut down the werkzeug dev server (test-teardown hook)."""
    func = flask.request.environ.get('werkzeug.server.shutdown')
    if func is None:
        raise RuntimeError('Not running with the Werkzeug Server')
    func()
    return 'Shutting down...\n'


@app.route('/track/', methods=['POST'])
def track_event():
    """Decode the base64 'data' form field and store each event row.

    The payload is a base64-encoded JSON list of event dicts; each event
    carries its token under properties.token.
    """
    decoded_data = base64.decodebytes(flask.request.form['data'].encode())
    received_events = json.loads(decoded_data)
    if received_events:
        cursor = get_db().cursor()
        for idx, e in enumerate(received_events):
            data = json.dumps(e)
            token = e['properties']['token']
            event_type = e.get('event', '<<nil>>')
            app.logger.info('event idx=%d type=%s token=%s',
                            idx, event_type, token)
            # Parameterized insert: values are bound, never spliced into SQL.
            cursor.execute(INSERT_EVENT_QUERY, (token, data))
        cursor.execute(COMMIT)
    return "0", 200


@app.route('/events/', methods=['GET', 'DELETE'])
def events():
    """List or purge every stored event."""
    if flask.request.method == 'GET':
        return get_events()
    elif flask.request.method == 'DELETE':
        return delete_events()


@app.route('/events/<token>', methods=['GET', 'DELETE'])
def events_by_token(token):
    """List or purge the events recorded for one token."""
    if flask.request.method == 'GET':
        return get_events(token)
    elif flask.request.method == 'DELETE':
        return delete_events(token)


def get_events(token=None):
    """Return a JSON document with all events, optionally filtered by token."""
    cursor = get_db().cursor()
    if token:
        all_events = cursor.execute(GET_EVENTS_BY_TOKEN_QUERY_TPL,
                                    (token,)).fetchall()
    else:
        all_events = cursor.execute(GET_EVENTS_QUERY).fetchall()
    app.logger.info('GET EVENTS: %s', all_events)
    result = {
        'events': [
            {
                '_id': r[0],
                'timestamp': r[1],
                'token': r[2],
                'data': json.loads(r[3])
            }
            for r in all_events
        ]
    }
    return flask.jsonify(result)


def delete_events(token=None):
    """Delete all events (or one token's events) and report how many."""
    cursor = get_db().cursor()
    if token:
        num_events = int(cursor.execute(COUNT_EVENTS_BY_TOKEN_QUERY_TPL,
                                        (token,)).fetchall()[0][0])
        cursor.execute(DELETE_EVENTS_BY_TOKEN_QUERY_TPL, (token,))
    else:
        num_events = int(cursor.execute(COUNT_EVENTS_QUERY).fetchall()[0][0])
        cursor.execute(DELETE_EVENTS_QUERY)
    cursor.execute(COMMIT)
    return flask.jsonify({
        'success': True,
        'token': token,
        'num_deleted_events': num_events
    })


def get_db():
    """Return the per-request sqlite connection, opening it on first use."""
    db = getattr(flask.g, '_database', None)
    if db is None:
        db = flask.g._database = sqlite3.connect(TEST_DB)
    return db


@app.teardown_appcontext
def close_connection(exception):
    """Close the request-scoped sqlite connection, if one was opened."""
    db = getattr(flask.g, '_database', None)
    if db is not None:
        db.close()


def init_db():
    """Create the events table if it does not exist yet."""
    with app.app_context():
        db = get_db()
        db.cursor().executescript(TEST_DB_SCHEMA)
        db.commit()


if __name__ == '__main__':
    parser = argparse.ArgumentParser('simple iossdk http server')
    parser.add_argument('--host', '-d', default='0.0.0.0')
    parser.add_argument('--port', '-p', default='8000')
    parser.add_argument('--debug', action='store_true')
    args = parser.parse_args()
    init_db()
    app.run(host=args.host, port=args.port, debug=args.debug)
/**
 * This method provides object parsing from String and validation.
 * It also post-processes type-specific detached data (focus photo, shadow
 * attribute definitions, lookup-table rows, campaign cases, task result).
 */
private <T extends ObjectType> PrismObject<T> updateLoadedObject(GetObjectResult result, Class<T> type,
        String oid, Collection<SelectorOptions<GetOperationOptions>> options,
        Holder<PrismObject<T>> partialValueHolder,
        Session session, OperationResult operationResult) throws SchemaException {

    byte[] fullObject = result.getFullObject();
    String xml = RUtil.getXmlFromByteArray(fullObject, getConfiguration().isUseZip());
    PrismObject<T> prismObject;
    try {
        // Compatibility mode so objects stored by older versions still parse.
        ParsingContext parsingContext = prismContext.createParsingContextForCompatibilityMode();
        prismObject = prismContext.parserFor(xml).language(SqlRepositoryServiceImpl.DATA_LANGUAGE).context(parsingContext).parse();
        if (parsingContext.hasWarnings()) {
            LOGGER.warn("Object {} parsed with {} warnings",
                    ObjectTypeUtil.toShortString(prismObject), parsingContext.getWarnings().size());
        }
    } catch (SchemaException | RuntimeException | Error e) {
        // Log the raw serialized form to aid diagnosis, then rethrow.
        LOGGER.error("Couldn't parse object {} {}: {}: {}\n{}",
                type.getSimpleName(), oid, e.getClass().getName(), e.getMessage(), xml, e);
        throw e;
    }
    attachDiagDataIfRequested(prismObject, fullObject, options);
    if (FocusType.class.isAssignableFrom(prismObject.getCompileTimeClass())) {
        if (SelectorOptions.hasToLoadPath(FocusType.F_JPEG_PHOTO, options)) {
            // The photo is stored in a separate table; load it only on request.
            Query query = session.getNamedQuery("get.focusPhoto");
            query.setParameter("oid", prismObject.getOid());
            byte[] photo = (byte[]) query.uniqueResult();
            if (photo != null) {
                PrismProperty property = prismObject.findOrCreateProperty(FocusType.F_JPEG_PHOTO);
                property.setRealValue(photo);
            }
        }
    } else if (ShadowType.class.equals(prismObject.getCompileTimeClass())) {
        // Stored associations are dropped here.
        prismObject.removeContainer(ShadowType.F_ASSOCIATION);

        GetOperationOptions rootOptions = SelectorOptions.findRootOptions(options);
        if (GetOperationOptions.isRaw(rootOptions)) {
            LOGGER.debug("Loading definitions for shadow attributes.");

            Short[] counts = result.getCountProjection();
            Class[] classes = GetObjectResult.EXT_COUNT_CLASSES;
            for (int i = 0; i < classes.length; i++) {
                // Skip extension tables with no rows for this shadow.
                if (counts[i] == null || counts[i] == 0) {
                    continue;
                }
                applyShadowAttributeDefinitions(classes[i], prismObject, session);
            }
            LOGGER.debug("Definitions for attributes loaded. Counts: {}", Arrays.toString(counts));
        } else {
            LOGGER.debug("Not loading definitions for shadow attributes, raw=false");
        }
    } else if (LookupTableType.class.equals(prismObject.getCompileTimeClass())) {
        lookupTableHelper.updateLoadedLookupTable(prismObject, options, session);
    } else if (AccessCertificationCampaignType.class.equals(prismObject.getCompileTimeClass())) {
        caseHelper.updateLoadedCampaign(prismObject, options, session);
    } else if (TaskType.class.equals(prismObject.getCompileTimeClass())) {
        if (SelectorOptions.hasToLoadPath(TaskType.F_RESULT, options)) {
            // The full operation result is stored separately and loaded on demand.
            Query query = session.getNamedQuery("get.taskResult");
            query.setParameter("oid", prismObject.getOid());
            byte[] opResult = (byte[]) query.uniqueResult();
            if (opResult != null) {
                String xmlResult = RUtil.getXmlFromByteArray(opResult, true);
                OperationResultType resultType = prismContext.parserFor(xmlResult).parseRealValue(OperationResultType.class);

                PrismProperty property = prismObject.findOrCreateProperty(TaskType.F_RESULT);
                property.setRealValue(resultType);
                prismObject.setPropertyRealValue(TaskType.F_RESULT_STATUS, resultType.getStatus());
            }
        }
        // NOTE(review): `query` is declared inside the if-block above, so it is
        // out of scope here and this statement cannot compile as written. This
        // looks like a merge artifact — presumably an else-branch running its
        // own status query (e.g. "get.taskStatus") was lost. Confirm against
        // upstream history before changing.
        ROperationResultStatus status = query.uniqueResult();
        prismObject.setPropertyRealValue(TaskType.F_RESULT_STATUS,
                (status != null ? status.getSchemaValue() : null));
    }

    if (partialValueHolder != null) {
        partialValueHolder.setValue(prismObject);
    }
    nameResolutionHelper.resolveNamesIfRequested(session, prismObject.getValue(), options);
    validateObjectType(prismObject, type);

    ObjectTypeUtil.normalizeAllRelations(prismObject, relationRegistry);
    return prismObject;
}
/**
 * Issue.Smart can find the latest event.
 * Closes a freshly created issue and verifies the author of the latest
 * CLOSED event matches the issue author.
 * @throws Exception If some problem inside
 */
@Test
public void findsLatestEvent() throws Exception {
    final Issue.Smart issue = new Issue.Smart(RtIssueITCase.issue());
    issue.close();
    // NOTE(review): `issue` is already an Issue.Smart, so the extra
    // new Issue.Smart(issue) wrapper below is redundant (though harmless).
    MatcherAssert.assertThat(
        new Event.Smart(
            new Issue.Smart(issue).latestEvent(Event.CLOSED)
        ).author().login(),
        Matchers.equalTo(issue.author().login())
    );
}
def generic_schedule_email(request, pk):
    """Schedule a workshop-request response email from a POSTed template.

    Looks up the email template and its active trigger by the POSTed slug,
    validates the submitted form, builds a GenericAction, enqueues it on the
    RQ scheduler, records the job on the workshop request, and redirects
    back (to the POSTed ``next`` URL when safe, else the request's page).
    On form errors an error message is flashed instead and the same
    redirect is performed.
    """
    template_slug = request.POST.get("slug", "")
    original_template = get_object_or_404(EmailTemplate, slug=template_slug)
    # Only an active trigger of the workshop-request-response family may
    # schedule this template.
    trigger = get_object_or_404(
        Trigger,
        action__startswith="workshop-request-response",
        template__slug=template_slug,
        active=True,
    )
    form = GenericEmailScheduleForm(request.POST, instance=original_template)
    workshop_request = get_object_or_404(WorkshopRequest, pk=pk)

    if form.is_valid():
        # Build an unsaved template instance from the (possibly edited)
        # form fields; the stored template is not modified.
        template = EmailTemplate(
            slug=form.cleaned_data["slug"],
            subject=form.cleaned_data["subject"],
            to_header=form.cleaned_data["to_header"],
            from_header=form.cleaned_data["from_header"],
            cc_header=form.cleaned_data["cc_header"],
            bcc_header=form.cleaned_data["bcc_header"],
            reply_to_header=form.cleaned_data["reply_to_header"],
            body_template=form.cleaned_data["body_template"],
        )

        objects = dict(request=workshop_request)
        if workshop_request.event:
            # Expose the related event under both names used by templates.
            objects["event"] = workshop_request.event
            objects["workshop"] = workshop_request.event

        action = GenericAction(
            trigger=trigger,
            objects=objects,
        )
        action_name = GenericAction.__name__
        launch_at = action.get_launch_at()
        meta = dict(
            action=action,
            template=template,
            launch_at=launch_at,
            email=None,
            context=None,
        )

        job = scheduler.enqueue_in(launch_at, action, meta=meta)
        logger.debug("%s: enqueueing", action_name)
        scheduled_at = scheduled_execution_time(
            job.get_id(), scheduler=scheduler, naive=False
        )
        logger.debug("%s: job created [%r]", action_name, job)

        # Persist a bookkeeping record linking the RQ job to this request.
        rqj = workshop_request.rq_jobs.create(
            job_id=job.get_id(),
            trigger=trigger,
            scheduled_execution=scheduled_at,
            status=check_status(job),
            mail_status="",
            event_slug=action.event_slug(),
            recipients=action.all_recipients(),
        )

        messages.info(
            request,
            format_html(
                "New email ({}) was scheduled to run "
                '<relative-time datetime="{}">{}</relative-time>: '
                '<a href="{}">{}</a>.',
                trigger.get_action_display(),
                scheduled_at.isoformat(),
                "{:%Y-%m-%d %H:%M}".format(scheduled_at),
                reverse("admin:autoemails_rqjob_preview", args=[rqj.pk]),
                job.id,
            ),
            fail_silently=True,
        )

        # NOTE: this redirect tail is duplicated in the error branch below.
        default_url = workshop_request.get_absolute_url()
        next_url = request.POST.get("next", None)
        return redirect(safe_next_or_default_url(next_url, default_url))
    else:
        messages.error(
            request,
            f"Could not send the email due to form errors: {form.errors}",
            fail_silently=True,
        )
        default_url = workshop_request.get_absolute_url()
        next_url = request.POST.get("next", None)
        return redirect(safe_next_or_default_url(next_url, default_url))
/*  dialogWait() method

  This method is called by the inner dialog box while waiting for the user
  to decide whether or not to delete a duplicate file.
*/
static void dialogWait()
{
    synchronized (dialogLock)
    {
        try
        {
            dialogLock.wait();
        }
        catch (InterruptedException ie)
        {
            // Restore the interrupt status instead of swallowing it, so
            // callers further up the stack can still observe the interrupt.
            Thread.currentThread().interrupt();
        }
    }
}
Two debut novels – one freshly published, the other on its way to becoming a classic – have reminded me that for the past century American writers and artists have been obsessed with that shimmering, sexy, liberating, lethal contraption known as the automobile. Small wonder. Is there a more potent metaphor for American restlessness, for the American hunger for status and sex, for the American tendency to wind up, broken and bloody, in a ditch? In a thesis written in 2007, a doctoral candidate named Shelby Smoak neatly summed up the role of the automobile in American fiction as a way for characters to experience “violence, sacredness and consumption.” Reversing this order, cars give writers and artists a way to talk about that unholy troika: status, escape (including sexual escapades), and death. In the bargain, the automobile, which introduced the concept of planned obsolescence back in the 1930s, is the shiny embodiment of American capitalism’s relentless quest to make consumers hunger for the next new thing, whether they need it or not. The first of the two debut novels that brought all this home to me is Lot Boy by Buffalo native Greg Shemkovitz, just published by Sunnyoutside Press. It’s the story of Eddie Lanning, a twentysomething fuckup in Buffalo who works as the titular lot boy, performer of the lowliest tasks at the Ford dealership founded by his late grandfather and now run by his father, who’s dying of cancer. All Eddie wants to do is hook back up with his former girlfriend and get the hell out of Buffalo. To finance his escape, Eddie’s working a scam selling hot auto parts from the dealership, which inspires this rosy portrait of the local scenery: To get here, you have to go through a shitty part of South Buffalo to get to an even shittier section, until you cross a bridge into the wetlands and fields and eventually hit the rundown industrial lakeshore. 
Seeing all this decay and frozen debris pass by my windows, I realize that the only reason anyone would come here is to sell stolen auto parts to somebody who would only come here to buy them. Among its many virtues, this novel offers a peek behind the curtain of a world few people have experienced – the claustrophobic, corrupt, filthy, noisy, inefficient and mind-numbingly banal world of a Big Three car dealership. Reading Lot Boy, you’ll find yourself rooting for Eddie’s escape, while coming to understand why the American automobile industry went so far down the toilet that the U.S. taxpayer had to reach in and pull it out. Here’s the terse but uplifting author note at the end of this winning novel: “Greg Shemkovitz left Buffalo.” Theodore Weesner, who died on June 25 at age 79, published his debut novel in 1972 to foam-at-the-mouth critical praise but modest sales. The Car Thief is the sometimes brutal, sometimes tender story of a troubled teenage boy named Alex Housman whose biography has much in common with Weesner’s. Alex’s hard-drinking mother abandoned him in infancy, and after spending some time in foster care he’s now growing up in a Michigan factory town, living with his alcoholic father, an autoworker. To give his “uncounted” life some account, Alex steals cars and takes them on aimless drives before abandoning them and stealing again. It’s the only means of self-expression available to a boy in such stunted circumstances. Here’s the novel’s opening: Again today Alex Housman drove the Buick Riviera. The Buick, coppertone, white sidewalls, was the model of the year, a ’59, although the 1960 models were already out. Its upholstery was black, its windshield was tinted a thin color of motor oil. The car’s heater was issuing a stale and odorous warmth, but Alex remained chilled. He had walked several blocks through snow and slush, wearing neither hat nor gloves nor boots, to where he had left the car the night before. 
The steering wheel was icy in his hands, and he felt icy within, throughout his veins and bones. Alex was sixteen; the Buick was his fourteenth car. There is not a shred of sentimentality or self-pity in this book, and it never sinks to the dreary level of a treatise on “the juvenile delinquent problem.” This is a work of art, fuelled by all those purloined Buick Rivieras and Chevy Bel Airs. In the end, like Lot Boy, it is less a coming-of-age story than a story about our shared yearning to escape. Here are a dozen other writers and artists who have used the automobile to tell stories about Americans on their way to escape, status and death, sometimes all three. This list doesn’t pretend to be exhaustive. Feel free to offer your own additions: Scott Fitzgerald’s Rolls-Royce Few cars in American fiction have done as much lifting as Jay Gatsby’s 1922 Rolls-Royce. It’s the status symbol that fairly shouts Gatsby’s nouveau riche bona fides; it’s the dream machine that whisks Gatsby and Nick Carraway through the Valley of Ashes, past the unblinking eyes of Dr. T.J. Eckleburg and onto the Gilded Isle of Manhattan; and finally it’s the weapon that ends Myrtle Wilson’s life and leads her grieving husband to mistakenly seek vengeance against the owner of the deadly Rolls-Royce. Here’s our narrator Nick, who drives a stodgy Dodge, gushing over Gatsby’s versatile automobile: It was a rich cream color, bright with nickel, swollen here and there in its monstrous length with triumphant hat-boxes and supper-boxes and tool-boxes, and terraced with a labyrinth of wind-shields that mirrored a dozen suns. Sitting down behind many layers of glass in a sort of green leather conservatory, we started to town. John O’Hara’s Cadillac Few American novelists were more finely attuned to – or hungrier for – the trappings of status than John O’Hara. 
The protagonist of his 1934 novel Appointment in Samarra is Julian English, son of an old Gibbsville, Pa., family, snug inside the upper-crust Lantenengo Country Club set, a hard drinker, a philanderer, and a Cadillac salesman at a time when the Depression is bottoming out and Cadillacs are becoming harder to sell. Despite appearances, Julian has problems – a drinking problem, a marital problem, a money problem, plus the fact that a lot of people in town secretly despise him. Julian’s seemingly perfect life unravels in a whiplash blur over the course of three days and, after an unsuccessful seduction attempt in his own living room, he pitches an epic drunk, playing records, smoking, spilling drinks, falling down, getting up, drinking more. Finally he grabs a pack of cigarettes and a fresh bottle of Scotch and goes out the front door and into the garage, where his Cadillac awaits. Then, after closing the garage door and making sure the windows are shut: He climbed in the front seat and started the car. It started with a merry, powerful hum, ready to go. ‘There, the bastards,’ said Julian, and smashed the clock with the bottom of the bottle, to give them an approximate time. It was 10:41. There was nothing to do now but wait. He smoked a little, hummed for a minute or two, and had three quick drinks and was on his fourth when he lay back and slumped down in the seat. At 10:50, by the clock in the rear seat, he tried to get up. He had not the strength to help himself, and at ten minutes past eleven no one could have helped him, no one in the world. It’s a remarkable moment: suicide by status symbol. Vladimir Nabokov’s Buick “Nothing,” according to Vladimir Nabokov, “is more exhilarating than philistine vulgarity.” So perhaps it’s not surprising that during their years in America Nabokov and his wife Vera owned a series of cars – Pontiacs, Oldsmobiles, Buicks – manufactured by General Motors, the most vigorous purveyor of philistine vulgarity the world has ever known. 
One in particular, a 1952 Buick, was crucial to the composition of Nabokov’s masterpiece, Lolita. With Vera at the wheel of the Buick, Nabokov wrote on index cards during transcontinental butterfly-hunting trips in the 1950s, gleefully cataloging the abundant philistine vulgarity whizzing past the windshield. Among the Nabokovs’ preferred lodgings were the Coral Log Motel in Afton, Wyo., and the Bright Angel Lodge on the rim of the Grand Canyon. Lolita, of course, would not have been possible without cars. After Humbert Humbert’s bride (Lolita’s mother) is dispatched by an errant Packard, leaving the top of poor Charlotte Haze’s head “a porridge of bone, brains, bronze hair and blood,” Humbert, now the legal guardian of his prey, takes Lolita on a year-long, 27,000-mile, pedophile’s “joy ride,” during which his illicit lusts are slaked in roadside hostelries called Sunset Motel, U-Beam Cottages, Mac’s Courts, Kumfy Kabins and, most famously, the Enchanted Hunters. This ecstatic wallowing in philistine vulgarity and taboo sex is made possible by “the Haze jalopy with its ineffectual wipers and whimsical brakes.” In Stanley Kubrick’s 1962 movie adaptation of the novel, which Nabokov professed to admire (he wrote the screenplay), the “Haze jalopy” is a pedestrian 1957 Ford station wagon. There was nothing pedestrian about the chariot the Nabokovs drove. Jack Kerouac’s Hudson Which brings us, inevitably, to that Holy Bible of American car novels, Jack Kerouac’s ricocheting pubescent kicks-quest known as On the Road. There are many vehicles in the novel, including cars, trucks and buses, but the most important one is the 1949 Hudson that Dean Moriarty buys on a whim and proceeds to punish until the rear end falls out on the way back from a high old time in a Mexican whorehouse. As impulse purchases go, this one’s a beauty, and Kerouac’s telling of it is instructive. 
For a writer so dedicated to spontaneous bop prosody and its fluorescent riffs and rambles, Kerouac’s writing could be stunningly flat. Dean’s purchase of the Hudson, so rich with possibilities for rapture, reads like a police blotter: (Dean) got a job on the railroad and made a lot of money. He became the father of a cute little girl, Amy Moriarty. Then suddenly he blew his top while walking down the street one day. He saw a ’49 Hudson for sale and rushed to the bank for his entire roll. He bought the car on the spot. Ed Dunkel was with him. Now they were broke. Dean calmed Camille’s fears and told her he’d be back in a month. “I’m going to New York and bring Sal back.” She wasn’t too pleased at this prospect. “But what is the purpose of all this? Why are you doing this to me?” “It’s nothing, it’s nothing, darling – ah – hem – Sal has pleaded and begged with me to come and get him, it is absolutely necessary for me to – but we won’t go into all these explanations – and I’ll tell you why… No, listen, I’ll tell you why.” And he told her why and of course it made no sense. What made no sense is that Kerouac didn’t do more with glorious iron like this. Richard Yates’s Pontiac In the 2008 movie adaptation of Richard Yates’s masterpiece, Revolutionary Road, Frank Wheeler (played by Leonardo DiCaprio) drives a gargantuan, four-door 1954 Buick Roadmaster. The car is miscast. That big bulky Buick is a banker’s car, not the car of a disaffected suburbanite with vague artistic pretensions like Frank Wheeler. But no matter. There are two moments when cars are crucial to the novel’s effect. The first comes when Frank’s wife, April, is driving to rehearsals for a community theater production. 
It’s important to remember that Revolutionary Road is set in 1955, the year after President Dwight Eisenhower announced his plan to build a system of interstate highways, an acknowledgement that the chrome-encrusted, fire-breathing cars coming out of Detroit were traveling on hopelessly outdated roads. As April and her fellow Players make their way to rehearsal: Their automobiles didn’t look quite right either – unnecessarily wide and gleaming in the colors of candy and ice cream, seeming to wince at each spatter of mud, they crawled along apologetically down the broken roads that led from all directions to the deep, level slab of Route Twelve. Once there the cars seemed able to relax in an environment all their own, a long bright valley of colored plastic and plate glass and stainless steel – KING KONE, MOBILGAS, SHOPORAMA, EAT… The only thing missing is a Kumfy Kabins. This marvelous sketch repays re-reading, especially the way the cars “wince” and “crawl along apologetically” and are finally able to “relax.” But it merely sets the table for one of this bleak novel’s bleakest moments, the night April and a fawning neighbor named Shep Campbell find themselves pawing each other drunkenly in the front seat of Shep’s ’55 Pontiac in the parking lot outside Vito’s Log Cabin alongside Route Twelve: The noise of their breathing had deafened them to all other sounds: the loud insects that sang near the car, the drone of traffic on Route Twelve and the fainter sounds from the Log Cabin – a woman’s shrieking laugh dissolving into the music of horn and piano and drums. “Honey, wait. Let me take you somewhere – we’ve got to get out of – ” “No. Please,” she whispered. “Here. Now. In the back seat.” And the back seat was where it happened. 
Cramped and struggling for purchase in the darkness, deep in the mingled scents of gasoline and children’s overshoes and Pontiac upholstery, while a delicate breeze brought wave on wave of Steve Kovick’s final drum solo of the night, Shep Campbell found and claimed the fulfillment of his love at last. When the deed is done, Shep blurts out that he loves April. She rebuffs this idiotic confession by telling him, “I don’t know who you are.” Then comes the final blow, which will prove to be a death sentence: “And even if I did,” she said, “I’m afraid it wouldn’t help, because you see I don’t know who I am either.” Ah, the truths that pass back and forth in the sticky back seat of a ’55 Pontiac. Robert Frank’s Ford In 1955, the year Revolutionary Road was set, the Swiss-born photographer Robert Frank bought a second-hand 1950 Ford Business Coupe and set out on an epic year-long road trip of his own. Armed with two cameras and many boxes of film, sometimes traveling alone and sometimes accompanied by his wife and their young son, he set out to capture a visual record of what it meant to live in America – the frustrations and joys, the unease, the misgivings people think about but don’t discuss. This was the season of Emmett Till’s murder, the Montgomery bus boycott. The Deep South and Detroit proved to be two particularly fruitful locales for Frank – the hell of Jim Crow and the different hell where people went to escape it. “I went to Detroit to photograph the Ford factories,” Frank said, “and then it was clear to me I wanted to do this. It was summer and so loud. So much noise. So much heat. It was hell. So much screaming.” Frank loved his faceless ’50 Ford because, like him, it was inconspicuous. He saw himself as a detective, or a spy, surreptitiously chronicling the lives he encountered. He usually shot in public places – parks, drive-ins, city sidewalks, political rallies, churches, juke joints. His sympathies, he has said, “were with people who struggled. 
There was also my mistrust of people who made the rules.” The resulting book is called The Americans, and today it’s an undisputed classic. The last image in the book shows Frank’s wife Mary and son Pablo asleep in the Ford on the shoulder of a road in Texas. It’s a testament to just how gritty and exhausting the journey was. Flannery O’Connor’s…Ford? In what may be her greatest short story, “A Good Man is Hard to Find,” Flannery O’Connor sends a Georgia family off to their doom aboard an automobile of unspecified make. Given the thoroughly middle-class, middle-brow nature of this Atlanta family, I’m going to guess the car was a Ford. It doesn’t really matter. What matters is that the chatterbox grandmother has concealed her cat in a hat box in the back seat, and when it springs onto the shoulder of the driver, the grandmother’s son, Bailey, the car lurches off the road and tumbles into a gulch, landing right-side-up, nobody seriously injured. Now comes the best part. A car slowly approaches the scene of the accident: “It was a big black battered hearse-like automobile. There were three men in it.” This unholy trinity are undertakers, all right, three prison escapees – The Misfit, Hiram and Bobby Lee – who set about slaughtering the whole family: husband and wife, son and daughter, infant and finally, most spectacularly, the grandmother. It’s the perfect ending to a perfect story, one that began with the promise of automobile-assisted escape, then progressed to an automotive mishap that led, finally, inevitably, to mass murder. As the story ends, The Misfit and his two psychopath sidekicks plan to fix the mildly damaged Ford and leave the hearse behind. That’s called trading up. That’s what you’re supposed to do with cars in America. 
Andy Warhol’s Fords If Henry Ford’s assembly line introduced the central impulse of the 20th century – the urge to replicate an act endlessly – then Andy Warhol introduced the notion that the replicable event was not only the essence of our age, it was also lowly stuff that could be transformed into high art. And so Warhol mass-produced images of things meant to be consumed in mass quantities, including Coca-Cola bottles, Campbell’s soup cans and Brillo boxes; movie stars (Marilyn, Liz, Elvis); political icons (Mao, Che, Nixon); pop stars (Michael Jackson, John Lennon, Debbie Harry); and, inevitably, the automobile and its ubiquitous by-product, the car crash. In 2013, 50 years after it was painted, Warhol’s “Silver Car Crash (Double Disaster)” sold at auction for $105 million, then the highest price ever paid for the artist’s work. The painting is a series of repeating images of a body inside a mangled car after a crash. For me, a far more chilling image from Warhol’s “Death and Disaster” series is “Pink Car Crash,” which shows three bloody people flung halfway out of a 1955 Ford that has landed on its roof. One of the two women appears to be alive, but she’s being hugged by a man who’s either unconscious or dead. It doesn’t get much grimmer than this. Robert Bechtle’s Pontiac The California-based painter Robert Bechtle makes very different use of automobiles in his hyper-realistic pictures. Rather than making cars into gaudy badges of status or conveyances of disaster and death, Bechtle treats them matter-of-factly as part of the furniture of middle-class American life. He paints everyday scenes without irony or condescension. As he states in the catalog of the inaugural show at the new Whitney Museum of American Art: “My interest in these subjects has nothing to do with satire or social comment. 
I paint them because they are part of what I know and as such I have an affection for them; I am interested in their commonness and in the challenge of making art from such ordinary fare.” It’s telling that the above triptych, a self-portrait of the artist and his family on a blandly barbered suburban street, is called “’61 Pontiac.” The painting may give us Mom, Dad, Buddy and Sis, but the beige, no-frills Pontiac station wagon is clearly at the core of this nuclear unit. Harry Crews’s Ford Maverick Only Harry Crews would have thought to create a character who sets out to eat a car. His name is Herman Mack, son of the owner of a 43-acre auto junkyard, a true believer in the importance of the automobile. “Everything that’s happened in this goddam country in the last fifty years,” Herman says, “has happened in, on, around, with, or near a car.” And so Herman decides to eat a 1971 Ford Maverick, half a pound of blow-torched, swallowable pellets per day, until the car is gone. For good measure – since this 1972 novel, called Car, is a wicked satire of American consumerism – Herman collects the pellets after defecation and melts them down into mini-Mavericks, then sells them as key chains. Herman takes the concept of recycling to a whole new level. Of course this is all carried on coast-to-coast television because Americans love a spectacle almost as much as they love their automobiles. But for all its conceptual hijinks, this novel, like all of Crews’s fiction, is firmly grounded in the very dirty and bloody real world. His description of a gruesome multi-car smashup is, in the words of one reviewer, “a superb grotesque of the ordinariness of death on the open American road.” Or as one obit writer put it after Crews’s death in 2012, his novels, including Car, out-Gothic Southern Gothic. E.L. Doctorow’s Model T Ford When he died on July 21 at the age of 84, E.L. 
Doctorow was eulogized as a writer who mined American history and historical figures to create a body of fiction that showed, in briskly entertaining fashion, how the past forever informs the present. While The March might be his greatest literary achievement, Doctorow’s most accessible and probably his best-loved novel is 1975’s audacious Ragtime. Michael Chabon recently noted that Doctorow’s blending of historical and fictional characters and his gleeful mining of historical events for fictional purposes provided a “magic way out” for writers caught between an unappetizing choice: dirty realism or arid postmodernism? At any rate, Ragtime uses a vast canvas to paint a portrait of the twilight of American innocence on the eve of the First World War. It was an age that oozed isms, including patriotism, jingoism, feminism, socialism, hedonism and racism. This big gaudy portrait pivots around a single iteration of what was arguably the most influential invention of the so-called American Century: Henry Ford’s mass-produced Model T. This particular Model T is owned by a Negro musician named Coalhouse Walker Jr. “His car shone,” Doctorow tells us. “The brightwork gleamed. There was a glass windshield and a custom pantasote top.” Coalhouse Walker is terribly proud of his modest automobile, an unimaginable luxury for many black people at that time and, therefore, a scalding affront to many white people. So when a group of racist Irish firefighters desecrate his beloved car, Coalhouse Walker will settle for nothing less than complete justice. His insistence, much like April Wheeler’s confession in the back seat of that ’55 Pontiac, turns out to be a death sentence. Such a big fuss over one of Henry Ford’s affordable little cars! Denis Johnson’s Oldsmobile Denis Johnson’s “Car Crash While Hitchhiking” is that rarest thing – a work of fiction you don’t merely read, but experience as if you are inside the story while it’s taking place. 
Since it’s narrated by a hitchhiker, this short story is, naturally, crowded with cars – a salesman’s “luxury” car, a “VW no more than a bubble of hashish” and, finally, an Oldsmobile with a family inside that performs the story’s central business: it “headonned and killed forever a man driving west out of Bethany, Missouri…” That “headonned” and “killed forever” hint at Johnson’s verbal wizardry, his ability to write sentences that startle as they sheer off in unexpected directions, creating a sense of disorientation that is crucial to the story’s effect. The hitchhiker passes under “Midwestern clouds like great gray brains.” Here’s the hitchhiker stuck by the side of the road in a downpour: “My thoughts zoomed pitifully. The travelling salesman had fed me pills that made the linings of my veins feel scraped out. I knew every raindrop by its name. I sensed everything before it happened. I knew a certain Oldsmobile would stop for me even before it slowed, and by the sweet voices of the family inside it I knew we’d have an accident in the storm.” His prediction comes true, of course. Call this what you will: clairvoyance, madness, stoned lunacy. No matter what you call it, it’s great writing. Since the story was first published in the Paris Review in 1989, I’m guessing the deadly Oldsmobile was a mid-’80s model. Don DeLillo’s Limousine No one, to my knowledge, has claimed that Cosmopolis is Don DeLillo’s greatest achievement, but this 2003 novel does have an ingenious framing device: it takes place almost entirely inside a white stretch limousine as it conveys the protagonist, a super-rich currency trader named Eric Packer, across Manhattan to get a haircut. Here’s Eric after coming down from his 48-room apartment with its computerized bed, shark tank, gym, screening room and a pen for the borzois: He put on his sunglasses. Then he walked back across the avenue and approached the lines of white limousines. 
There were ten cars, five in a curbside row in front of the tower, and five lined up on the cross street, facing west. The cars were identical at a glance. The limo Eric rides is equipped with an array of visual display units full of financial data, a microwave oven, a heart monitor, a liquor cabinet, a floor made of Carrara marble. This baby puts Gatsby’s Rolls-Royce in the shade, but it’s forced to crawl to its destination, delayed by a presidential motorcade (more limos), a water main break, a political protest, a rap star’s funeral, and various other diversions. Unfortunately, like so many high-concept ideas, this one sounds better as an elevator pitch than it reads on the page. Straining to sound portentous, these characters come off as gassy. When one of his minions asks Eric why he doesn’t have the barber come to his office, or even to this limo, he replies, “A haircut has what. Associations. Calendar on the wall. Mirrors everywhere. There’s no barber chair here. Nothing swivels but the spycam.” Nobody talks like this, not even currency traders in danger of losing hundreds of millions of dollars for betting against the yen. But the very worst of this flawed novel’s missteps might be DeLillo’s choice of automobile. A white stretch limo doesn’t spell Master of the Universe; it spells prom night in Poughkeepsie. Cars, it turns out, can kill in more ways than one.
import {Entity, property, model} from '@loopback/repository';

// Minimal LoopBack model fixture with three string properties.
@model()
export class MockModel extends Entity {
  // Primary key; supplied by the caller (`generated: false`).
  @property({
    type: 'string',
    id: true,
    generated: false,
  })
  id: string;

  // NOTE(review): `id: true` on itemName and description declares them as
  // additional id properties (i.e. a multi-part key in LoopBack terms).
  // Confirm this is the intended fixture shape and not a copy-paste of the
  // `id` property's decorator options.
  @property({
    type: 'string',
    id: true,
    generated: false,
  })
  itemName: string;

  @property({
    type: 'string',
    id: true,
    generated: false,
  })
  description: string;
}
<gh_stars>0
import events
import io
import json
import os

from executors.python import run as python_run
from executors.workflow import run as workflow_run
from . import utils
from girder_worker.utils import JobStatus, StateTransitionException
from girder_worker import config, PACKAGE_DIR

# NOTE(review): `io.fetch`/`io.push` are called below, but the stdlib `io`
# module has neither -- presumably a project-local `io` module is intended to
# shadow it; confirm the import resolves to the worker's io layer.
# NOTE(review): `iteritems()` is used throughout, so this module targets
# Python 2.

# Maps task modes to their implementation
_task_map = {}


class TaskSpecValidationError(Exception):
    """Raised when a task specification fails validation."""
    pass


def register_executor(name, fn):
    """
    Register a new executor in the girder_worker runtime. This is used to
    map the 'mode' field of a task to a function that will execute the task.

    :param name: The value of the mode field that maps to the given function.
    :type name: str
    :param fn: The implementing function.
    :type fn: function
    """
    _task_map[name] = fn


def unregister_executor(name):
    """
    Unregister an executor from the map.

    :param name: The name of the executor to unregister.
    :type name: str
    """
    del _task_map[name]


def _resolve_scripts(task):
    # Recursively replace 'script_uri' fields with fetched 'script' contents.
    # Workflow tasks have no script of their own; their steps are resolved
    # recursively instead.
    if task.get('mode') != 'workflow':
        if 'script_uri' in task and 'script' not in task:
            task['script'] = io.fetch({
                'url': task['script_uri']
            })
    elif 'steps' in task:
        for step in task['steps']:
            _resolve_scripts(step['task'])


def load(task_file):
    """
    Load a task JSON into memory, resolving any ``'script_uri'`` fields by
    replacing it with a ``'script'`` field containing the contents pointed to
    by ``'script_uri'`` (see :py:mod:`girder_worker.uri` for URI formats). A
    ``script_fetch_mode`` field may also be set

    :param task_file: The path to the JSON file to load.
    :returns: The task as a dictionary.
    """
    with open(task_file) as f:
        task = json.load(f)

    # chdir into the task file's directory so relative script URIs resolve,
    # then restore the previous working directory.
    prevdir = os.getcwd()
    parent = os.path.dirname(task_file)
    if parent != '':
        os.chdir(os.path.dirname(task_file))
    _resolve_scripts(task)
    os.chdir(prevdir)

    return task


def set_job_status(mgr, status):
    # Push a status update through the job manager, if one is attached.
    if mgr:
        mgr.updateStatus(status)


def _extractId(spec):
    # An input/output spec is keyed by its 'id', falling back to 'name'.
    return spec['id'] if 'id' in spec else spec['name']


def _validateInputs(task_inputs, inputs):
    # Fill missing inputs from task defaults; a missing input with no
    # default is a hard error.
    for name, task_input in task_inputs.iteritems():
        if name not in inputs:
            if 'default' in task_input:
                inputs[name] = task_input['default']
            else:
                raise Exception('Required input \'%s\' not provided.' % name)


@utils.with_tmpdir
def run(task, inputs=None, outputs=None, fetch=True, status=None, **kwargs):
    """
    Run a task with the specified I/O bindings.

    :param task: Specification of the task to run.
    :type task: dict
    :param inputs: Specification of how input objects should be fetched
        into the runtime environment of this task.
    :type inputs: dict
    :param outputs: Specification of what should be done with outputs
        of this task.
    :type outputs: dict
    :param write_script: If ``True`` task scripts will be written to file
        before being passed to ``exec``. This improves interactive debugging
        with tools such as ``pdb`` at the cost of additional file I/O. Note
        that when passed to run *all* tasks will be written to file including
        validation and conversion tasks.
    :param fetch: If ``True`` will perform a fetch on the input before
        running the task (default ``True``).
    :param status: Job status to update to during execution of this task.
    :type status: girder_worker.utils.JobStatus
    :returns: A dictionary of the form ``name: binding`` where ``name`` is
        the name of the output and ``binding`` is an output binding of the
        form ``{'data': data}``. The ``'data'`` field may be absent if an
        output URI was provided. Instead, those outputs will be saved to that
        URI and the output binding will contain the location in the ``'uri'``
        field.
    """
    inputs = inputs or {}
    outputs = outputs or {}

    # Index the task's declared inputs/outputs by their id/name.
    task_inputs = {_extractId(d): d for d in task.get('inputs', ())}
    task_outputs = {_extractId(d): d for d in task.get('outputs', ())}
    mode = task.get('mode', 'python')

    if mode not in _task_map:
        raise Exception('Invalid mode: %s' % mode)

    job_mgr = kwargs.get('_job_manager')

    # Shared context passed to every 'run.*' event; handlers may mutate it.
    info = {
        'task': task,
        'task_inputs': task_inputs,
        'task_outputs': task_outputs,
        'mode': mode,
        'inputs': inputs,
        'outputs': outputs,
        'status': status,
        'job_mgr': job_mgr,
        'kwargs': kwargs
    }
    events.trigger('run.before', info)

    try:
        # If some inputs are not there, fill in with defaults
        _validateInputs(task_inputs, inputs)

        for name, d in inputs.iteritems():
            task_input = task_inputs[name]
            if task_input.get('stream'):
                continue  # this input will be fetched as a stream
            if fetch:
                if status == JobStatus.RUNNING and 'data' not in d:
                    set_job_status(job_mgr, JobStatus.FETCHING_INPUT)
                d['data'] = io.fetch(
                    d, **dict({'task_input': task_input}, **kwargs))
            events.trigger('run.handle_input', {
                'info': info,
                'task_input': task_input,
                'input': d,
                'name': name
            })
            # Executors read from 'script_data'; default it to the raw data.
            if 'script_data' not in d:
                d['script_data'] = d['data']

        # Ensure every declared output has a binding dict to write into.
        for name, task_output in task_outputs.iteritems():
            if name not in outputs:
                outputs[name] = {}

        # Set the appropriate job status flag
        set_job_status(job_mgr, status)

        # Actually run the task for the given mode
        _task_map[mode](
            task=task, inputs=inputs, outputs=outputs,
            task_inputs=task_inputs, task_outputs=task_outputs, **kwargs)

        for name, task_output in task_outputs.iteritems():
            if task_output.get('stream'):
                continue  # this output has already been sent as a stream
            output = outputs[name]
            # Handlers may prevent the default push (e.g. to take over
            # delivery of the output themselves).
            e = events.trigger('run.handle_output', {
                'info': info,
                'task_output': task_output,
                'output': output,
                'outputs': outputs,
                'name': name
            })
            if not e.default_prevented:
                data = outputs[name]['script_data']
                if status == JobStatus.RUNNING:
                    set_job_status(job_mgr, JobStatus.PUSHING_OUTPUT)
                io.push(
                    data, outputs[name],
                    **dict({'task_output': task_output}, **kwargs))

            # Drop the raw executor result from the returned binding.
            output.pop('script_data', None)

        events.trigger('run.after', info)

        return outputs
    except StateTransitionException:
        if job_mgr:
            status = job_mgr.refreshStatus()
            # If we are canceling we want to stay in that state, otherwise
            # raise the exception
            if status != JobStatus.CANCELING:
                raise
        else:
            raise
    finally:
        events.trigger('run.finally', info)


register_executor('python', python_run)
register_executor('workflow', workflow_run)

# Load plugins that are enabled in the config file or env var
_plugins = os.environ.get('WORKER_PLUGINS_ENABLED',
                          config.get('girder_worker', 'plugins_enabled'))
_plugins = [p.strip() for p in _plugins.split(',') if p.strip()]
_paths = os.environ.get(
    'WORKER_PLUGIN_LOAD_PATH', config.get(
        'girder_worker', 'plugin_load_path')).split(':')
_paths = [p for p in _paths if p.strip()]
# The built-in plugins directory is always searched last.
_paths.append(os.path.join(PACKAGE_DIR, 'plugins'))
utils.load_plugins(_plugins, _paths, quiet=True)
//========================================================================= // Copyright (C) 2012 The Elastos Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //========================================================================= #include "Elastos.CoreLibrary.Core.h" #include "elastos/droid/server/location/LocationProviderProxy.h" #include <elastos/core/AutoLock.h> #include <elastos/core/StringBuilder.h> #include <elastos/utility/logging/Logger.h> #include <elastos/core/AutoLock.h> using Elastos::Core::AutoLock; using Elastos::Droid::Location::ILocationProvider; using Elastos::Droid::Os::CWorkSource; using Elastos::Droid::Os::IBinder; using Elastos::Core::AutoLock; using Elastos::Core::CString; using Elastos::Core::IAppendable; using Elastos::Core::ICharSequence; using Elastos::Core::StringBuilder; using Elastos::IO::IFlushable; using Elastos::Utility::Logging::Logger; namespace Elastos { namespace Droid { namespace Server { namespace Location { //=============================== //LocationProviderProxy::NewServiceWorkRunnable //=============================== LocationProviderProxy::NewServiceWorkRunnable::NewServiceWorkRunnable( /* [in] */ LocationProviderProxy* host) : mHost(host) {} ECode LocationProviderProxy::NewServiceWorkRunnable::Run() { if (D) Logger::D(TAG, "applying state to connected service"); Boolean enabled; AutoPtr<IProviderProperties> properties; AutoPtr<IProviderRequest> request; AutoPtr<IWorkSource> source; 
AutoPtr<IILocationProvider> service; { AutoLock syncLock(this); enabled = mHost->mEnabled; request = mHost->mRequest; source = mHost->mWorksource; service = mHost->GetService(); } if (service == NULL) return E_NULL_POINTER_EXCEPTION; // load properties from provider ECode ec = service->GetProperties((IProviderProperties**)&properties); if (FAILED(ec)) { Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } if (properties == NULL) { Logger::E(TAG, "%s has invalid locatino provider properties", mHost->mServiceWatcher->GetBestPackageName().string()); } // apply current state to new service if (enabled) { service->Enable(); if (request != NULL) { service->SetRequest(request, source); } } { AutoLock syncLock(this); mHost->mProperties = properties; } return NOERROR; } //================================= // LocationProviderProxy //================================= const String LocationProviderProxy::TAG("LocationProviderProxy"); const Boolean LocationProviderProxy::D = FALSE;//ILocationManagerService::D; CAR_INTERFACE_IMPL(LocationProviderProxy, Object, ILocationProviderInterface) AutoPtr<LocationProviderProxy> LocationProviderProxy::CreateAndBind( /* [in] */ IContext* context, /* [in] */ const String& name, /* [in] */ const String& action, /* [in] */ Int32 overlaySwitchResId, /* [in] */ Int32 defaultServicePackageNameResId, /* [in] */ Int32 initialPackageNamesResId, /* [in] */ IHandler* handler) { AutoPtr<LocationProviderProxy> proxy = new LocationProviderProxy( context, name, action, overlaySwitchResId, defaultServicePackageNameResId, initialPackageNamesResId, handler); if (proxy->Bind()) { return proxy; } else { return NULL; } } LocationProviderProxy::LocationProviderProxy( /* [in] */ IContext* context, /* [in] */ const String& name, /* [in] */ const String& action, /* [in] */ Int32 overlaySwitchResId, /* [in] */ Int32 defaultServicePackageNameResId, /* [in] */ Int32 initialPackageNamesResId, /* [in] */ IHandler* handler) : mContext(context) , mName(name) , 
mEnabled(FALSE) { CWorkSource::New((IWorkSource**)&mWorksource); mNewServiceWork = new NewServiceWorkRunnable(this); mServiceWatcher = new ServiceWatcher( mContext, TAG + "-" + name, action, overlaySwitchResId, defaultServicePackageNameResId, initialPackageNamesResId, mNewServiceWork, handler); } Boolean LocationProviderProxy::Bind() { return mServiceWatcher->Start(); } AutoPtr<IILocationProvider> LocationProviderProxy::GetService() { AutoPtr<IBinder> binder = mServiceWatcher->GetBinder(); return (binder != NULL) ? IILocationProvider::Probe(binder) : NULL; } ECode LocationProviderProxy::GetConnectedPackageName( /* [out] */ String* name) { VALIDATE_NOT_NULL(name) *name = mServiceWatcher->GetBestPackageName(); return NOERROR; } ECode LocationProviderProxy::GetName( /* [out] */ String* name) { VALIDATE_NOT_NULL(name); *name = mName; return NOERROR; } ECode LocationProviderProxy::GetProperties( /* [out] */ IProviderProperties** properties) { VALIDATE_NOT_NULL(properties) { AutoLock syncLock(this); *properties = mProperties; REFCOUNT_ADD(*properties); } return NOERROR; } ECode LocationProviderProxy::Enable() { { AutoLock syncLock(this); mEnabled = TRUE; } AutoPtr<IILocationProvider> service = GetService(); if (service == NULL) return E_NULL_POINTER_EXCEPTION; ECode ec = service->Enable(); if (FAILED(ec)) { Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } // if (ec == (ECode)E_EXCEPRION) { // // never let remote service crash system server // Logger::E(TAG, "Exception from %s", mServiceWatcher->GetBestPackageName().string()); // return ec; // } return NOERROR; } ECode LocationProviderProxy::Disable() { { AutoLock syncLock(this); mEnabled = FALSE; } AutoPtr<IILocationProvider> service = GetService(); if (service == NULL) return E_NULL_POINTER_EXCEPTION; ECode ec = service->Disable(); if (FAILED(ec)) { Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } // if (ec == (ECode)E_EXCEPRION) { // // never let remote service crash system server // Logger::E(TAG, 
"Exception from %s", mServiceWatcher->GetBestPackageName().string()); // return ec; // } return NOERROR; } ECode LocationProviderProxy::IsEnabled( /* [out] */ Boolean* enable) { VALIDATE_NOT_NULL(enable); { AutoLock syncLock(this); *enable = mEnabled; } return NOERROR; } ECode LocationProviderProxy::SetRequest( /* [in] */ IProviderRequest* request, /* [in] */ IWorkSource* source) { { AutoLock syncLock(this); mRequest = request; mWorksource = source; } AutoPtr<IILocationProvider> service = GetService(); if (service == NULL) return E_NULL_POINTER_EXCEPTION; ECode ec = service->SetRequest(request, source); if (FAILED(ec)) { Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } // if (ec == (ECode)E_EXCEPRION) { // // never let remote service crash system server // Logger::E(TAG, "Exception from %s", mServiceWatcher->GetBestPackageName().string()); // return ec; // } return NOERROR; } ECode LocationProviderProxy::Dump( /* [in] */ IFileDescriptor* fd, /* [in] */ IPrintWriter* pw, /* [in] */ ArrayOf<String>* args) { AutoPtr<IAppendable> appendable = IAppendable::Probe(pw); AutoPtr<ICharSequence> cs; CString::New(String("REMOTE SERVICE"), (ICharSequence**)&cs); appendable->Append(cs.Get()); cs = NULL; CString::New(String(" name="), (ICharSequence**)&cs); appendable->Append(cs.Get()); cs = NULL; CString::New(mName, (ICharSequence**)&cs); appendable->Append(cs.Get()); cs = NULL; CString::New(String(" pkg="), (ICharSequence**)&cs); appendable->Append(cs.Get()); cs = NULL; CString::New(mServiceWatcher->GetBestPackageName(), (ICharSequence**)&cs); appendable->Append(cs.Get()); cs = NULL; CString::New(String(" version="), (ICharSequence**)&cs); appendable->Append(cs.Get()); StringBuilder sb(""); sb += mServiceWatcher->GetBestVersion(); cs = NULL; CString::New(sb.ToString(), (ICharSequence**)&cs); appendable->Append(cs.Get()); appendable->AppendChar('\n'); AutoPtr<IILocationProvider> service = GetService(); if (service == NULL) { pw->Println(String("service down (null)")); 
return E_NULL_POINTER_EXCEPTION; } IFlushable::Probe(pw)->Flush(); //TODO // try { // service.asBinder().dump(fd, args); // } catch (RemoteException e) { // pw.println("service down (RemoteException)"); // Log.w(TAG, e); // } catch (Exception e) { // pw.println("service down (Exception)"); // // never let remote service crash system server // Log.e(TAG, "Exception from " + mServiceWatcher.getBestPackageName(), e); // } return NOERROR; } ECode LocationProviderProxy::GetStatus( /* [in] */ IBundle* extras, /* [out] */ Int32* status) { VALIDATE_NOT_NULL(status); AutoPtr<IILocationProvider> service = GetService(); if (service != NULL) { *status = ILocationProvider::TEMPORARILY_UNAVAILABLE; return NOERROR; } ECode ec = service->GetStatus((IBundle**)&extras, status); if (FAILED(ec)) { Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } // if (ec == (ECode)E_EXCEPRION) { // // never let remote service crash system server // Logger::E(TAG, "Exception from %s", mServiceWatcher->GetBestPackageName().string()); // return ec; // } return NOERROR; } //@Override ECode LocationProviderProxy::GetStatusUpdateTime( /* [out] */ Int64* time) { VALIDATE_NOT_NULL(time); AutoPtr<IILocationProvider> service = GetService(); if (service == NULL) { *time = 0; return NOERROR; } ECode ec = service->GetStatusUpdateTime(time); if (FAILED(ec)) { Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } // if (ec == (ECode)E_EXCEPRION) { // // never let remote service crash system server // Logger::E(TAG, "Exception from %s", mServiceWatcher->GetBestPackageName().string()); // return ec; // } return NOERROR; } ECode LocationProviderProxy::SendExtraCommand( /* [in] */ const String& command, /* [in] */ IBundle* extras, /* [out] */ Boolean* result) { VALIDATE_NOT_NULL(result); AutoPtr<IILocationProvider> service = GetService(); if (service == NULL) { *result = FALSE; return NOERROR; } ECode ec = service->SendExtraCommand(command, extras, (IBundle**)&extras, result); if (FAILED(ec)) { 
Logger::W(TAG, "%08x", ec); return E_REMOTE_EXCEPTION; } // if (ec == (ECode)E_EXCEPRION) { // // never let remote service crash system server // Logger::E(TAG, "Exception from %s", mServiceWatcher->GetBestPackageName().string()); // return ec; // } return NOERROR; } } // namespace Location } // namespace Server } // namespace Droid } // namespace Elastos
So what is the biggest challenge that many of us face when it comes to decorating our homes? Undoubtedly ‘space’ is the answer that tops the list almost every single time. We try our very best to make the most of every inch of available space in our homes and often dream about how that one extra room could solve all our space conundrums. Funnily enough, when we do eventually move into a bigger home or get that one additional room, soon we find ourselves craving even more space! Be that as it may, proper planning and the smart utilization of the available room can often turn a cramped home into an airy and relaxed hub. In an attempt to find that elusive ‘additional nook’, today we turn our attention to the forgotten, lonely corner in the bedroom. Often overlooked and rarely utilized to the hilt, the bedroom corner is filled with potential waiting to be unlocked. All you need is the right decor and a bit of creativity to get the job done. And here are 12 snazzy ideas to help you turn that tight corner into a stylish and functional spot – 1. The Corner Bed Yes, the bed is the focal point of your bedroom, and it needs to remain that way. But making it the star of the show does not necessarily mean you need to put it front and center quite literally! Corner beds are quickly becoming a popular trend, as they not only save up on precious space but also turn that ill-utilized corner into a hot hub for activity. This also opens up the remainder of the bedroom, giving it an airy appeal. 2. Twice the Comfort! While angling the bed into the corner or giving it that dual headboard is one of the ways to go, you can also put in a couple of beds in the corner to really cut down on the wastage of space. This is another hot look in kids’ bedrooms that is replacing the use of bunk beds, and the common area between the two beds can be used for sconce lighting or as storage and display space. 3. 
A Fiery Corner Fireplace So, you are not too happy about moving the bed into the corner and realigning the entire room? We understand the appeal of a bed that is placed elegantly at the heart of the bedroom. Instead, turn that boring and at times ‘poorly lit’ corner into a stunning and brilliant fireplace. The fireplace that you choose can depend on the style of the bedroom and can range from the sleek, contemporary models with a glass front to the classic stone fireplace. We suggest bringing in a standalone fireplace, as it will save you the trouble of going in for a more tedious makeover. by Lapis Design Partners by Fisher Heck Architects by Platt Architecture 4. A Lovely Reading Nook This is definitely my personal favorite, as it lets you relax in style in the bedroom and catch up on the latest bestseller before you hit the bed. Just add a plush, comfy chair and a cool ottoman along with a small side table and table lamp to turn the corner into an ergonomic reading nook. We obviously would suggest a classic like the Eames lounger with ottoman, but feel free to improvise. For those really short on space (read as a tiny corner), do away with the side table and the lamp and bring in a beautiful pendant light. by Mina Brinkey by Sean Litchfield Photography 5. Share Those Special Moments! This is indeed a perfect plan for young couples and parents who might want to spend Sunday morning in the quiet refuge of their bedroom. Instead of the reading nook, bring in a simple coffee table and a couple of chairs, or even a plush couch does the trick. You can even enjoy a simple breakfast here on your special day, and it truly is the ideal spot to catch up on the few beautiful, fleeting moments of life! by Stonewood 6. An Ergonomic Workstation Are you less of a romantic and more of a workaholic who loves to spend a few hours in front of the laptop at home? Then transform that distant bedroom corner into an efficient workstation. 
All this needs is a corner work table that can be mounted onto the wall and a chair. With ample vertical room on offer, there is absolutely no shortage of storage space here. by Laura Kirar Design 7. Versatile Bookshelves Remember those days when your parents insisted on flicking off the lights at a certain time, and hence you took to the torch and caught up on your favorite comic? Okay, maybe today’s young generation with its iPads, smartphones and iPods does not get it. If you would much rather catch up on a book while tucked in, then the idea of a corner bookshelf is indeed perfect. Obviously, the shelf also doubles as a lovely display, adding to the aesthetics of the room. by Dan Nelson, Designs Northwest Architects 8. Relax in Style! The plush daybed in the corner is another great idea, and it works even better if you have a great corner window. The combination is indeed tempting, and it looks even better on the cold winter day when you want to tuck in with your favorite beverage and the remote in your hand. Corner windows also bring in ample natural ventilation, giving your small bedroom an airy appeal. by Ryan Group Architects by GM Construction by Craig Denis 9. A Good Old Nightstand If your bed is already pretty close to the corner and there is very little space on offer, then usher in a cool nightstand to create a picture-perfect image. Adding twin nightstands on each side gives the room geometric visual balance and is a classic that never fails. Simple, timeless and practical, the trendy contemporary nightstand has undeniable appeal. by Greg Natale 10. A Hint of Greenery Decorating your bedroom is not just about decor and accessories alone, and if you already have pretty much everything you want, then just use a lovely potted plant to enliven the corner. Tropical-style bedrooms are currently all the rage, and a hint of natural goodness is the best way to give the room a dash of vitality and green goodness. by Laura U 11. 
The World Outside This is obviously the ideal case scenario, as not all of us are blessed with a bedroom that opens toward idyllic mountains or the sight of waves crashing onto the beach. But if you do have such a privileged vantage point, then take full advantage of it by adding glass walls and opening up the corner to the outside world. A view of the distant Golden Gate Bridge, the glittering New York City skyline or a classic view of sleepy Vienna is not a bad option either for the urban dweller! by Carney Logan Burke Architects 12. Use your Imagination! Yes, we did say 12 ideas, and no, we are not going to bail out by just telling you to use your ingenuity. Wall murals, decals and lovely modern art illuminated by the right accent lighting are an easy way to spice up the bedroom corner. Maybe your bedroom could use a bathtub in the corner! Do think about what you really want and which of the above additions really brings value to the bedroom and your home as a whole. by James Thomas So, are you now convinced that the small corner in our bedroom does hold endless possibilities? Which of these additions is your preferred choice? Or do you have something completely different planned? Share your thoughts with your fellow Deco-heads… Liked the story? Share it with friends.
/**
 * Signs in an existing Firebase user with email/password credentials.
 * On completion the progress dialog is hidden and the UI is updated:
 * a verified user is greeted by display name, an unverified user is
 * prompted to verify their email, and a failed sign-in shows a toast.
 *
 * FIX: the original showed the "Authentication failed." toast twice on
 * failure, because a redundant {@code if (!task.isSuccessful())} block
 * repeated the else-branch's toast. The duplicated visibility updates in
 * the two success sub-branches were also hoisted out.
 */
private void signIn(String email, String password) {
    Log.d(TAG, "signIn:" + email);
    showProgressDialog();

    firebaseAuth.signInWithEmailAndPassword(email, password)
            .addOnCompleteListener(this, new OnCompleteListener<AuthResult>() {
                @Override
                public void onComplete(@NonNull Task<AuthResult> task) {
                    if (task.isSuccessful()) {
                        Log.d(TAG, "signInWithEmail:success");
                        FirebaseUser user = firebaseAuth.getCurrentUser();
                        TextView helloText = findViewById(R.id.hello);
                        // Both sub-branches hide the verification form and
                        // reveal the greeting text; only the message differs.
                        findViewById(R.id.verifLayout).setVisibility(View.GONE);
                        helloText.setVisibility(View.VISIBLE);
                        if (user != null && user.isEmailVerified()) {
                            helloText.setText(String.format("Hello %s", user.getDisplayName()));
                        } else {
                            // Signed in, but the email address is not verified yet.
                            helloText.setText(R.string.verif_req);
                        }
                    } else {
                        Log.w(TAG, "signInWithEmail:failure", task.getException());
                        Toast.makeText(MainActivity.this, "Authentication failed.",
                                Toast.LENGTH_SHORT).show();
                    }
                    hideProgressDialog();
                }
            });
}
/**
 * \brief Get info about channels of playback stream of a mixer simple element
 * \param elem Mixer simple element handle
 * \param channel Mixer simple element channel identifier
 * \return 0 if channel is not present, 1 if present
 */
int snd_mixer_selem_has_playback_channel(snd_mixer_elem_t *elem, snd_mixer_selem_channel_id_t channel)
{
	/* Validate the element handle (CHECK_BASIC is a validation macro that
	 * bails out of this function on an invalid element). */
	CHECK_BASIC(elem);
	/* Delegate to the simple-element ops table: query (SM_OPS_IS_CHANNEL)
	 * whether the given channel exists on the playback (SM_PLAY) side. */
	return sm_selem_ops(elem)->is(elem, SM_PLAY, SM_OPS_IS_CHANNEL, (int)channel);
}
#pragma once

#ifndef DRAKE_COMMON_SYMBOLIC_HEADER
#error Do not directly include this file. Include "drake/common/symbolic.h".
#endif

#include <ostream>

#include "drake/common/symbolic.h"

namespace drake {
namespace symbolic {
/**
 * Represents symbolic rational function. A function f(x) is a rational
 * function, if f(x) = p(x) / q(x), where both p(x) and q(x) are polynomials of
 * x. Note that rational functions are closed under (+, -, x, /). One
 * application of rational function is in polynomial optimization, where we
 * represent (or approximate) functions using rational functions, and then
 * convert the constraint f(x) = h(x) (where h(x) is a polynomial) to a
 * polynomial constraint p(x) - q(x) * h(x) = 0, or convert the inequality
 * constraint f(x) >= h(x) as p(x) - q(x) * h(x) >= 0 if we know q(x) > 0.
 *
 * This class represents a special subset of the symbolic::Expression. While a
 * symbolic::Expression can represent a rational function, extracting the
 * numerator and denominator, generally, is quite difficult; for instance, from
 * p1(x) / q1(x) + p2(x) / q2(x) + ... + pn(x) / qn(x). This class's explicit
 * structure facilitates this decomposition.
 */
class RationalFunction {
 public:
  /** Constructs a zero rational function 0 / 1. */
  RationalFunction();

  DRAKE_DEFAULT_COPY_AND_MOVE_AND_ASSIGN(RationalFunction)

  /**
   * Constructs the rational function: numerator / denominator.
   * @param numerator The numerator of the fraction.
   * @param denominator The denominator of the fraction.
   * @pre denominator cannot be structurally equal to 0.
   * @pre None of the indeterminates in the numerator can be decision variables
   * in the denominator; similarly none of the indeterminates in the denominator
   * can be decision variables in the numerator.
   * @throws std::logic_error if the precondition is not satisfied.
   */
  RationalFunction(Polynomial numerator, Polynomial denominator);

  /**
   * Constructs the rational function: p / 1. Note that we use 1 as the
   * denominator.
   * @param p The numerator of the rational function.
   */
  explicit RationalFunction(const Polynomial& p);

  /**
   * Constructs the rational function: c / 1. Note that we use 1 as the
   * denominator.
   * @param c The numerator of the rational function.
   */
  explicit RationalFunction(double c);

  ~RationalFunction() = default;

  /// Getter for the numerator.
  [[nodiscard]] const Polynomial& numerator() const { return numerator_; }

  /// Getter for the denominator.
  [[nodiscard]] const Polynomial& denominator() const { return denominator_; }

  // Compound-assignment arithmetic against rational functions, polynomials,
  // and double constants.
  RationalFunction& operator+=(const RationalFunction& f);
  RationalFunction& operator+=(const Polynomial& p);
  RationalFunction& operator+=(double c);

  RationalFunction& operator-=(const RationalFunction& f);
  RationalFunction& operator-=(const Polynomial& p);
  RationalFunction& operator-=(double c);

  RationalFunction& operator*=(const RationalFunction& f);
  RationalFunction& operator*=(const Polynomial& p);
  RationalFunction& operator*=(double c);

  RationalFunction& operator/=(const RationalFunction& f);
  RationalFunction& operator/=(const Polynomial& p);
  RationalFunction& operator/=(double c);

  /**
   * Unary minus operation for rational function.
   * if f(x) = p(x) / q(x), then -f(x) = (-p(x)) / q(x)
   */
  friend RationalFunction operator-(RationalFunction f);

  /**
   * Returns true if this rational function and f are structurally equal.
   */
  [[nodiscard]] bool EqualTo(const RationalFunction& f) const;

  /**
   * Returns a symbolic formula representing the condition where this rational
   * function and @p f are the same.
   * If f1 = p1 / q1, f2 = p2 / q2, then f1 == f2 <=> p1 * q2 == p2 * q1
   */
  // NOTE: unlike ordinary ==, this yields a symbolic Formula, not a bool;
  // use EqualTo() for structural comparison.
  Formula operator==(const RationalFunction& f) const;

  /**
   * Returns a symbolic formula representing the condition where this rational
   * function and @p f are not the same.
   */
  Formula operator!=(const RationalFunction& f) const;

  friend std::ostream& operator<<(std::ostream&, const RationalFunction& f);

 private:
  // Throws std::logic_error if an indeterminate of the denominator (numerator,
  // respectively) is a decision variable of the numerator (denominator).
  void CheckIndeterminates() const;
  Polynomial numerator_;
  Polynomial denominator_;
};

// Free binary arithmetic operators over {RationalFunction, Polynomial,
// double} operand combinations.
RationalFunction operator+(RationalFunction f1, const RationalFunction& f2);
RationalFunction operator+(RationalFunction f, const Polynomial& p);
RationalFunction operator+(const Polynomial& p, RationalFunction f);
RationalFunction operator+(RationalFunction f, double c);
RationalFunction operator+(double c, RationalFunction f);

RationalFunction operator-(RationalFunction f1, const RationalFunction& f2);
RationalFunction operator-(RationalFunction f, const Polynomial& p);
RationalFunction operator-(const Polynomial& p, const RationalFunction& f);
RationalFunction operator-(RationalFunction f, double c);
RationalFunction operator-(double c, RationalFunction f);

RationalFunction operator*(RationalFunction f1, const RationalFunction& f2);
RationalFunction operator*(RationalFunction f, const Polynomial& p);
RationalFunction operator*(const Polynomial& p, RationalFunction f);
RationalFunction operator*(RationalFunction f, double c);
RationalFunction operator*(double c, RationalFunction f);

RationalFunction operator/(RationalFunction f1, const RationalFunction& f2);
RationalFunction operator/(RationalFunction f, const Polynomial& p);
RationalFunction operator/(const Polynomial& p, const RationalFunction& f);
RationalFunction operator/(RationalFunction f, double c);
RationalFunction operator/(double c, const RationalFunction& f);

/**
 * Returns the rational function @p f raised to @p n.
 * If n is positive, (f/g)ⁿ = fⁿ / gⁿ;
 * If n is negative, (f/g)ⁿ = g⁻ⁿ / f⁻ⁿ;
 * (f/g)⁰ = 1 / 1.
 */
RationalFunction pow(const RationalFunction& f, int n);

/**
 * Provides the following operations:
 *
 *  - Matrix<RF>         * Matrix<Polynomial> => Matrix<RF>
 *  - Matrix<RF>         * Matrix<double>     => Matrix<RF>
 *  - Matrix<Polynomial> * Matrix<RF>         => Matrix<RF>
 *  - Matrix<double>     * Matrix<RF>         => Matrix<RF>
 *
 * where RF is a shorthand for RationalFunction.
 *
 * @note that these operator overloadings are necessary even after providing
 * Eigen::ScalarBinaryOpTraits. See
 * https://stackoverflow.com/questions/41494288/mixing-scalar-types-in-eigen
 * for more information
 */
#if defined(DRAKE_DOXYGEN_CXX)
template <typename MatrixL, typename MatrixR>
Eigen::Matrix<RationalFunction, MatrixL::RowsAtCompileTime,
              MatrixR::ColsAtCompileTime>
operator*(const MatrixL& lhs, const MatrixR& rhs);
#else
// SFINAE: enabled only for Eigen matrix pairs where one scalar is
// RationalFunction and the other is Polynomial or double.
template <typename MatrixL, typename MatrixR>
typename std::enable_if<
    std::is_base_of<Eigen::MatrixBase<MatrixL>, MatrixL>::value &&
        std::is_base_of<Eigen::MatrixBase<MatrixR>, MatrixR>::value &&
        ((std::is_same<typename MatrixL::Scalar, RationalFunction>::value &&
          (std::is_same<typename MatrixR::Scalar, Polynomial>::value ||
           std::is_same<typename MatrixR::Scalar, double>::value)) ||
         (std::is_same<typename MatrixR::Scalar, RationalFunction>::value &&
          (std::is_same<typename MatrixL::Scalar, Polynomial>::value ||
           std::is_same<typename MatrixL::Scalar, double>::value))),
    Eigen::Matrix<RationalFunction, MatrixL::RowsAtCompileTime,
                  MatrixR::ColsAtCompileTime>>::type
operator*(const MatrixL& lhs, const MatrixR& rhs) {
  // Promote both operands to RationalFunction and multiply at one scalar type.
  return lhs.template cast<RationalFunction>() *
         rhs.template cast<RationalFunction>();
}
#endif
}  // namespace symbolic
}  // namespace drake

#if !defined(DRAKE_DOXYGEN_CXX)
namespace Eigen {

// Defines Eigen traits needed for Matrix<drake::symbolic::RationalFunction>.
template <>
struct NumTraits<drake::symbolic::RationalFunction>
    : GenericNumTraits<drake::symbolic::RationalFunction> {
  static inline int digits10() { return 0; }
};

// Informs Eigen that BinaryOp(LhsType, RhsType) gets ResultType.
#define DRAKE_SYMBOLIC_SCALAR_BINARY_OP_TRAITS(LhsType, RhsType, BinaryOp,    \
                                               ResultType)                    \
  template <>                                                                 \
  struct ScalarBinaryOpTraits<LhsType, RhsType, BinaryOp<LhsType, RhsType>> { \
    enum { Defined = 1 };                                                     \
    typedef ResultType ReturnType;                                            \
  };

// Informs Eigen that LhsType op RhsType gets ResultType
// where op ∈ {+, -, *, /, conj_product}.
#define DRAKE_SYMBOLIC_SCALAR_SUM_DIFF_PRODUCT_CONJ_PRODUCT_TRAITS(           \
    LhsType, RhsType, ResultType)                                             \
  DRAKE_SYMBOLIC_SCALAR_BINARY_OP_TRAITS(LhsType, RhsType,                    \
                                         internal::scalar_sum_op, ResultType) \
  DRAKE_SYMBOLIC_SCALAR_BINARY_OP_TRAITS(                                     \
      LhsType, RhsType, internal::scalar_difference_op, ResultType)           \
  DRAKE_SYMBOLIC_SCALAR_BINARY_OP_TRAITS(                                     \
      LhsType, RhsType, internal::scalar_product_op, ResultType)              \
  DRAKE_SYMBOLIC_SCALAR_BINARY_OP_TRAITS(                                     \
      LhsType, RhsType, internal::scalar_conj_product_op, ResultType)

// Informs Eigen that RationalFunction op Polynomial gets RationalFunction
// where op ∈ {+, -, *, conj_product}.
DRAKE_SYMBOLIC_SCALAR_SUM_DIFF_PRODUCT_CONJ_PRODUCT_TRAITS(
    drake::symbolic::RationalFunction, drake::symbolic::Polynomial,
    drake::symbolic::RationalFunction)

// Informs Eigen that Polynomial op RationalFunction gets RationalFunction
// where op ∈ {+, -, *, conj_product}.
DRAKE_SYMBOLIC_SCALAR_SUM_DIFF_PRODUCT_CONJ_PRODUCT_TRAITS(
    drake::symbolic::Polynomial, drake::symbolic::RationalFunction,
    drake::symbolic::RationalFunction)

// Informs Eigen that double op RationalFunction gets RationalFunction
// where op ∈ {+, -, *, conj_product}.
DRAKE_SYMBOLIC_SCALAR_SUM_DIFF_PRODUCT_CONJ_PRODUCT_TRAITS(
    double, drake::symbolic::RationalFunction,
    drake::symbolic::RationalFunction)

// Informs Eigen that RationalFunction op double gets RationalFunction
// where op ∈ {+, -, *, conj_product}.
DRAKE_SYMBOLIC_SCALAR_SUM_DIFF_PRODUCT_CONJ_PRODUCT_TRAITS(
    drake::symbolic::RationalFunction, double,
    drake::symbolic::RationalFunction)

#undef DRAKE_SYMBOLIC_SCALAR_BINARY_OP_TRAITS
#undef DRAKE_SYMBOLIC_SCALAR_SUM_DIFF_PRODUCT_CONJ_PRODUCT_TRAITS

}  // namespace Eigen
#endif  // !defined(DRAKE_DOXYGEN_CXX)
The terrorist organization Boko Haram made news this week following their kidnapping of over 200 Nigerian schoolgirls. The kidnappings occurred weeks ago, but outcry from the families finally hit the international stage. Boko Haram is an Islamist group opposed to Western society. They attack schools in particular as a sign of their contempt for the West. Their most recent attack involved kidnapping the schoolgirls and threatening to force the girls to marry or selling them into slavery for as little as $12. This sparked international outrage once the media caught wind of it. There are numerous social media campaigns supporting the girls and their families. President Obama has pledged to give support to the Nigerian government, including sending in troops. All of that is perfectly fine, and also perfectly ironic. Why? Primarily because the international concern is only for girls. Boko Haram does not just attack girls. The group has also attacked boys, most recently setting fire to a school after locking the boys in: Islamic militants set fire to a locked dormitory at a school in northern Nigeria, then shot and slit the throats of students who tried to escape through windows during a pre-dawn attack Tuesday. At least 58 students were killed, including many who were burned alive. They “slaughtered them like sheep” with machetes, and gunned down those who ran away, said one teacher, Adamu Garba. Soldiers guarding a checkpoint near the coed government school were mysteriously withdrawn hours before it was targeted by the militants, said the spokesman for the governor of northeastern Yobe state. Boko Haram has a history of targeting schools, and generally does not discriminate by sex. However, the media does, and apparently murdered boys does not raise people’s concern as much as kidnapped girls. That is not to say that nothing should be done about the girls. We cannot allow a terrorist organization to kidnap anyone and threaten to sell them into slavery. 
As an international community we ought to move against them, and it should not have taken as long as it has for the Nigerian government to realize they could not manage the situation on their own. That said, we cannot allow a terrorist organization to murder people either. We cannot give them a pass because they killed boys. Our indifference to male victimization likely emboldens Boko Haram. If they can murder boys in their sleep and receive no outcry, why can they not kidnap 200 girls? Boko Haram is another example of how we as a community turn a blind eye to violence until it is done to the “wrong” group. The same situation happens in Afghanistan. We rail against any violence against women and girls in that country, but pay no mind to the countless boys kidnapped, raped, and sold by Afghan warlords funded by Coalition dollars. We should be outraged by those acts of violence, but we are not. No one writes petitions, no one creates hashtags, no one sends troops. As long as the victims are boys, no one cares. In that sense the girls are lucky. Had they been boys, we would still be talking about the White House Correspondents’ dinner.
import {Component, ViewChild, EventEmitter, Output} from 'angular2/core';
import {NgForm} from 'angular2/common';
import {MODAL_DIRECTIVES, ModalComponent} from 'ng2-bs3-modal/ng2-bs3-modal';

import {Project} from './project';

/**
 * Modal dialog for creating a new project. Hosts the shared project-editor
 * template inside a bootstrap modal and emits the populated Project model
 * through `saveRequest` when the modal closes.
 */
@Component({
    selector: 'project-creator',
    directives: [MODAL_DIRECTIVES],
    templateUrl: '/app/projects/project-editor.component.html'
})
export class ProjectCreatorModalComponent {
    @ViewChild('modal') modal: ModalComponent;

    /** Fired when the modal closes, carrying the project to be created. */
    @Output() saveRequest: EventEmitter<Project> = new EventEmitter<Project>();

    /** Allowed project states; the first entry is the default for new projects. */
    statuses: string[] = ['Not Started', 'In Progress', 'Complete'];

    /** Backing model for the editor form; id -1 marks a not-yet-saved project. */
    model = new Project(-1, '', this.statuses[0]);

    /** Labels consumed by the shared editor template. */
    saveAction: string = 'Create';
    title: string = 'Create New Project';

    /** Shows the creation dialog. */
    open() {
        this.modal.open();
    }

    /**
     * Modal close callback; emits the edited model.
     * (Removed a leftover `console.log('closed')` debug statement.)
     * NOTE(review): this emits on every close, including dismissal — confirm
     * whether a cancel should suppress the saveRequest.
     */
    closed() {
        this.saveRequest.emit(this.model);
    }
}
// LineIntersect finds the intersection of the lines (infinite) through p,q and a,b, // or returns false if they are parallel. func LineIntersect(p, q, a, b F2) (F2, bool) { dx1, dx2, dy1, dy2 := p.X-q.X, a.X-b.X, p.Y-q.Y, a.Y-b.Y det := dx2*dy1 - dx1*dy2 if -Epsilon < det && det < Epsilon { return F2{}, false } pq := (q.X*p.Y - p.X*q.Y) ab := (b.X*a.Y - a.X*b.Y) return F2{pq*dx2 - ab*dx1, pq*dy2 - ab*dy1}.Div(det), true }
use proconio::input;

fn main() {
    input! {
        n: usize,
        a: [usize; n]
    };
    // Sum of parities at even indices (0, 2, 4, ...), i.e. the number of
    // odd values sitting at those positions.
    let answer: usize = a.iter().step_by(2).map(|v| v % 2).sum();
    println!("{}", answer);
}
import java.util.concurrent.*;
import java.util.*;

/**
 * Demonstrates a small, bounded {@link ThreadPoolExecutor}: two worker
 * threads service a queue of at most five pending tasks, so the six demo
 * tasks submitted from {@link #main} exactly fill the pool plus the queue.
 */
class Parallel2048 {
    // Two threads run concurrently; everything else waits in `queue`.
    int poolSize = 2;
    int maxPoolSize = 2;
    long keepAliveTime = 10;
    ThreadPoolExecutor threadPool = null;

    // Bounded work queue: a seventh simultaneous submission would be
    // rejected with RejectedExecutionException (the executor's default policy).
    final ArrayBlockingQueue<Runnable> queue = new ArrayBlockingQueue<Runnable>(
            5);

    public Parallel2048() {
        threadPool = new ThreadPoolExecutor(poolSize, maxPoolSize,
                keepAliveTime, TimeUnit.SECONDS, queue);
    }

    /** Submits a task for execution and reports the current queue depth. */
    public void runTask(Runnable task) {
        threadPool.execute(task);
        System.out.println("Task count.." + queue.size());
    }

    /** Stops accepting new tasks; already-submitted tasks still complete. */
    public void shutDown() {
        threadPool.shutdown();
    }

    /**
     * Builds one demo task that prints its label once a second, ten times.
     * Extracted to replace six near-identical anonymous classes in main().
     */
    private static Runnable makeTask(final String label) {
        return new Runnable() {
            public void run() {
                for (int i = 0; i < 10; i++) {
                    try {
                        System.out.println(label);
                        Thread.sleep(1000);
                    } catch (InterruptedException ie) {
                        // Demo code: swallow interruption and keep looping.
                    }
                }
            }
        };
    }

    public static void main(String args[]) {
        Parallel2048 mtpe = new Parallel2048();
        mtpe.runTask(makeTask("First Task"));
        mtpe.runTask(makeTask("Second Task"));
        mtpe.runTask(makeTask("Third Task"));
        mtpe.runTask(makeTask("Fourth Task"));
        mtpe.runTask(makeTask("Fifth Task"));
        mtpe.runTask(makeTask("Sixth Task"));
        mtpe.shutDown();
    }
}
International experts want a fully tested and licensed Ebola vaccine scaled up for mass use in the near future, according to a recent World Health Organization (WHO) meeting. WHO organized a panel of more than 70 experts, from scientists to medical ethicists, to reach consensus over the status of Ebola vaccines currently being tested. WHO released news from the meeting on Wednesday, the day after the U.S. confirmed its first patient with Ebola. According to the WHO statement, the mission is to “accomplish, within a matter of months, work that normally takes from two to four years, without compromising international standards for safety and efficacy.” Two vaccines have great potential and are ready for safety testing. The first vaccine is developed by the U.S. National Institute of Allergy and Infectious Diseases (NIAID) and GlaxoSmithKline. That vaccine is currently undergoing a human-safety trial at the National Institutes of Health (NIH) campus in Bethesda, Md., as well as at the University of Oxford. The second vaccine is under development by the Public Health Agency of Canada in Winnipeg. That vaccine will start a human-safety trial in early October. Canada has already donated 800 vials of their vaccine to WHO, the organization says. Once more data is available on what dosing should be used, WHO says these vials could translate to around 1,500 to 2,000 doses of the vaccine. The goal of the safety trials is to confirm that the vaccines are safe enough to move on to a larger human trial. Dr. Anthony Fauci, director of NIAID and the lead on the NIH vaccine, tells TIME the safety trial is so far “uneventful,” which is a good thing. “There really [are] no red flags so it seems to be going along quite well,” he says. The vaccine had already been tested in monkeys and showed very promising results.
WHO and other organizations have been expediting the testing and approval processes for these drugs since early summer, but the NIH’s vaccine has been under development since 2003. At the time, it did not have the pharmaceutical funding to move forward. “[In 2003] there was very little interest for the obvious reasons that there was no disease around,” says Fauci. “Recently, we now have a much more vigorous interest from pharmaceutical companies.” WHO hopes that in October and November, the vaccines will make it through their safety trials and into next-stage human testing. Between January and February 2015, the goal is to have next-phase human trials approved and initiated in countries affected with Ebola. People at a higher risk for the disease, like health care workers, are a priority. The meeting did not highlight ZMapp, the drug given to two American patients who were evacuated from Liberia to Emory University in Atlanta. Mapp Biopharmaceutical, the company that produces ZMapp, is a small team that says its resources are now exhausted. Their drug is grown in tobacco plants and requires waiting for a crop in order to produce more of it. One of the ways trials could be quickened is if the researchers take a “wedge” approach, which means that a wedge or slice of the study population is selected for a first step in the trial, and what is learned in that step is then used on the next slice of the participants. While trials are ongoing, there are still significant technical obstacles that need to be addressed once a vaccine is ready for mass use: how vaccines will be distributed, for instance, and how low-resource health systems can ensure that vaccines are stored below –100 degrees. In the WHO meeting, the phrase “Nothing can be allowed to delay this work” was repeated multiple times, and since Ebola has now infected more than 7,000 people and even made it to the U.S., the race to develop an effective vaccine is becoming all the more frantic. 
Contact us at [email protected].
//ACKNOWLEDGEMENT: This is Example 7.7 discussed in the OpenGL Programming Guide (8th edition) on
//page 376 with some tweaks to match better with the Phong Reflectance model we discussed in class.
//I am also requiring that the programmer use the w value of position to indicate whether the light
//is a point light (w = 1) or a directional (w = 0).
#ifndef LIGHTPROPERTIES_H
#define LIGHTPROPERTIES_H

#include <glm.hpp>

using namespace glm;

// Per-light data consumed by the Phong shading code.  Field order and types
// are part of the contract (the struct is presumably mirrored by a GLSL
// uniform block -- TODO confirm before reordering members).
// NOTE(review): GLfloat/GLuint are assumed to come from an OpenGL header the
// including translation unit provides; only <glm.hpp> is included here.
struct LightProperties {
    vec4 color;             // color of light
    vec4 position;          // location of light if w = 1,
                            // otherwise the direction towards the light

    // Spotlight attributes (can only use if light is local, i.e. position.w = 1)
    vec4 spotLightValues;   // if spotLightValues.x > 0 then it is a spotlight
                            // spotLightValues.y is the cosine of the spot cutoff angle
                            // spotLightValues.z is the spot exponent
    vec4 spotConeDirection; // direction that the spotlight is shining

    // Local light attenuation coefficients (position.w must be 1 to use these)
    GLfloat constantAttenuation;
    GLfloat linearAttenuation;
    GLfloat quadraticAttenuation;

    GLuint isEnabled;       // non-zero if this light contributes to shading
};

#endif
// Barrel module: re-exports the component system's public API so consumers
// can import everything from this single entry point.
export { Component } from "./component";
export { ComponentBuilder } from "./componentBuilder";
export {
  ComponentRegistryRenderer,
  ComponentRegisterRendererJSDOM,
} from "./componentRegistryRenderer";
export { ComponentRegistry, MapComponentRegistry } from "./componentRegistry";
#!/usr/bin/env python
#
# Maze generator example for RPi-SSD1306
#
# Adapted from:
# https://github.com/rm-hull/maze/blob/master/src/maze/generator.clj

import time

from demo_opts import device
from oled.render import canvas
from random import randrange

# Wall bit-flags: each cell records whether its NORTH and WEST walls still
# stand; a cell's east/south walls belong to the neighbouring cell.
NORTH = 1
WEST = 2


class Maze(object):
    """A perfect maze (exactly one path between any two cells), carved by an
    iterative randomized depth-first search over a width x height grid."""

    def __init__(self, size):
        self.width = int(size[0])
        self.height = int(size[1])
        self.size = self.width * self.height
        self.generate()

    def offset(self, coords):
        """Converts (x, y) co-ords into an offset in the maze data."""
        return ((coords[1] % self.height) * self.width) + (coords[0] % self.width)

    def coords(self, offset):
        """Converts an offset back into (x, y) co-ords."""
        # Floor division keeps y integral on Python 3 (plain `/` would
        # produce a float here).
        return (offset % self.width, offset // self.width)

    def neighbours(self, pos):
        """Return offsets of the cells orthogonally adjacent to `pos`."""
        neighbours = []
        # A cell has a northern neighbour whenever it is not in the top row,
        # i.e. pos >= width.  (The previous `pos > width` test wrongly
        # excluded the first cell of the second row, pos == width.)
        if pos >= self.width:
            neighbours.append(pos - self.width)
        if pos % self.width > 0:
            neighbours.append(pos - 1)
        if pos % self.width < self.width - 1:
            neighbours.append(pos + 1)
        if pos + self.width < self.size:
            neighbours.append(pos + self.width)
        return neighbours

    def is_wall_between(self, p1, p2):
        """
        Checks to see if there is a wall between two (adjacent) points in
        the maze. The return value will indicate true if there is a wall
        else false. If the points aren't adjacent, false is returned.
        """
        if p1 > p2:
            return self.is_wall_between(p2, p1)
        if p2 - p1 == self.width:
            return self.data[p2] & NORTH != 0
        if p2 - p1 == 1:
            return self.data[p2] & WEST != 0
        return False

    def knockdown_wall(self, p1, p2):
        """
        Knocks down the wall between the two given points in the maze.
        Assumes that they are adjacent, otherwise it doesn't make any sense
        (and wont actually make any difference anyway).
        """
        if p1 > p2:
            return self.knockdown_wall(p2, p1)
        # Clear the relevant wall bit explicitly.  (The previous code used
        # `&= WEST` / `&= NORTH`, which only works because exactly two bits
        # are in use; `&= ~NORTH` states the intent directly.)
        if p2 - p1 == self.width:
            self.data[p2] &= ~NORTH
        if p2 - p1 == 1:
            self.data[p2] &= ~WEST

    def generate(self):
        """Carve the maze with an iterative randomized DFS (backtracker)."""
        self.data = [NORTH | WEST] * self.size
        visited = {0}
        stack = [0]
        while stack:
            curr = stack[-1]
            unvisited = [n for n in self.neighbours(curr) if n not in visited]
            if not unvisited:
                stack.pop()
                continue
            nxt = unvisited[randrange(len(unvisited))]
            self.knockdown_wall(curr, nxt)
            visited.add(nxt)
            if len(unvisited) == 1:
                # `curr` has no unvisited neighbours left after this move,
                # so drop it before descending into `nxt`.
                stack.pop()
            stack.append(nxt)

    def render(self, draw, scale=lambda a: a):
        """Draw the maze onto `draw`, mapping each point through `scale`."""
        for i in range(self.size):
            line = []
            p1 = self.coords(i)
            if self.data[i] & NORTH > 0:
                p2 = (p1[0] + 1, p1[1])
                line += p2 + p1
            if self.data[i] & WEST > 0:
                p3 = (p1[0], p1[1] + 1)
                line += p1 + p3
            draw.line(list(map(scale, line)), fill=1)
        draw.rectangle(list(map(scale, [0, 0, self.width, self.height])),
                       outline=1)

    def to_string(self):
        """Render the maze as ASCII art (useful for debugging)."""
        s = ""
        for y in range(self.height):
            for x in range(self.width):
                s += "+"
                # Fix: offset() takes a single (x, y) tuple, not two
                # positional arguments (the old call raised TypeError).
                if self.data[self.offset((x, y))] & NORTH != 0:
                    s += "---"
                else:
                    s += "   "
            s += "+\n"
            for x in range(self.width):
                if self.data[self.offset((x, y))] & WEST != 0:
                    s += "|"
                else:
                    s += " "
                s += "   "
            s += "|\n"
        s += "+---" * self.width
        s += "+\n"
        return s


def demo(iterations):
    """Cycle through a few zoom levels, drawing a fresh maze each second."""
    screen = (128, 64)
    for _ in range(iterations):
        for scale in [2, 3, 4, 3]:
            # Integer cell counts that fit the display at this zoom level
            # (floor division matches the old truncating behaviour).
            sz = [z // scale - 1 for z in screen]
            with canvas(device) as draw:
                Maze(sz).render(draw, lambda z: int(z * scale))
            time.sleep(1)


if __name__ == "__main__":
    demo(20)
// Funcion que agrega una carta a la baraja especificada // de este modo solo se puede usar la misma funcion para // ingresar cartas a distintas barajas void agrega_carta(Baraja *baraja1, Carta a_carta) { baraja1->puntaje_total += a_carta.puntaje; baraja1->cartas[baraja1->index] = a_carta; baraja1->index++; }
/**
 * Convenience for {@link #remoteMapJournal(String, ClientConfig,
 * DistributedPredicate, DistributedFunction, JournalInitialPosition)}
 * which will pass only {@link EntryEventType#ADDED ADDED}
 * and {@link EntryEventType#UPDATED UPDATED} events and will
 * project the event's key and new value into a {@code Map.Entry}.
 *
 * @param mapName      name of the map in the remote cluster whose event
 *                     journal to read
 * @param clientConfig configuration of the client that connects to the
 *                     remote cluster
 * @param initialPos   where in the journal to start reading
 * @param <K>          type of the map keys
 * @param <V>          type of the map values
 * @return a stream source emitting one {@code Map.Entry} per put/update event
 */
@Nonnull
public static <K, V> StreamSource<Entry<K, V>> remoteMapJournal(
        @Nonnull String mapName,
        @Nonnull ClientConfig clientConfig,
        @Nonnull JournalInitialPosition initialPos
) {
    return remoteMapJournal(mapName, clientConfig, mapPutEvents(), mapEventToEntry(), initialPos);
}
def count_advancers(scores, n):
    """Return how many contestants advance to the next round.

    A contestant advances if their score is positive and at least as high
    as the score of the contestant in n-th place (1-based).

    Args:
        scores: list of integer scores, sorted in non-increasing order.
        n: qualifying place (1-based index into scores).

    Returns:
        Number of advancing contestants.
    """
    threshold = scores[n - 1]
    # Convert each token once; the original re-parsed every value twice.
    return sum(1 for s in scores if s > 0 and s >= threshold)


if __name__ == "__main__":
    # First input line: m (number of contestants) and n (qualifying place);
    # second line: the m scores.  Guarded so importing this module no longer
    # reads from stdin as a side effect.
    m, n = map(int, input().split())
    scores = [int(token) for token in input().split()]
    print(count_advancers(scores, n))
def scrape_reports(options):
    """Scrape GAO reports for each year in range, paging 50 results at a time."""
    base_url = 'http://www.gao.gov/browse/date/custom?adv_begin_date=01/01/' +\
        '%s&adv_end_date=12/31/%s&rows=50&o=%s'
    archive = 1970
    year_range = inspector.year_range(options, archive)
    for year in year_range:
        row_offset = 0
        while True:
            doc = utils.beautifulsoup_from_url(
                base_url % (year, year, row_offset))
            for listing in doc.select("div.listing"):
                report = process_report(listing, year_range)
                if report:
                    inspector.save_report(report)
            # The pager's last "non-current" link reads "Next ..." whenever
            # another page of results exists for this year.
            pager = doc.select("a.non-current_page")
            if pager and pager[-1].text.startswith('Next'):
                row_offset += 50
            else:
                break
def tetrahedron(draw, r, im, theta=np.pi / 12, shift=np.array([1000, 1000, 0]),
                scale=150, rgb=(216, 52, 52), ind=0,
                sun_dir='C:\\Users\\rohit\\Documents\\GitHub\\base\\numerical'
                        '\\python\\visualization\\Animation\\Images\\Misc\\'):
    """Draw a rotated tetrahedron plus the shadow its edges cast from a "sun".

    Args:
        draw: PIL ImageDraw instance to render onto.
        r: 3x3 rotation matrix applied to the tetrahedron's vertices.
        im: PIL Image the sun sprite is pasted into.
        theta: unused; retained for backward compatibility with callers.
        shift: translation applied after scaling.  NOTE(review): a mutable
            default argument — safe only because it is read, never mutated.
        scale: scene scale factor in pixels.
        rgb: colour used for vertices, edges and shadows.
        ind: frame index; alternates between the two sun sprites.
        sun_dir: directory holding Sun0.jpg/Sun1.jpg (generalized from a
            hard-coded absolute path; default preserves old behaviour).
    """
    im_sun = Image.open(sun_dir + 'Sun' + str(ind % 2) + '.jpg')
    im_sun.thumbnail((150, 150), Image.ANTIALIAS)
    # Unit tetrahedron: four alternate corners of the cube [-1, 1]^3.
    tet_orig = np.array([
        [1, 1, 1],
        [-1, -1, 1],
        [1, -1, -1],
        [-1, 1, -1]
    ])
    tet = np.dot(tet_orig, r)
    # Vertices.
    for vertex in tet:
        ver = vertex * scale + shift
        draw.ellipse((ver[0] - 5, ver[1] - 5, ver[0] + 5, ver[1] + 5), fill=rgb)
    # Light source ("bulb") to the left of the solid, with the sun sprite
    # pasted centred on it.
    bulb = np.array([-2.5, 0, 0]) * scale + shift[:3]
    draw.ellipse((bulb[0] - 5, bulb[1] - 5, bulb[0] + 5, bulb[1] + 5), fill=rgb)
    pasteImage(im_sun, im, bulb - np.array([50, 50, 0]))
    # Edges, and their shadows projected from the bulb onto the ground plane.
    for i in range(len(tet)):
        for k in range(i, len(tet)):
            ver1 = tet[i] * scale + shift
            ver2 = tet[k] * scale + shift
            draw.line((ver1[0], ver1[1], ver2[0], ver2[1]), fill=rgb, width=5)
            ver1prime = project_on_plane(bulb, ver1)
            ver2prime = project_on_plane(bulb, ver2)
            draw.line((ver1prime[0], ver1prime[1], ver2prime[0], ver2prime[1]),
                      fill=rgb, width=2)
    draw_plane(draw, scale)
Comparative Yield of Different Diagnostic Tests for Tuberculosis among People Living with HIV in Western Kenya Background Diagnosis followed by effective treatment of tuberculosis (TB) reduces transmission and saves lives in persons living with HIV (PLHIV). Sputum smear microscopy is widely used for diagnosis, despite limited sensitivity in PLHIV. Evidence is needed to determine the optimal diagnostic approach for these patients. Methods From May 2011 through June 2012, we recruited PLHIV from 15 HIV treatment centers in western Kenya. We collected up to three sputum specimens for Ziehl-Neelsen (ZN) and fluorescence microscopy (FM), GeneXpert MTB/RIF (Xpert), and culture, regardless of symptoms. We calculated the incremental yield of each test, stratifying results by CD4 cell count and specimen type; data were analyzed to account for complex sampling. Results From 778 enrolled patients, we identified 88 (11.3%) laboratory-confirmed TB cases. Of the 74 cases who submitted 2 specimens for microscopy and Xpert testing, ZN microscopy identified 25 (33.6%); Xpert identified those plus an additional 18 (incremental yield = 24.4%). Xpert testing of spot specimens identified 48 (57.0%) of 84 cases; whereas Xpert testing of morning specimens identified 50 (66.0%) of 76 cases. Two Xpert tests detected 22/24 (92.0%) TB cases with CD4 counts <100 cells/μL and 30/45 (67.0%) of cases with CD4 counts ≥100 cells/μl. Conclusions In PLHIV, Xpert substantially increased diagnostic yield compared to smear microscopy and had the highest yield when used to test morning specimens and specimens from PLHIV with CD4 count <100 cells/μL. TB programs unable to replace smear microscopy with Xpert for all symptomatic PLHIV should consider targeted replacement and using morning specimens. Introduction Tuberculosis (TB) is the leading cause of death for people living with HIV (PLHIV) worldwide, responsible for an estimated 360,000 deaths in 2013. 
Active screening for TB among PLHIV has been shown to reduce mortality and morbidity, but laboratory confirmation of TB remains challenging in PLHIV. In resource-limited settings, detection of acid-fast bacilli (AFB) using sputum smear microscopy is still the most widely used diagnostic test for pulmonary TB. As a diagnostic test, however, it does not perform well among PLHIV, with 30%-50% sensitivity compared to liquid culture in research settings and as low as 9% sensitivity in operational settings. Sputum culture is often considered the standard for laboratory confirmation of TB, but requires resources and technical skill not routinely available in many settings. More recently, an automated polymerase chain reaction platform, the Cepheid Xpert MTB/RIF assay (Cepheid, Sunnyvale CA), was developed for rapid diagnosis of TB. In meta-analysis, the pooled sensitivity of this assay for culture-confirmed TB was 61% in PLHIV with sputum smear-negative TB and 97% in PLHIV with sputum smear-positive TB. The World Health Organization (WHO) recommends Xpert MTB/RIF as the initial diagnostic test among PLHIV. Many countries have expanded implementation and use of Xpert MTB/RIF, especially for PLHIV; however, financial and other logistical considerations, as well as inconsistent utilization, have slowed expansion of this technology. As countries continue to improve strategies for TB case-finding among PLHIV, it would be helpful to know the programmatic utility of sputum microscopy in settings where Xpert MTB/ RIF is available, the incremental value of each Xpert MTB/RIF test performed, and the role of sputum culture in programs using Xpert MTB/RIF. Perhaps more importantly, programs require a better understanding of how patient and specimen characteristics influence the yield of these tests, so that they might maximize their utility while minimizing financial and human resource requirements. 
We conducted a study to characterize the value of various diagnostic tests, including the incremental yield of Xpert MTB/RIF and culture above smear microscopy on sputum, stool, and lymph node aspirate (LNA) specimens collected from PLHIV and stratified findings by specimen type (morning or spot) and CD4 cell count. Study design and participants For enrollment, we stratified all 24 public HIV care and treatment facilities in three districts in western Kenya with at least 200 enrolled patients into small (200-1000 patients, n = 14) and large (>1000 patients; n = 10) clinics. The number of sites selected from each stratum was proportional to the size of the stratum and we randomly selected nine small and six large facilities. Between May 2011 and June 2012, we enrolled consecutive, consenting patients over a ten week period at each clinic. Patients were eligible for the study if they were seven years of age or older, had documented HIV infection, had not been enrolled in any HIV care or treatment program in the preceding two years, and had not received TB treatment at any time in the preceding one year. Clinical care and specimen collection We recorded demographic and clinical information at the initial encounter, screening participants for TB symptoms using a standardized questionnaire. All participants were asked to provide one morning and two spot sputum specimens (hereafter referred to as "spot 1" and "spot 2"). Participants from the three largest facilities were also asked to provide a single stool specimen. We asked trained clinicians at those three facilities, plus one additional facility with sufficient capacity, to aspirate subcutaneous lymph nodes in the head or neck region that were greater than one centimeter in diameter. 
Laboratory procedures Specimens were transported to the Kenya Medical Research Institute (KEMRI)/CDC TB reference laboratory in Kisumu for direct Ziehl-Neelsen (ZN) and concentrated fluorescence microscopy (FM), Xpert MTB/RIF testing and mycobacterial culture. Testing algorithms for each specimen are presented in Fig 1. Specimen Processing. We processed spot 1 and morning sputum specimens for culture using standard methods including decontamination with N-acetyl-L-cysteine and sodium hydroxide sodium citrate (1.5% final concentration), followed by centrifugation and resuspension in phosphate buffer (pH 6.8). Because it was not cultured, we did not process spot 2 specimens prior to testing. We processed stool specimens by emulsifying one gram in 10 ml of sterile water, vortexing with sterile glass beads and then filtering through sterile gauze. Lymph node aspirates (LNAs) were not processed prior to culture inoculation. Smear Microscopy. We stained direct smears prepared from the morning and spot 2 sputum specimens according to the ZN method, viewed them under 1000x magnification, and graded them according to World Health Organization (WHO) recommendations. We also prepared direct smears from LNAs with volumes >0.5 ml. We stained LNA smears and the concentrated smears prepared from processed specimen pellets (spot 1, morning sputum, and stool), using FM methods, viewed them under 400x magnification, and graded them according to WHO recommendations. All positive smears were confirmed by a second reader. Xpert 1 MTB/RIF. We performed Xpert MTB/RIF testing on all spot 2 sputum specimens and on morning sputum specimens with volumes 3.0 ml according to manufacturer's recommendations. Briefly, we mixed sputum specimens with a 2:1 ratio of sample reagent and incubated at room temperature for 15 minutes prior to Xpert MTB/RIF testing. Liquid culture. 
We cultured stool, LNA, and sputa from spot 1 and morning specimens with the BACTEC Mycobacteria Growth Indicator Tube (MGIT) 960 system using conventional methods. We reported culture tubes with no growth after 42 days incubation in the MGIT 960 instrument as culture-negative. We inspected cultures flagged as positive for the presence of AFB by ZN smear microscopy and sub-cultured them to blood agar plates to assess contamination. We discarded MGIT-positive, AFB-negative cultures with evidence of contamination. We re-incubated those without contamination in an auxiliary incubator for a total incubation time of 42 days, periodically re-examining them by ZN smear microscopy and blood agar cultures. We referred all MGIT-positive, AFB-positive cultures for identification of mycobacterial species. To salvage contaminated isolates, we redecontaminated MGIT-positive, AFB-positive cultures and re-inoculated them into MGIT culture media. Identification. We identified AFB-positive cultures as Mycobacterium tuberculosis complex (MTBC) using the Capilia TB Neo (Tauns Laboratories, Numazu, Japan) or MGIT TBc ID (Becton Dickinson, Sparks, MD) immunochromatographic assays. We tested all culture isolates that were AFB-positive but negative on the immunochromatographic assay for nontuberculous mycobacteria (NTM) using the Genotype CM line probe assay (Hain Lifescience, Nehren, Germany). Definitions and Data Analysis We regarded MGIT cultures flagged negative by the instrument at 42 days and culture isolates identified to have NTM (but not MTBC) as negative for MTBC. We regarded AFB-negative cultures with evidence of bacterial or fungal contamination as contaminated and not evaluable. We defined a TB case as any participant with laboratory-confirmed MTBC by at least one liquid culture test from any specimen or at least one Xpert MTB/RIF test if they had no previous history of TB treatment. 
We considered patients who had at least two sputum specimens negative for MTBC by Xpert MTB/RIF or liquid culture, and no positive result from any specimen, not to have TB. We excluded from analysis participants whose TB status could not be determined (i.e., those who did not have laboratory-confirmed MTBC and who did not have at least two specimens negative for MTBC). In comparisons with morning sputum, we regarded both spot sputum specimens as the same. For a given series of tests, we defined the incremental yield of each test in the series as the number and proportion of TB cases that were diagnosed using that test that were not diagnosed by the previous test(s) in the series, divided by the total number of TB cases that received all tests in the series. For calculations of incremental yield, we restricted analyses to cases that had all tests in the series and presupposed that patients would have a spot specimen collected before the morning specimen. Data were captured electronically at the 15 clinical sites and in the laboratory and were analyzed using SAS version 9.3 (SAS Institute, Cary NC). We performed all calculations, including all proportions, as domain analyses, controlling for the complex design of the survey (i.e., clustering, stratification, weighting). Frequencies are presented as crude numbers, but proportions are based on weighted frequencies to account for the size of the clinic from which the patients were enrolled. Analyses incorporated the use of a finite population correction factor to account for the large sampling fraction. Chi-squared tests incorporated a Rao-Scott second order correction to account for the survey design. Ethical Considerations All aspects of this study were approved by the Ethics Review Committee (ERC) of KEMRI (protocol number 1842) and by Institutional Review Board (IRB) G of the Human Resources Protection Office at the U.S. CDC (protocol number 5928). 
We requested a waiver of consent for testing of sputum specimens because: 1) the data and specimen collection, and corresponding test procedures, were not experimental (they were already recommended and used by the Kenya TB program); 2) the study activities posed no more than minimal risk to study participants; 3) participation did not adversely affect the welfare or rights of the patients in any way; and 4) to require formal written consent would have imposed an undue burden on the clinical staff of these busy clinics. Written informed consent was obtained for collection and testing of stool and lymph specimens, as these procedures were not routine or recommended at the time this study was conducted. The study protocols, including waiver of consent as specified above, were approved by the KEMRI ERC and CDC IRB with this methodology clearly described. Results Final enrollment included 778 participants, from whom we collected 760 spot 1 sputa, 711 morning sputa, 738 spot 2 sputa, 283 stool and 17 LNA specimens; 692 participants submitted all three sputa, 61 submitted two, 11 submitted one and 14 submitted no specimens. Among the morning specimens collected, 49 had insufficient volume for Xpert MTB/RIF testing. The TB status of 38 participants could not be determined because of multiple contaminated or missing specimens, and we excluded them from further analyses. Table 1 displays demographic and clinical characteristics of participants. MTBC was confirmed by liquid culture or Xpert MTB/RIF in specimens from 88 (11.3%; 95% confidence interval (CI): 10.0-12.6) participants, hereafter referred to as patients with TB; 85 had MTBC identified in sputum specimens, two had MTBC identified by stool culture only and one had MTBC identified by culture of LNA only. Thirty-three patients with TB had stool specimens cultured, 11 (33.3%; 95% CI: 9.5-57.1) of which were positive for MTBC; five patients with TB had LNA specimens cultured, all of which were positive. 
There were seven patients who were diagnosed by Xpert MTB/RIF alone, two of whom had contamination on both cultured sputum specimens, one of whom had one contaminated and one negative cultured sputum specimen, and four of whom had two negative cultured sputum specimens. One of them had a positive microscopy test, five were symptomatic and none had been previously diagnosed or treated for TB at any time in the past. Comparison of morning and spot sputum specimens among patients with TB Among patients with laboratory-confirmed TB, morning specimens appeared to be more sensitive than spot specimens for identification of MTBC by ZN microscopy, FM, and Xpert MTB/RIF (Table 2). Conversely, in liquid culture 69.2% of the morning specimens and 74.6% of spot specimens grew MTBC, while 10.7% and 7.1%, respectively, were contaminated (Table 2). Diagnostic Yield Calculations of incremental yield require that all TB cases received all tests in the series; because patients with TB did not all receive the same tests, the cohorts from which the incremental yields were calculated differ in size. Fig 2 displays the incremental diagnostic yield of each test series. Diagnostic yield of liquid culture among patients with TB. Of the 84 patients with TB who provided one spot and one morning sputum specimen for liquid culture, 62 (73.7%; 95% CI: 68.5-78.8) were identified by culture of the spot specimen and 58 (68.9%; 95% CI: 61.1-76.7) were identified by culture of the morning specimen; 46 (54.6%; 95% CI: 47.0-62.1) were identified by both tests. Twelve (14.3%; 95% CI: 9.9-18.7) additional cases were diagnosed by culturing the morning specimen after the spot specimen. Of the 10 (12.0%; 95% CI: 8.6-15.4) patients not diagnosed by culture of two specimens, seven were identified by Xpert MTB/RIF testing of sputum, four of whom had two negative cultures and three of whom had one or two contaminated cultures.
The three remaining patients had two negative sputum cultures and were identified by culture of an extra-pulmonary specimen: two patients had a positive stool culture and one patient had a positive LNA culture ( Table 2). Comparison of ZN microscopy and FM, liquid culture, and Xpert MTB/ RIF performed on morning specimen among patients with TB Among the 76 patients with TB who had a morning sputum specimen tested by each method, 24 (31.5%; 95% CI: 23.4-39.6) were identified by ZN microscopy; FM would have identified all of those and an additional five (6.5%; 95% CI: 2.8-10.2). Xpert MTB/RIF identified all patients with a positive microscopy test and an additional 21 (28.0%; 95% CI: 16.6-39.3) smear-negative patients. Liquid culture identified an additional 10 (13.0%; 95% CI: 6.4-19.7) cases who were not identified by either microscopy or Xpert MTB/RIF, but Xpert MTB/RIF identified six patients who were not identified by liquid culture (two of whom were culture negative and four of whom had a contaminated culture). In total, liquid culture and Xpert MTB/RIF performed on a single morning specimen identified 60 (79.0%; 95% CI: 72.4-85.6) patients who had a morning sputum specimen tested by both methods ( Table 2). Discussion With an incremental diagnostic yield of almost 18% over FM, and almost 25% over direct ZN smear microscopy, a single Xpert MTB/RIF greatly increased confirmation of TB disease, similar to what has been reported from other studies. A second Xpert MTB/RIF diagnosed an additional 16% of TB patients not diagnosed by microscopy or a single Xpert MTB/RIF test. These data provide compelling programmatic evidence of the advantage of Xpert MTB/RIF over microscopy and again call into question the relevance of smear microscopy for TB diagnosis where Xpert MTB/RIF is available. 
Perhaps more notably, these data demonstrate how specimen type and origin impact test performance, which is directly relevant to TB programs struggling to determine the cost-benefit of these tests, and how best to maximize their performance and impact. In our study, direct ZN microscopy detected only about one-third of TB cases confirmed by Xpert MTB/RIF or liquid culture, even though microscopy was performed in a research facility by well-trained technicians. Concentrated FM microscopy performed slightly better, approximating what has been reported in the literature. Using ZN microscopy as an initial test for evaluating TB in PLHIV is not likely to be cost saving: in our study, very few of the 778 participants had a positive sputum smear, all others requiring investigation would have been referred for additional testing. Reliance on microscopy may even decrease the likelihood of treatment if clinicians are unaware of its remarkably limited sensitivity and assume that a negative test is meaningful, potentially increasing TB transmission and TB-related mortality. As TB programs scale up newer molecular technologies, replacement of microscopy with Xpert MTB/RIF may be more attractive, both clinically and financially, than reserving sequential testing with Xpert MTB/RIF for only those who test negative by smear microscopy, especially in settings of high HIV prevalence. Liquid culture is generally thought to be the most sensitive diagnostic technology, with a limit of detection that is generally under 10 organisms. While this is true, especially in idealized laboratory settings, the overall practical performance (diagnostic yield) and utility of liquid culture are limited by specimen contamination and the laboratory efforts to limit it. We found that Xpert MTB/RIF had an overall diagnostic yield directly comparable to that of liquid culture, and performed better in persons with low CD4 counts compared to those with CD4 counts 100 cells/μl. 
Conversely, contamination of liquid culture increased as CD4 cell counts declined, reaching a frequency of 19% in cultured sputum specimens from patients with CD4 cell counts <100 cells/μL. These unevaluable results diminish the diagnostic yield of specimen culture, and partially explain why the sensitivity of culture did not increase with worsening immune compromise. It is generally considered that in PLHIV with lower CD4 cell counts, less cavitary disease leads to decreasing numbers of bacilli in sputum. However, our findings support the more recent hypothesis that those with the most severe immune compromise may suffer from unchecked, interstitial mycobacterial growth, which could cause a rebound in overall bacillary load to levels detectable by Xpert MTB/RIF, whether or not they are detected by microscopy. Our data suggest that the performances of both liquid culture and Xpert MTB/RIF were also affected by specimen type (morning vs. spot). Previous research has demonstrated that the increased sensitivity of culturing a morning sputum specimen is offset by the increased contamination rate, and we found this to be true. In our analyses, approximately one in every seven cultured specimens was contaminated, and this contamination rate was higher for morning specimens than for spot specimens and, as stated above, higher for those with lower CD4 cell counts. This proportion of contamination is similar to that seen in other programmatic laboratories in resource-limited settings, and is an important limitation for culture-based diagnostic approaches. Importantly, this trade-off does not extend to Xpert MTB/ RIF, which, as a molecular test, is largely unaffected by the presence of contaminating fungus or bacteria. 
While this finding was not statistically significant, our data show a trend towards higher diagnostic yield of morning specimens than spot specimens when tested by Xpert MTB/ RIF; this increased when the morning specimens were from patients with CD4 cell counts <100 cells/μL. It is substantially easier to implement automated genetic testing, such as Xpert MTB/RIF, than to implement liquid culture, which is technically demanding and expensive, and the global scale-up of Xpert MTB/RIF is certainly outpacing that of liquid culture. The debate about which test has the higher diagnostic yield may soon be obviated, however, by a new MTB/RIF cartridge for the GeneXpert machine, which has been reported to have a limit of detection comparable to liquid culture. (http://www.croiconference.org/sessions/xpert-mtbrifultra-new-near-patient-tb-test-sensitivity-equal-culture) When those cartridges are available and have been validated in program settings, they may prove to have a significantly higher diagnostic yield than culture in all circumstances. But culture-based testing does allow clinicians to distinguish between active TB disease and previously treated disease, and Xpert MTB/ RIF does not. Moreover, culture-based testing allows for subsequent drug susceptibility determination and refinement of treatment for patients with drug-resistant disease; in circumstances where drug resistance is an issue and an indication for testing, culturing of specimens is a top priority. As each test offers different benefits, TB diagnosis would undoubtedly be maximized by utilizing both tests, and interpreting results in the correct clinical context, which would increase case-finding and reduce transmission. Programs must realistically assess their own capabilities and implement the best sequences of testing to address their needs. 
Globally, TB programs have been incorporating Xpert MTB/RIF into screening and diagnostic algorithms, but are struggling to determine if and under what circumstances it should be repeated if an initial test is negative. Our data suggest that the cumulative sensitivity of two Xpert MTB/RIF tests is higher in those with a CD4 count <100 cells/μL (92%) than in those with a CD4 count ≥100 cells/μL (67%). Even in resource-limited settings, it may prove cost-effective to repeat Xpert MTB/RIF testing for PLHIV with CD4 counts <100 cells/μL who are at the highest risk for both TB disease and TB-related early mortality. Lymphadenopathy in our study population may have been underreported, as rates were lower than reported elsewhere, and aspiration was rarely performed. Importantly, however, MTBC was identified by LNA culture in 29% of participants who had an aspiration and in all TB cases for which LNAs were cultured. This adds to other literature on the impact of this diagnostic technique and provides rationale to increase programmatic capacity and utilization in the correct clinical circumstance (i.e., in patients with apparent enlarged lymphadenopathy whose clinical presentation suggests TB). This study has several limitations. Our ability to compare same-specimen test results was limited by the testing algorithm in our study, which, apart from the morning specimen, assigned different tests to different specimens. Because Xpert MTB/RIF testing of morning specimens was restricted by volume in our protocol, it may be that analyses of the morning specimen results were biased. We think this is unlikely, as we found no association between sputum volume and culture result for the morning specimen, nor did we find an association between Xpert MTB/RIF result and sputum volume for spot specimens. We also assumed that the yield for spot 1 and spot 2 sputum specimens was the same. We included as cases patients who were identified by Xpert MTB/RIF alone. 
Given reports of false-positive Xpert MTB/RIF results, this may seem somewhat controversial. The most likely causes of discordance between Xpert MTB/RIF and culture include the presence of dead bacteria, leading to false-positive Xpert results, and the consequences of specimen decontamination, which can render mycobacteria non-viable, leading to false-negative culture results. None of the patients in this study who had discordant results were previously treated for TB, and most of them were symptomatic. Given the prevalence of TB in this population, and the known limitations of culture, we felt it appropriate to include them as cases. Because we were unable to confirm clinical characteristics for all participating patients, we could not determine the true prevalence of lymphadenopathy, which was lower than expected. This also limits what we can say about the use of LNA for TB diagnosis. However, our use of multiple specimens for Xpert MTB/RIF and culture diminishes the possibility that we missed cases of active TB disease. Sputum smear microscopy has long been the most widely used method for diagnosing TB, but has limited sensitivity, and using it in parallel with more sensitive technologies is unlikely to be worthwhile. A single Xpert MTB/RIF is a better diagnostic test, and its sensitivity may be further enhanced by targeted testing and using a morning specimen, a consideration for programs unable to use it as an initial test for all symptomatic patients. It may be prudent to offer a second Xpert MTB/RIF for the most immune compromised patients, who would derive the biggest benefit and for whom the test is most sensitive. Additionally, sensitivity is expected to improve as the next generation of cartridges, Xpert MTB/RIF Ultra, are introduced into program settings; evaluating the impact of specimen type and CD4 cell count on the performance of these new cartridges should be an important early investigation that could directly inform program policy. 
Research to determine strategies to further investigate symptomatic PLHIV that test negative by Xpert MTB/RIF, and those that will test negative by Xpert MTB/ RIF Ultra, is urgently needed.
// Shutdown (uv_shutdown) shuts down the outgoing (write) side of a duplex
// stream. It waits for pending write requests to complete before the shutdown
// takes effect. The receiver must refer to an initialized stream. req should
// be an uninitialized shutdown request struct; cb is called after the
// shutdown is complete, receiving the request and the libuv status code.
func (s *UvStream) Shutdown(req *C.uv_shutdown_t, cb func(*Request, int)) C.int {
	// Stash the Go callback in the per-request callbackInfo carried in
	// req.data, so the C-side completion can dispatch back into Go.
	// NOTE(review): assumes req.data was pre-populated with a *callbackInfo
	// by the caller/allocator — confirm against the request constructor.
	cbi := (*callbackInfo)(req.data)
	cbi.shutdown_cb = cb
	return uv_shutdown(req, s.Stream)
}
Penetrating gazes: The poetics of sight and visual display in popular Indian cinema Following a sequence of visually stunning dance routines accompanied by sounds of sung poetry, the climax for most popular Indian films is the intense exchange of gazes between the lover and beloved. This exchange of penetrating gazes not only expresses sexual desire but, in a move beyond voyeurism, signifies a physical interaction through vision. This intense and even tactile gaze relies on two different notions of vision: that of drishti, activated in Indian religious contexts, and of nazar, so essential for the exposition of love in Persianate poetry. The lyrics of film songs rely for their affectivity on these notions of drishti and nazar. Committed to memory by film viewers, this repertoire of film songs generates a poetics of sight and visual display within the film-going public. By framing and focusing in on eyes and thereby simulating moments of intense visual interaction, Bollywood film directors have poetically nuanced a modern visual genre for contemporary Indian audiences which actively employs notions of vision and visuality that are specific to South Asia. Moreover, in cueing ocularity to the aural, Bollywood films constitute a subaltern modernity that disrupts the minimalist silences and ocular-centricity of most ‘Western’ modernisms, This signals the polyscopic, intersensual and poetic character of modern Indian visual experiences, both portrayed on, and enacted off, the screen.
We at Gunaxin have been profiling some of the best young actresses working in entertainment today. We started with the Top Ten Young Actresses, examining the best under thirty talent in the industry. People were familiar with many of the names on that list, as to be expected with great young talent. So to complement that post, we created our Top Ten Up and Coming Actresses to look at actresses on the cusp of stardom. The idea was to give people a heads up on some of the stars of tomorrow and give them some names they might not be familiar with. Throughout our examination, however, we’ve overlooked one large segment of the industry, a medium where a ton of talented young actresses work. That medium is television. Due to grueling schedules and timing of filming a TV series, most actresses don’t have the time to fit much feature film work in, let alone find that right movie to break out in. With a lack of feature film work, we ended up excluding many great names from our other two posts. Now is the time for those television actresses to get their recognition. We think the following dozen actresses have all the necessary talent to do even greater work than they’re doing now. And by that we mean moving over to do feature film work. Not that there’s anything wrong with television work. If anything, TV is at its creative peak, allowing for fantastic shows like Mad Men, Lost, Dexter, The Tudors, and many others. We’re not trying to insult television work. Yet we recognize that movies remain the pinnacle of the entertainment industry. You see people leave television shows to do feature films, recent examples include Jennifer Garner, Jennifer Aniston, Jessica Biel, and Jessica Alba. You don’t see it the other way around, unless someone needs the work. Movies seem to be where the big money and fame is located, and the top actresses gravitate to that medium. Of all the ladies currently working in television, we feel these twelve have the potential to do that something more. 
Please note, that if you’re looking for Amanda Seyfried, currently on HBO’s Big Love, we already profiled over here and decided not to include her again and be repetitive. So here are our Destined Dozen: 12. Evangeline Lilly Ranking Lilly on a list like this is difficult. She has the talent and looks to be ranked much higher, but does she have the ambition? People around her have noted her desire for privacy and the fact that she does not seek the limelight. In fact, she has stated that she lives in a “bubble”, and that she is not familiar with pop culture nor does she own a TV. With Lost in its final few seasons, will we see Lilly move on to major motion pictures or will she fade from the public eye? Time will have to tell on that one. But until her future becomes more certain, we’ll keep her a little farther down this list than she could otherwise rank. Major Television Roles: Lost, Kate Austen (2004-present) Select Filmography: Lilly’s movie roles to date have amounted to little more than scenery. Lost was her first speaking role, although she does appear in the French-Canadian psychological thriller Afterwards which is currently being released around the world. Upcoming Project: The Hurt Locker, an intense portrayal of a bomb squad that must disarm bombs in the heat of combat. 11. Missy Peregrym This Canadian actress and former fashion model began modeling at the age of eighteen. The modeling career opened doorways to appear in commercials and later a number of television guest spots. A self described tomboy, she was heavily involved in high school athletics, including soccer, snowboarding, basketball, and other assorted outdoor sports. The athleticism came in handy for her first film role, as it required a working knowledge of gymnastics. She broke through at the age of twenty-five with a reoccurring role on Season 2 of Heroes, and was subsequently cast as the female lead in the CW’s fantasy drama Reaper. 
Peregrym is at the beginning of what could be a very successful career. Major Television Roles: Heroes, Candice Wilmer (2007); Reaper, Andi Prendergast (2007-present) Select Filmography: Although Peregrym appeared in an uncredited role in the bomb Catwoman, she effectively made her film debut playing the lead in Stick It, a movie about a rebellious teenager who is forced to return to her former life in gymnastics. Upcoming Project: None currently scheduled. 10. Sophia Bush Bush was the 2000 Tournament of Roses Parade Queen and a junior at USC before landing the role of Brooke Davis on One Tree Hill. Like many young stars, she has sprinkled in several movies in between seasons of the CW drama. She almost had her big break when she was cast as Kate Brewster in Terminator 3: Rise of the Machines, but was replaced because the director thought she was too young. With One Tree Hill winding down in its sixth season, Bush has the opportunity to explore other options in the near future. The hope is that she can transition over to movies like other former UPN and WB (now the CW) stars such as Jessica Biel, Sarah Michelle Gellar, or Katie Holmes. At worst, she can fall back on her sexy voice and provide voice-overs. Major Television Roles: One Tree Hill, Brooke Davis (2003-present) Select Filmography: Like many young actresses Bush has appeared in a couple of horror films, including the remake of The Hitcher and Stay Alive. But she’s not just about horror as she was featured in the teen comedy John Tucker Must Die and the action flick Supercross. Upcoming Project: Table for Three, a comedy about a single guy who invites a couple to move into his apartment, only to become the third wheel and have them involved in all aspects of his life. 9. 
Katee Sackhoff Only twenty-eight, Sackhoff is a ten year veteran of television, earning a number of guest roles and a regular gig on the Richard Dreyfuss vehicle The Education of Max Bickford before landing her current role on Battlestar Galactica. Last season Sackhoff pulled double duty, shooting scenes for both Battlestar and her role on the Bionic Woman remake. Her roles on both shows helped set a standard for action-heroine cool and complexity. Still, few stars from sci-fi shows, even acclaimed ones, achieve mainstream stardom. Odds don’t favor Sackhoff breaking out, particularly in light of the fact that Battlestar gets low ratings despite its acclaimed status. But don’t count her out just yet. Sackhoff has the rare ability to combine the qualities of strength and vulnerability at once, and already has a cult following from her sci-fi work. With her tenure on Battlestar Galactica coming to a close, she can start the next chapter in her career. Maybe movies are the next step? Major Television Roles: The Education of Max Bickford, Nell Bickford (2001-2002); Battlestar Galactica, Captain Kara ‘Starbuck’ Thrace (2004-present); Bionic Woman, Sarah Corvus (2007). Select Filmography: Sackhoff does not have a lengthy film history, making her motion picture debut in My First Mister, and appearing in film in Halloween: Resurrection as Jenna “Jen” Danzig. She has also done a few made for television movies. Upcoming Project: None currently scheduled. 8. Olivia Wilde Wilde got her start on short-lived Fox show Skin (most memorable for the line, “His father is the district attorney!” in promos) before breaking out in her role as Alex Kelly, a bisexual, who had a relationship with Mischa Barton’s character Marissa on The O.C. She was later cast in the midseason replacement The Black Donnellys (aptly so because she holds both Irish and U.S. passports) before landing her current gig on House. 
Her movie career has fewer hits on the résumé, although she was given strong consideration to play the Bond girl Vesper Lynd in the 2006 hit Casino Royale, a role that eventually went to Eva Green. Major Television Roles: Skin, Jewel Goldman (2003-2004); The O.C., Alex Kelly (2004-2005); The Black Donnellys, Jenny Reilly (2007); House, Thirteen/Dr. Remy Hadley (2007-present) Select Filmography: Wilde has appeared in minor roles in the teen comedy The Girl Next Door alongside Elisha Cuthbert and the true story turned drama Alpha Dog. Her biggest role came in the recent adventure thriller Turistas. Upcoming Project: The Year One, a comedy with Jack Black and Michael Cera about a couple of lazy hunter-gatherers who set off on an epic journey through the ancient world. 7. Anna Friel This British star got her start at the age of 13, kicking off a series of appearances on various British television shows and movies. Her work includes two memorable moments, a lesbian kiss on the soap Brookside and a much discussed ménage à trois scene in the TV film The Tribe. Friel then moved on to work in features, although mainly in smaller roles, before joining the cast of the Bryan Fuller created drama Pushing Daisies. Friel was nominated for a Golden Globe in the show’s first season. Sadly, as we are publishing this article, word has leaked that Pushing Daisies has been cancelled by ABC. That puts an end to the Charlotte Charles character, but allows Friel to pursue more big screen employment. Major Television Roles: Pushing Daisies, Charlotte ‘Chuck’ Charles (2007-present) Select Filmography: Friel’s largest roles have been as Roz Harmison in the internationally successful Goal! franchise, a series of movies about soccer, and as Lady Claire in the Michael Crichton adaptation Timeline. Upcoming Project: Land of the Lost, a Will Ferrell comedy about a forest ranger who inadvertently stumbles into a mysterious land populated by dinosaurs and other creatures. 6. 
Summer Glau A professional dancer since age 12, Glau is a classically trained ballerina and knows several other dances. She parlayed her skills into several television commercial appearances before being cast as a ballerina (guess those lessons came in handy) in an episode of Angel. It was there that she caught the eye of writer/director and show co-creator Joss Whedon. Whedon would later cast her in his short-lived sci-fi series Firefly. Glau has followed up her role on Firefly with a number of sci-fi shows, furthering her fan base. Although relatively new to the spotlight, Glau’s already won two Saturn Awards (2005 for Serenity and 2008 for Terminator: The Sarah Connor Chronicles) and in 2008 she was nominated for two Teen Choice Awards. Seems like only a matter of time before we see Glau on the big screen more often. Major Television Roles: Firefly, River Tam (2002-2003); The Unit, Crystal Burns (2006-2007); The 4400, Tess Doerner (2005-2007); Terminator: The Sarah Connor Chronicles, Cameron Phillips (2008-present) Select Filmography: Glau’s movie experience is limited to Serenity, the feature film sequel for Firefly. Upcoming Project: None currently scheduled. 5. Hayden Panettiere Panettiere started young. I mean real young. She began modeling at the age of four months and started appearing in commercials at age eleven months. At age 6, she landed a role on the ABC soap opera One Life to Live, and two years later landed a role on the CBS soap opera Guiding Light. Several television guest spots, feature films, and made-for-TV movies would follow before Panettiere landed her part on Heroes as a high school cheerleader with regenerative healing powers. And if her live acting résumé was not enough, she does voice work, including providing the voice for Dot in Pixar’s A Bug’s Life and assorted voices in several video games. Oh, and in 1999 Panettiere was nominated for a Grammy. And did I mention she’s not even twenty yet? I think she has a nice career ahead of her. 
Major Television Roles: One Life to Live, Sarah Victoria ‘Flash’ Roberts (1994-1997); Guiding Light, Elizabeth ‘Lizzie’ Spaulding (1996-2000); Ally McBeal, Maddie Harrington (2002); Heroes, Claire Bennet (2006-present) Select Filmography: In movies, Panettiere was first recognized for her role as Sheryl Yoast, Will Patton’s daughter, in the Disney football film Remember the Titans. She also starred as a (surprise, surprise) cheerleader in the direct to video Bring It On: All or Nothing and appeared in the Kate Hudson romancer Raising Helen. Upcoming Project: I Love You Beth Cooper, a comedy about a nerdy valedictorian who proclaims his love for the hottest and most popular girl in school. 4. Anna Paquin Paquin’s inclusion on this list can certainly be debated. At age 11, she won an Academy Award for Best Supporting Actress for her role in Jane Campion’s The Piano. That made her the second-youngest Oscar winner in history after Tatum O’Neal (and let’s hope her career turns out better). Paquin followed up her Oscar win with a series of roles, although generally in smaller films and generally of a supporting nature. The one major exception was her role as Rogue in the popular X-Men trilogy. If you’ve won an Oscar and appeared in major motion pictures, can you be destined for greater things? In this case I believe the answer is yes. Paquin has yet to become a leading lady in Hollywood, and after headlining the popular HBO vampire drama True Blood, she may be in line for such a promotion. Major Television Roles: True Blood, Sookie Stackhouse (2008-present) Select Filmography: In addition to her role in the X-Men trilogy, Paquin may be most recognized for her roles in Fly Away Home, Finding Forrester, and as Polexia Aphrodisi in Almost Famous. Upcoming Project: Margaret, Paquin co-stars in the Matt Damon drama about a young woman who witnesses a bus accident and is caught up with its consequences. 3. 
Blake Lively Lively can thank her brother for her career, as he made his agent send out auditions on her behalf despite her disinterest in acting. One of the auditions landed her the role of Bridget in The Sisterhood of the Traveling Pants movie, and Lively’s career was launched. Her first film was only in 2005, but in that short time period Lively’s star has been on a meteoric rise. After a Teen Choice Award nomination for the Pants movie, she really began to gain in popularity last year after landing the starring role on the teen TV series Gossip Girl. This time she won two Teen Choice Awards for her work. With the blonde bombshell only a few years into her career, Lively should have plenty of big roles in the future. Major Television Roles: Gossip Girl, Serena van der Woodsen (2007-present) Select Filmography: In addition to her role in the The Sisterhood of the Traveling Pants movie and its sequel, Lively has starred in the college comedy Accepted. Upcoming Project: The Private Lives of Pippa Lee, a star-studded drama about Pippa Lee a woman who engages in a period of reflection and finds herself heading toward a quiet nervous breakdown. 2. Kristen Bell Bell rose to fame and became best known for her lead role in Veronica Mars, a detective drama where Bell plays the impossibly smart daughter of a private eye that solves crimes Nancy Drew-style. Trust me, it’s a lot better than it may sound. The first season in particular is very compelling television. Bell parlayed that success into some movie parts and a role on Heroes as the electric Elle Bishop. She also provides the narration to the teen hit Gossip Girl. With her Heroes future up in the air, Bell may soon crossover into feature films full time. 
Major Television Roles: Veronica Mars, Veronica Mars (2004-2007); Heroes, Elle Bishop (2007-present); Gossip Girl, Narrator (2007-present); Select Filmography: Bell’s greatest film success came this year for her role in the Judd Apatow comedy Forgetting Sarah Marshall. She had previously starred in the horror remake Pulse, and for a real treat of a movie check out her brief role in David Mamet’s Spartan. Bell fans may also want to know that the long awaited and long delayed Fanboys should finally be released in February next year. Fanboys is about a group of Star Wars fans that travel to Skywalker Ranch to steal an early copy of Episode I: The Phantom Menace in 1997. Upcoming Project: Serious Moonlight, a high-powered attorney duct tapes her husband to the toilet right before their home is invaded by burglars. 1. Katherine Heigl Heigl started at age nine as a model before breaking into films in smaller roles. Her real breakthrough, however, was her role on the teen cult hit Roswell. It introduced her to a younger audience and got her noticed in Hollywood circles. After Roswell was cancelled, she took a role on the uber-popular medical drama Grey’s Anatomy. In a weird twist, her characters on Roswell and Grey’s both are named Isabel (although spelled differently). Heigl won an Emmy last year for her work on Grey’s and also broke out in movies with her role in the Apatow helmed comedy Knocked Up. Her choice for number one on this list may be a bit boring due to the fact she’s already sowing the seeds of a Hollywood breakout. However, she can’t be ignored and seems primed to become a major star very soon (and she’s nearly already there). Major Television Roles: Roswell, Isabel Evans (1999-2002); Grey’s Anatomy, Dr. Isobel ‘Izzie’ Stevens (2005-present) Select Filmography: In addition to Knocked Up, Heigl starred this year in the modest hit 27 Dresses. 
She also starred in The Ringer, a Johnny Knoxville comedy about pretending to be retarded that has a surprising amount of heart to it. Upcoming Project: The Ugly Truth, where Heigl stars as a romantically challenged morning show producer who gets caught up in a series of outrageous tests by her chauvinistic correspondent, played by Gerard Butler.