/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

import { shallow } from 'enzyme';
import { Location } from 'history';
import React from 'react';
import { RRRRenderResponse } from 'react-redux-request';
import { ErrorGroupAPIResponse } from 'x-pack/plugins/apm/server/lib/errors/get_error_group';
import { APMError } from 'x-pack/plugins/apm/typings/es_schemas/Error';
// @ts-ignore
import { mockMoment } from '../../../../utils/testHelpers';
import { DetailView } from './index';

describe('DetailView', () => {
  beforeEach(() => {
    // Avoid timezone issues
    mockMoment();
  });

  it('should render empty state', () => {
    const wrapper = shallow(
      <DetailView errorGroup={{} as any} urlParams={{}} location={{} as Location} />
    );
    expect(wrapper.isEmptyRender()).toBe(true);
  });

  it('should render Discover button', () => {
    const errorGroup: RRRRenderResponse<ErrorGroupAPIResponse> = {
      args: [],
      status: 'SUCCESS',
      data: {
        occurrencesCount: 10,
        error: ({
          '@timestamp': 'myTimestamp',
          http: { request: { method: 'GET' } },
          url: { full: 'myUrl' },
          service: { name: 'myService' },
          user: { id: 'myUserId' },
          error: { exception: { handled: true } },
          transaction: { id: 'myTransactionId', sampled: true }
        } as unknown) as APMError
      }
    };

    const wrapper = shallow(
      <DetailView errorGroup={errorGroup} urlParams={{}} location={{} as Location} />
    ).find('DiscoverErrorLink');

    expect(wrapper.exists()).toBe(true);
    expect(wrapper).toMatchSnapshot();
  });

  it('should render StickyProperties', () => {
    const errorGroup: RRRRenderResponse<ErrorGroupAPIResponse> = {
      args: [],
      status: 'SUCCESS',
      data: { occurrencesCount: 10, error: {} as APMError }
    };

    const wrapper = shallow(
      <DetailView errorGroup={errorGroup} urlParams={{}} location={{} as Location} />
    ).find('StickyErrorProperties');

    expect(wrapper.exists()).toBe(true);
  });

  it('should render tabs', () => {
    const errorGroup: RRRRenderResponse<ErrorGroupAPIResponse> = {
      args: [],
      status: 'SUCCESS',
      data: {
        occurrencesCount: 10,
        error: ({
          '@timestamp': 'myTimestamp',
          service: {},
          user: {}
        } as unknown) as APMError
      }
    };

    const wrapper = shallow(
      <DetailView errorGroup={errorGroup} urlParams={{}} location={{} as Location} />
    ).find('EuiTabs');

    expect(wrapper.exists()).toBe(true);
    expect(wrapper).toMatchSnapshot();
  });

  it('should render TabContent', () => {
    const errorGroup: RRRRenderResponse<ErrorGroupAPIResponse> = {
      args: [],
      status: 'SUCCESS',
      data: {
        occurrencesCount: 10,
        error: ({
          '@timestamp': 'myTimestamp',
          context: {}
        } as unknown) as APMError
      }
    };

    const wrapper = shallow(
      <DetailView errorGroup={errorGroup} urlParams={{}} location={{} as Location} />
    ).find('TabContent');

    expect(wrapper.exists()).toBe(true);
    expect(wrapper).toMatchSnapshot();
  });
});
import numpy as np
from scipy.spatial import distance

# Cache of previously generated layouts, keyed by (array_radius, pad_radius, seed).
XY_CACHE = {}


def poisson_disc_sample(array_radius, pad_radius, candidates=100, d=2, seed=None):
    """Poisson-disc sampling: points fall inside a disc of radius `array_radius`
    and keep at least `pad_radius` between neighbors (rejection sampling)."""
    rng = np.random.default_rng(seed)
    uniform = rng.uniform
    randint = rng.integers

    # Return a cached layout if this configuration was generated before.
    key = array_radius, pad_radius, seed
    if key in XY_CACHE:
        return XY_CACHE[key]

    # Start at the center and grow outward.
    start = np.zeros(d)
    samples = [start]
    queue = [start]

    while queue:
        # Pick a random "active" sample and try to place a new point near it.
        s_idx = randint(len(queue))
        s = queue[s_idx]

        for i in range(candidates):
            # Draw a candidate from a box around the active sample.
            coords = uniform(s - 2 * pad_radius, s + 2 * pad_radius, d)

            # Accept only candidates inside the disc and outside every
            # existing point's exclusion radius.
            in_array = np.sqrt(np.sum(coords ** 2)) < array_radius
            in_ring = np.all(distance.cdist(samples, [coords]) > pad_radius)
            if in_array and in_ring:
                samples.append(coords)
                queue.append(coords)
                break
        else:
            # All candidates were rejected: retire this sample from the queue.
            # (for/else also avoids wrongly retiring a sample when the final
            # candidate succeeds, which the original `(i + 1) == candidates`
            # check did.)
            queue.pop(s_idx)

    samples = np.array(samples)
    XY_CACHE[key] = samples
    return samples
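A minimal usage sketch (the parameter values here are hypothetical, chosen only for illustration):

# 2-D points inside a disc of radius 1.0, at least 0.1 apart.
pts = poisson_disc_sample(array_radius=1.0, pad_radius=0.1, seed=42)
print(pts.shape)  # (n_points, 2); repeating the call with the same key hits XY_CACHE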
Radioactive probes for adrenocorticotropic hormone receptors. Our attempts to develop adrenocorticotropic hormone (ACTH) analogues that can be employed for ACTH receptor identification and isolation began with the synthesis of ACTH fragments containing N epsilon-(dethiobiotinyl)lysine (dethiobiocytin) amide in position 25, to be used for affinity chromatographic purification of hormone-receptor complexes on Sepharose-immobilized avidin resins. Because labeling ACTH or ACTH fragments by conventional iodination techniques destroys biological activity through oxidation of Met4 and incorporation of iodine into Tyr2, we prepared ACTH1-24, ACTH1-25 amide, and ACTH1-25 amide by conventional synthetic techniques. The HPLC profiles and amino acid analyses of the final products indicate that the materials are of a high degree of purity. The amount of tert-butylation of the Trp residue in the peptides was assessed by NMR and was found to be less than 0.5%. All three peptides are equipotent with the standard ACTH1-24 in their ability to stimulate steroidogenesis and cAMP formation in bovine adrenal cortical cells. Iodination of ACTH1-24, with iodogen as the oxidizing agent, has been accomplished without any detectable loss of biological activity. The mono- and diiodo derivatives of ACTH1-24 have been prepared, separated by HPLC, and assayed for biological activity. Both peptides retain the full capacity to stimulate steroidogenesis and cAMP production in bovine adrenal cortical cells.
use crate::lib::{default_sub_command, file_to_lines, parse_lines, Command, SumChecker};
use anyhow::Error;
use clap::{value_t_or_exit, App, Arg, ArgMatches, SubCommand};

pub const REPORT_REPAIR: Command = Command::new(sub_command, "report-repair", run);

struct ReportRepairArgs {
    file: String,
    target: isize,
    number: usize,
}

fn sub_command() -> App<'static, 'static> {
    default_sub_command(
        &REPORT_REPAIR,
        "Looks through the input for n numbers that sum to target. \
         Then multiplies the result and produces the output.",
        "Path to the input file. Input should be newline delimited integers.",
    )
    .arg(
        Arg::with_name("target")
            .short("t")
            .help("Target sum to find.")
            .takes_value(true)
            .required(true),
    )
    .arg(
        Arg::with_name("number")
            .short("n")
            .help("Number of items that must be used in the sum")
            .takes_value(true)
            .required(true),
    )
    .subcommand(
        SubCommand::with_name("part1")
            .about(
                "Searches the default input for two values that sum to 2020. \
                 Then multiplies the result and produces the output.",
            )
            .version("1.0.0"),
    )
    .subcommand(
        SubCommand::with_name("part2")
            .about(
                "Searches the default input for three values that sum to 2020. \
                 Then multiplies the result and produces the output.",
            )
            .version("1.0.0"),
    )
}

fn run(arguments: &ArgMatches) -> Result<(), Error> {
    let report_arguments = match arguments.subcommand_name() {
        Some("part1") => ReportRepairArgs {
            file: "day1/input.txt".to_string(),
            target: 2020,
            number: 2,
        },
        Some("part2") => ReportRepairArgs {
            file: "day1/input.txt".to_string(),
            target: 2020,
            number: 3,
        },
        _ => ReportRepairArgs {
            file: value_t_or_exit!(arguments.value_of("file"), String),
            target: value_t_or_exit!(arguments.value_of("target"), isize),
            number: value_t_or_exit!(arguments.value_of("number"), usize),
        },
    };

    file_to_lines(&report_arguments.file)
        .and_then(|lines| {
            parse_lines(lines, |line| line.parse::<isize>()).map_err(|err| err.into())
        })
        .and_then(|lines| {
            find_multiple_of_sum_of_n(&report_arguments.target, &lines, report_arguments.number)
        })
        .map(|result| {
            println!("{:#?}", result);
        })
}

fn find_multiple_of_sum_of_n(target: &isize, input: &Vec<isize>, n: usize) -> Result<isize, Error> {
    SumChecker::with_vec(input)
        .find_sum_of_n(target, n)
        .map(|result| result.into_iter().fold(1, |acc, number| acc * number))
}
package steering

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import (
    "database/sql"
    "errors"
    "net/http"

    "github.com/apache/trafficcontrol/lib/go-tc"
    "github.com/apache/trafficcontrol/lib/go-util"
    "github.com/apache/trafficcontrol/traffic_ops/traffic_ops_golang/api"

    "github.com/lib/pq"
)

func Get(w http.ResponseWriter, r *http.Request) {
    inf, userErr, sysErr, errCode := api.NewInfo(r, nil, nil)
    if userErr != nil || sysErr != nil {
        api.HandleErr(w, r, inf.Tx.Tx, errCode, userErr, sysErr)
        return
    }
    defer inf.Close()

    steering, err := findSteering(inf.Tx.Tx)
    if err != nil {
        api.HandleErr(w, r, inf.Tx.Tx, http.StatusInternalServerError, nil, errors.New("steering.Get finding: "+err.Error()))
        return
    }
    api.WriteResp(w, r, steering)
}

func findSteering(tx *sql.Tx) ([]tc.Steering, error) {
    steeringData, err := getSteeringData(tx)
    if err != nil {
        return nil, err
    }
    targetIDs := steeringDataTargetIDs(steeringData)
    steeringFilters, err := getSteeringFilters(tx, targetIDs)
    if err != nil {
        return nil, err
    }
    primaryOriginCoords, err := getPrimaryOriginCoords(tx, targetIDs)
    if err != nil {
        return nil, err
    }

    steerings := map[tc.DeliveryServiceName]tc.Steering{}
    for _, data := range steeringData {
        if _, ok := steerings[data.DeliveryService]; !ok {
            steerings[data.DeliveryService] = tc.Steering{
                DeliveryService: data.DeliveryService,
                ClientSteering:  data.DSType == tc.DSTypeClientSteering,
                Filters:         []tc.SteeringFilter{},         // Initialize, so JSON produces `[]` not `null` if there are no filters.
                Targets:         []tc.SteeringSteeringTarget{}, // Initialize, so JSON produces `[]` not `null` if there are no targets.
            }
        }
        steering := steerings[data.DeliveryService]
        if filters, ok := steeringFilters[data.TargetID]; ok {
            steering.Filters = append(steering.Filters, filters...)
        }

        target := tc.SteeringSteeringTarget{DeliveryService: data.TargetName}
        switch data.Type {
        case tc.SteeringTypeOrder:
            target.Order = int32(data.Value)
        case tc.SteeringTypeWeight:
            target.Weight = int32(data.Value)
        case tc.SteeringTypeGeoOrder:
            target.GeoOrder = util.IntPtr(data.Value)
            target.Latitude = util.FloatPtr(primaryOriginCoords[data.TargetID].Lat)
            target.Longitude = util.FloatPtr(primaryOriginCoords[data.TargetID].Lon)
        case tc.SteeringTypeGeoWeight:
            target.Weight = int32(data.Value)
            target.GeoOrder = util.IntPtr(0)
            target.Latitude = util.FloatPtr(primaryOriginCoords[data.TargetID].Lat)
            target.Longitude = util.FloatPtr(primaryOriginCoords[data.TargetID].Lon)
        }
        steering.Targets = append(steering.Targets, target)
        steerings[data.DeliveryService] = steering
    }

    arr := []tc.Steering{}
    for _, steering := range steerings {
        arr = append(arr, steering)
    }
    return arr, nil
}

type SteeringData struct {
    DeliveryService tc.DeliveryServiceName
    SteeringID      int
    TargetName      tc.DeliveryServiceName
    TargetID        int
    Value           int
    Type            tc.SteeringType
    DSType          tc.DSType
}

func steeringDataTargetIDs(data []SteeringData) []int {
    ids := []int{}
    for _, d := range data {
        ids = append(ids, d.TargetID)
    }
    return ids
}

func getSteeringData(tx *sql.Tx) ([]SteeringData, error) {
    qry := `
SELECT
  ds.xml_id as steering_xml_id,
  ds.id as steering_id,
  t.xml_id as target_xml_id,
  t.id as target_id,
  st.value,
  tp.name as steering_type,
  dt.name as ds_type
FROM
  steering_target st
  JOIN deliveryservice ds on ds.id = st.deliveryservice
  JOIN deliveryservice t on t.id = st.target
  JOIN type tp on tp.id = st.type
  JOIN type dt on dt.id = ds.type
ORDER BY
  steering_xml_id,
  target_xml_id
`
    rows, err := tx.Query(qry)
    if err != nil {
        return nil, errors.New("querying steering: " + err.Error())
    }
    defer rows.Close()

    data := []SteeringData{}
    for rows.Next() {
        sd := SteeringData{}
        if err := rows.Scan(&sd.DeliveryService, &sd.SteeringID, &sd.TargetName, &sd.TargetID, &sd.Value, &sd.Type, &sd.DSType); err != nil {
            return nil, errors.New("get steering data scanning: " + err.Error())
        }
        data = append(data, sd)
    }
    return data, nil
}

// getSteeringFilters takes a slice of ds ids, and returns a map of delivery service ids to patterns and delivery service names.
func getSteeringFilters(tx *sql.Tx, dsIDs []int) (map[int][]tc.SteeringFilter, error) {
    qry := `
SELECT
  ds.id,
  ds.xml_id,
  r.pattern
FROM
  deliveryservice ds
  JOIN deliveryservice_regex dsr ON dsr.deliveryservice = ds.id
  JOIN regex r ON dsr.regex = r.id
  JOIN type t ON r.type = t.id
WHERE
  ds.id = ANY($1)
  AND t.name = $2
ORDER BY
  r.pattern,
  ds.type,
  dsr.set_number
`
    rows, err := tx.Query(qry, pq.Array(dsIDs), tc.DSMatchTypeSteeringRegex)
    if err != nil {
        return nil, errors.New("querying steering regexes: " + err.Error())
    }
    defer rows.Close()

    filters := map[int][]tc.SteeringFilter{}
    for rows.Next() {
        dsID := 0
        f := tc.SteeringFilter{}
        if err := rows.Scan(&dsID, &f.DeliveryService, &f.Pattern); err != nil {
            return nil, errors.New("scanning steering filters: " + err.Error())
        }
        filters[dsID] = append(filters[dsID], f)
    }
    return filters, nil
}

type Coord struct {
    Lat float64
    Lon float64
}

// getPrimaryOriginCoords takes a slice of ds ids, and returns a map of delivery service ids to their primary origin coordinates.
func getPrimaryOriginCoords(tx *sql.Tx, dsIDs []int) (map[int]Coord, error) {
    qry := `
SELECT
  o.deliveryservice,
  c.latitude,
  c.longitude
FROM
  origin o
  JOIN coordinate c ON c.id = o.coordinate
WHERE
  o.deliveryservice = ANY($1)
  AND o.is_primary
`
    rows, err := tx.Query(qry, pq.Array(dsIDs))
    if err != nil {
        return nil, errors.New("querying steering primary origin coords: " + err.Error())
    }
    defer rows.Close()

    coords := map[int]Coord{}
    for rows.Next() {
        dsID := 0
        c := Coord{}
        if err := rows.Scan(&dsID, &c.Lat, &c.Lon); err != nil {
            return nil, errors.New("scanning steering primary origin coords: " + err.Error())
        }
        coords[dsID] = c
    }
    return coords, nil
}
package quic

type sender interface {
    Send(p *packetBuffer)
    Run() error
    WouldBlock() bool
    Available() <-chan struct{}
    Close()
}

type sendQueue struct {
    queue       chan *packetBuffer
    closeCalled chan struct{} // closed when Close() is called
    runStopped  chan struct{} // closed when the run loop returns
    available   chan struct{}
    conn        sendConn
}

var _ sender = &sendQueue{}

const sendQueueCapacity = 8

func newSendQueue(conn sendConn) sender {
    return &sendQueue{
        conn:        conn,
        runStopped:  make(chan struct{}),
        closeCalled: make(chan struct{}),
        available:   make(chan struct{}, 1),
        queue:       make(chan *packetBuffer, sendQueueCapacity),
    }
}

// Send sends out a packet. It's guaranteed to not block.
// Callers need to make sure that there's actually space in the send queue by calling WouldBlock.
// Otherwise Send will panic.
func (h *sendQueue) Send(p *packetBuffer) {
    select {
    case h.queue <- p:
    case <-h.runStopped:
    default:
        panic("sendQueue.Send would have blocked")
    }
}

func (h *sendQueue) WouldBlock() bool {
    return len(h.queue) == sendQueueCapacity
}

func (h *sendQueue) Available() <-chan struct{} {
    return h.available
}

func (h *sendQueue) Run() error {
    defer close(h.runStopped)
    var shouldClose bool
    for {
        if shouldClose && len(h.queue) == 0 {
            return nil
        }
        select {
        case <-h.closeCalled:
            h.closeCalled = nil // prevent this case from being selected again
            // make sure that all queued packets are actually sent out
            shouldClose = true
        case p := <-h.queue:
            if err := h.conn.Write(p.Data); err != nil {
                // This additional check enables:
                // 1. checking for the "datagram too large" message from the kernel, and with that,
                // 2. Path MTU discovery, and
                // 3. eventual detection of a lost PING frame.
                if !isMsgSizeErr(err) {
                    return err
                }
            }
            p.Release()
            select {
            case h.available <- struct{}{}:
            default:
            }
        }
    }
}

func (h *sendQueue) Close() {
    close(h.closeCalled)
    // wait until the run loop returned
    <-h.runStopped
}
/**
 * map the item in the stream to another item if an error occurred
 *
 * @param mappedItem the new mapped item
 * @param <R>        the new item type
 * @return an {@link Optional} that holds the mapped item on error, or an empty {@link Optional}
 *         if no error occurred
 */
public <R> Optional<R> onErrorMapItem(R mappedItem) {
    if (error != null) {
        return new Optional<>(mappedItem, proxy.getConfiguration());
    } else {
        return new Optional<>(null, proxy.getConfiguration());
    }
}
/* Function: PrintViterbiAMX()
 *
 * Purpose:  Print out a normal main matrix from the original
 *           Viterbi alignment algorithm.
 */
void
PrintViterbiAMX(FILE *fp, struct istate_s *icm, int statenum, char *seq, int N, int ***amx)
{
  int diff, j, y;

  for (y = 0; y < statenum; y++)
    {
      fprintf(fp, "### A Matrix for state %d, type %d (%s), from node %d\n",
              y, icm[y].statetype, UstatetypeName(icm[y].statetype), icm[y].nodeidx);

      /* Column header: one field per diff value. */
      fprintf(fp, "       ");
      for (diff = 0; diff <= N; diff++)
        fprintf(fp, "%6d ", diff);
      fprintf(fp, "\n");

      /* One row per sequence position j; '*' marks the j = 0 boundary row. */
      for (j = 0; j <= N; j++)
        {
          fprintf(fp, "%c %3d ", ((j > 0) ? seq[j] : '*'), j);
          for (diff = 0; diff <= j; diff++)
            fprintf(fp, "%6d ", amx[j][diff][y]);
          fprintf(fp, "\n");
        }
      fprintf(fp, "\n\n");
    }
}
Bilateral effect of aging population on consumption structure: Evidence from China

The inevitable deepening of population aging in China will exert a far-reaching influence on national consumption and economic transformation. Based on interprovincial panel data for 2000-2018, this paper measured the ratio of five survival and enjoyment consumption categories to disposable income, reconstructed the indicators for upgrading the consumption structure, and used a two-tier stochastic frontier model to decompose the influence of population aging on the consumption structure into its positive and negative effects and recover the net effect. The findings indicated that (1) population aging exerted both positive and negative effects on upgrading the consumption structure: the positive effect raised the consumption structure by 14.04%, the negative effect lowered it by 6.18%, and the comprehensive net effect was an upgrade of 7.86%. (2) From the perspective of the time effect, under the combined positive and negative effects of aging, the consumption structure was upgraded by 7.861% on average every year. (3) Regarding the regional effect, the promoting role of aging was highest in the eastern region, followed by the west, with the middle region lowest; combined with the estimates for each province, the promotion brought by aging was lower in the northeast and southwest. Based on these results, this paper proposed advice for actively developing the silver-hair economy, promoting improvement of the consumption structure according to local circumstances, developing a mature consumption market for older adults, tapping the consumption potential of the elderly, accelerating urbanization, and stimulating consumption growth through the Internet.
Introduction

To consider the effect of population aging dialectically, and to explore how to cushion the loss of the demographic dividend while aging keeps deepening, promote the growth of domestic demand, and accelerate domestic economic circulation, the following questions should be answered: first, what is the effect of population aging on the upgrading of the consumption structure; second, how large is that effect? The resident consumption rate should be improved. Meanwhile, with the deepening of aging, on the one hand the elderly population keeps growing, which will undoubtedly accelerate demand for medical healthcare, elderly-care services, and related facilities; this is an important opportunity to promote the upgrading of China's consumption structure. On the other hand, population aging means a shrinking labor force, and the decline of prime consumer groups is unfavorable to consumption growth and consumption transformation. Accurately estimating the influence of population aging on the transformation of the consumption structure will therefore better serve economic development.

Literature review

As an important aspect of economic growth, consumption has long been a research focus for domestic scholars. With the growth of China's economy, research perspectives have broadened from the consumption level and consumption structure to the upgrading of the consumption structure. Existing studies of consumption-structure upgrading concentrate on two aspects. On the one hand, they discuss the current state of consumption-structure upgrading in China. Han and Xia (3), Shi et al. (4), and Gu and Xia (5), respectively, measured the state of China's consumption-structure upgrading from the perspectives of the "developmental coefficient," consumption structure and consumption habits, and the "survival type-developmental type-enjoyment type" framework, and demonstrated that China's consumption structure remains in an upgrading state. Ye and Tang (6) further used the entropy weight method to measure the consumption upgrade index of each province in China and found an obvious difference between regions, with the provincial consumption upgrade index decreasing from east to west. On the other hand, studies explore the factors affecting consumption-structure upgrading, mainly from the perspectives of social capital and consumption behavior (7), financial constraints (8), and the Internet (9). So far, however, few references have analyzed these factors from the demographic perspective. With the constant deepening of population aging, the transformation of the population structure will undoubtedly exert an important influence on the upgrading of the consumption structure, so studying their relationship is clearly significant.

First, population aging implies an increase in the elderly population ratio, and this group has distinctive consumption features. On the whole, the consumption demands of the elderly in China are trending toward specialization and diversification, and their external dependency is gradually increasing (10, 11).
Regarding consumption demands, older adults tend toward medical treatment and care services, and their consumption concept is relatively mature and rational (12). The overall consumption level of the elderly is low, even below the national average (13), and there is an obvious difference in consumption quantity between urban and rural older adults (14). In terms of the consumption structure, after becoming elderly, the food expenditure ratio of older adults rises (13), expenditures on clothing and transport decline (15), and expenditure on medical healthcare grows significantly (16). These special consumption features of the elderly will inevitably exert a particular influence on the upgrading of the consumption structure.

On the combination of aging and consumption, Modigliani and Brumberg (1954) (17) put forward the life cycle theory and discussed the relationship between changes in the population structure and consumption. This theory argues that rational consumers allocate savings and income across different age stages according to the utility maximization principle. With the deepening of China's aging and the rise of its consumption level, Chinese scholars have also conducted many studies on aging and the consumption structure. First, in terms of research methods, Li and Gao (18), Cha and Zhou (19), Yu and Sun (20), and Bao and Li (21) used grey system theory and methods to verify whether population aging affected the consumption structure, but they did not clarify the direction of its role. Later, scholars built linear models grounded in the life cycle theory to establish the direction of the effect.

In terms of research frameworks, current studies rest on three approaches. The first uses the eight consumption categories of the National Bureau of Statistics (food, clothing, housing, transport and communication, education/culture/entertainment, medical care, daily consumption services, etc.) to measure the consumption-upgrade effect of aging. For example, based on panel data of 30 provinces in China, Zhang et al. (22) regrouped the eight categories into "food and clothing consumption" and "other consumption" to study the consumption-upgrade effect of aging. The second is based on the survival, developmental, and enjoyment materials distinguished by Engels: for instance, the authors of (23) measured the ratio of developmental and enjoyment expenditure in total expenditure, Kou and Zhang (24) used the survival-developmental-enjoyment consumption framework, and Tian (25) divided consumption into health, enjoyment, and developmental types to measure the consumption-upgrade effect of aging. The third is based on Stone's LES model, dividing consumption into basic consumption and developmental consumption to measure the upgrading effect of the consumption structure under aging.
For instance, Li (26) utilized the extended linear expenditure system (ELES) to divide resident consumption expenditure into basic and developmental commodities. Current research conclusions concentrate on two positions: first, population aging favors consumption-structure upgrading (22-24); second, it does not (21, 25). A few scholars found the influence of aging on consumption-structure upgrading to be insignificant (26). On the whole, the main conclusions of existing references concern a unilateral positive or negative effect and have not noticed that aging can exert positive and negative effects simultaneously, varying with time and economic development. Meanwhile, there are no relevant studies on the time heterogeneity of the aging effect. In regional studies, scholars often compare the eastern, middle, and western areas but do not display the specific aging effect for each province. Given this, this paper used a two-tier stochastic frontier model to estimate and compare the positive and negative effects of population aging on the upgrading of the consumption structure, quantified both effects, and on that basis evaluated the comprehensive influence of aging on consumption-structure upgrading. It further estimated the time trend of the net effect of aging and the province-by-province upgrading effect, so as to propose more practical countermeasures.

Theoretical assumptions

After entering the 21st century, China became an aging society, and the degree of aging keeps deepening. This is attributable, on the one hand, to declining birth and death rates, which shrink the young population and raise the elderly population; on the other hand, to lengthening life expectancy, which keeps raising the number of elderly people in society. Together these two factors raise the elderly population ratio and keep deepening social aging, and both changes exert multiple influences on consumption. On the one hand, population aging may promote the upgrading of the consumption structure: (1) The life cycle theory argues that most people prefer to smooth consumption over their lifetime; surplus income in working years is often used to repay debts incurred in youth or saved for old age, so when the elderly share of society rises, the consumption of society as a whole tends to increase. (2) After workers exit the labor market they have more leisure time, so the opportunity cost of enjoying leisure falls, increasing enjoyment consumption for this group and driving consumption upgrading. (3) With increasing age, bodily functions degenerate and self-care ability weakens, which inevitably drives demand for medical treatment, pensions, and elderly care.
The deepening of aging will undoubtedly drive social consumption of medical healthcare and aged-care services. (4) As the post-60s and post-70s generations become elderly, the consumption concept of older adults will shift toward spending ahead of income and enjoyment-oriented consumption. (5) The extensive use of the Internet makes consumption more convenient, removing barriers caused by inconvenient travel.

On the other hand, population aging may obstruct the upgrading of the consumption structure: (1) Aging means the share of young people in the total population falls; young people are the labor-force participants and the main consumer groups, so their decline reduces every component of consumption. (2) As expected lifetimes lengthen, older adults must spread resources over a longer horizon to maximize utility, reducing current consumption expenditure; in working years, precautionary saving for old age also rises at the expense of current consumption. (3) With age, after leaving their posts, people have more time to compare purchases; older adults favor goods with a high performance-to-cost ratio and reduce impulse consumption. (4) With age, the ability to accept and learn new things weakens, while consumption modes keep updating and becoming more intelligent; a lagging consumption mode hampers the diversified development of the consumption structure. (5) The older generation, shaped by long-standing life habits, has a stronger saving orientation and a weaker willingness to consume.

On this basis, this paper proposed Hypothesis 1: population aging exerts both positive and negative effects on the upgrading of the consumption structure.

Meanwhile, since reform and opening-up, China has undergone earth-shaking changes in its economy, politics, and culture, and people's disposable incomes keep rising. Relative to 2013, China's per capita disposable income in 2020 had increased by 75.79%. The industrial structure kept optimizing: over 2000-2019, the GDP shares of the primary and secondary industries fell to 7.11% and 38.97% from 14.67% and 45.54%, respectively, while the share of the tertiary industry rose from 39.79% to 53.92%. People's consumption concepts kept changing, and the consumption environment was constantly improving. On this basis, this paper put forward Hypothesis 2: the positive promoting effect of population aging on the upgrading of the consumption structure will increase over time.

China has a vast territory, and regions differ markedly in economic development and population structure, so the upgrading effect of aging on the consumption structure inevitably differs across regions. On the whole, east China has a higher degree of aging but also a higher level of economic development: its consumption market is relatively mature, high-tech applications are widespread, and consumption concepts are relatively advanced. The west lies in remote areas; its geographical disadvantages delay its economic development, its population is sparse, and its consumption market is underdeveloped.
Moreover, consumption concepts there lag behind. On this basis, this paper proposed Hypothesis 3: the relationship between population aging and the upgrading of the consumption structure is regionally heterogeneous, with the strongest positive effect in the east.

Empirical model and data description

The two-tier stochastic frontier model

The analysis above implies that population aging exerts mutually opposed positive and negative effects on the upgrading of the consumption structure. Hence, following the research idea of Kumbhakar and Parmeter (27), this paper constructed the two-tier stochastic frontier model:

Upgrade_it = f(x_it; δ) + ξ_it, with ξ_it = w_it − u_it + ε_it, (1)

in which Upgrade_it is the consumption structure level; x_it is a series of control variables affecting the upgrading of the consumption structure, including per capita disposable income, deposit balance, consumption tendency, child-rearing ratio, urbanization level, industrial development level, social security level, Internet penetration rate, and telephone penetration rate; δ is the parameter vector to be estimated; f(x_it; δ) is the frontier consumption structure level; and ξ_it is the compound residual term, in which ε_it is a stochastic error capturing unobservable factors affecting the consumption structure level. Because the compound residual ξ_it generally has a nonzero mean, OLS estimation of (1) is biased. Here w_it ≥ 0 captures the extent to which population aging promotes the upgrading of the consumption structure, and u_it ≥ 0 the extent to which aging obstructs it; when only one term is present (w_it = 0 and u_it ≥ 0, or u_it = 0 and w_it ≥ 0), the model reduces to a one-sided stochastic frontier model, and when w_it = u_it = 0 it reduces to OLS.

Under Formula (1), the actual effect of aging on the upgrading of the consumption structure is the combined outcome of the two opposing forces: promotion pushes the consumption structure level above the frontier level, while obstruction pulls it below. The net effect of promotion and obstruction together measures the deviation of the actual consumption structure level from the frontier.

Because OLS is biased here, this paper used maximum likelihood estimation (MLE) to obtain efficient estimates of the parameter δ and of the residual components w_it and u_it. To this end, the components of the compound residual ξ_it are assumed to satisfy the following conditions: the stochastic error term is mutually independent of the other components and of the consumption-structure characteristics x_it; ε_it follows a normal distribution, ε_it ~ i.i.d. N(0, σ_ε²); and w_it and u_it follow exponential distributions, w_it ~ i.i.d. Exp(σ_w) and u_it ~ i.i.d. Exp(σ_u). Under these distributional assumptions, the probability density function of the compound residual ξ_it can be derived as Formula (2), in which Φ(·) is the cumulative distribution function of the standard normal distribution.
φ(·) is the corresponding probability density function, and the remaining parameters are functions of (σ_ε, σ_w, σ_u). Based on these parameters, the log-likelihood for the n observations can be written as Formula (3), and maximizing it yields MLE estimates of all parameters, from which the conditional density functions of w_it and u_it follow as Formulas (4) and (5).

This paper focuses on the positive and negative bilateral effects of population aging on the upgrading of the consumption structure. Based on Formulas (4) and (5), the degree to which aging pushes the actual upgrading of the consumption structure above, or pulls it below, the frontier level can be evaluated. Furthermore, this paper converted the absolute deviations induced by aging into the percentage by which the actual level is higher or lower than the frontier upgrading level; the transformed estimates are given by Formulas (6) and (7), and the net effect (NE) of aging on the upgrading of the consumption structure follows from them as Formula (8) (see the consolidated sketch before Table 2).

Variable selection and data source

Based on the measurement model above and on data availability, the variables are set up as follows.

Explained variable: the upgrading level of the consumption structure. When residents' consumption demand shifts from the survival-oriented consumption of food, clothing, and housing toward the five developmental and enjoyment categories of transport and communication, leisure and entertainment, education, medical care, and daily services, the consumption structure is deemed to be upgrading. Hence, following the construction of the residents' consumption level index, the average consumption tendency (the ratio of residents' average consumption expenditure to disposable income in each region each year), this paper defined the ratio of transport and communication, leisure and entertainment, education, medical, and daily service expenditures to residents' disposable income (the service-product consumption tendency) as the index of consumption-structure upgrading.

Explanatory variable: the population aging level, measured as the ratio of the population aged 65 and over to the total population. To further verify the empirical results, the old-age dependency ratio is used for the robustness test.

Drawing on existing studies, the control variables include (1) the child-rearing ratio, measured as the ratio of the population aged below 14 to the working-age population; (2) the urbanization level, measured as the share of urban population in the total population; (3) residents' per capita disposable income; (4) residents' deposit balance; (5) the social security level; (6) the industrial development level, measured as the ratio of tertiary-industry output to gross domestic product (GDP); and (7) the Internet penetration rate. The variables are summarized in Table 1.

Empirical results and analyses

Estimation of the two-tier stochastic frontier model

The reference result

Based on the information above, this paper estimated the bilateral effect of population aging on the upgrading of the consumption structure from Formula (1).
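Since the displayed equations did not survive in this copy, the specification and the effect measures can be sketched in the two-tier stochastic frontier form of Kumbhakar and Parmeter (27) that the paper follows; this is a hedged reconstruction from the surrounding definitions, not the paper's verbatim formulas:

\begin{align*}
\mathit{Upgrade}_{it} &= x_{it}\delta + \xi_{it}, \qquad \xi_{it} = w_{it} - u_{it} + \varepsilon_{it}, &&(1)\\
\varepsilon_{it} &\sim \text{i.i.d. } N(0,\sigma_{\varepsilon}^{2}), \qquad w_{it} \sim \text{i.i.d. } \mathrm{Exp}(\sigma_{w}), \qquad u_{it} \sim \text{i.i.d. } \mathrm{Exp}(\sigma_{u}),\\
\text{positive effect} &= E\big[\,1 - e^{-w_{it}} \mid \xi_{it}\,\big] \times 100\%, &&(6)\\
\text{negative effect} &= E\big[\,1 - e^{-u_{it}} \mid \xi_{it}\,\big] \times 100\%, &&(7)\\
NE &= E\big[\,e^{-u_{it}} - e^{-w_{it}} \mid \xi_{it}\,\big] \times 100\% = \text{positive effect} - \text{negative effect}. &&(8)
\end{align*}

On this reading, Table 4's 14.04% and 6.18% are the sample means of (6) and (7), and the 7.86% net effect is the mean of (8).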
The estimation results are shown in Table 2. Model 1 in the second row is the simple OLS estimate; the third to sixth rows are MLE results, in which the third row controls neither time nor regional fixed effects, the fourth controls regional fixed effects, the fifth controls time fixed effects, and the sixth controls both. On this basis, the ratio of population aged 65 and over was introduced to consider the effect of aging on the upgrading of the consumption structure: the seventh row gives the unilateral estimate of the negative effect of aging, the eighth row the unilateral estimate of the positive effect, and the ninth row the estimate considering both effects simultaneously. The maximum likelihood ratios of the models in Table 2 can be compared to choose among the specifications. (Note to Table 2: data in brackets are t-values; *, **, and *** denote significance at the 10, 5, and 1% levels, respectively.)

Variance decomposition: measurement of the positive and negative effects

According to the regression result of model 8 in Table 2, the promoting and obstructing effects of population aging on the upgrading of the consumption structure are reported in Table 3. The estimates show that aging indeed exerts both positive and negative effects on the upgrading of the consumption structure, consistent with the theoretical hypothesis of this paper: the estimated coefficient of the positive effect was 0.1632 and that of the negative effect 0.0727, so the positive effect was clearly larger than the negative effect, and the comprehensive net effect was 0.0905. Further analysis of the variance shares shows that of the total residual variance of 0.032, the bilateral effects of aging explain up to 99.78%, with the stochastic error term that aging cannot explain accounting for the remainder; the total effect of aging thus explains most of the total variance in consumption-structure upgrading, confirming that aging influences the upgrading of the consumption structure. Within the total effect of aging, the positive effect accounts for 83.43% and the negative effect for 16.57%, indicating that the positive effect of aging on the upgrading of the consumption structure outweighs the negative effect, so the overall upgrading level of the consumption structure sits above the frontier upgrading level.
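The variance shares just quoted are consistent with the usual decomposition for exponential one-sided terms, again as a hedged reconstruction (with Exp(σ), the variance is σ²):

\begin{align*}
\mathrm{Var}(\xi_{it}) &= \sigma_{w}^{2} + \sigma_{u}^{2} + \sigma_{\varepsilon}^{2} \approx 0.032,\\
\frac{\sigma_{w}^{2}}{\sigma_{w}^{2} + \sigma_{u}^{2}} &= \frac{0.1632^{2}}{0.1632^{2} + 0.0727^{2}} \approx 83.43\%, \qquad \frac{\sigma_{u}^{2}}{\sigma_{w}^{2} + \sigma_{u}^{2}} \approx 16.57\%,
\end{align*}

which reproduces the reported positive and negative shares and, with (σ_w² + σ_u²)/Var(ξ_it) ≈ 99.78%, the share of total variance explained by the bilateral effect.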
The influence degree of population aging on the upgrading of the consumption structure

To further convert the deviations induced by aging into percentages above or below the frontier upgrading level, Formulas (6)-(8) were used to obtain the distribution characteristics of the positive effect, the negative effect, and the net effect; the results are shown in Table 4. On average, population aging promoted the upgrading of the consumption structure by 14.04% and obstructed it by 6.18%, and the net effect of the two made the actual upgrading level 7.86% higher than the frontier level. In other words, if the frontier upgrading level is set at 100%, the actual level reached 107.86%. The fourth to sixth rows of Table 4 report the distributions of the positive effect, the negative effect, and the net effect, which show that the influence of aging on consumption-structure upgrading differs significantly across provinces. At the 25th percentile, under the combined action of the positive and negative effects, the upgrading of the consumption structure in a quarter of provinces was obstructed, with the actual net level falling 0.75% below the frontier; these are provinces whose economic development lags and whose industries are underdeveloped, so consumption by the elderly is poorly stimulated. At the 50th percentile, the positive effect exceeded the negative effect, so the net effect turned positive, with the actual level 5.78% above the frontier. At the 75th percentile, the dominance of the positive effect strengthened further, and the actual net level reached 14.56% above the frontier.

To display the distributions intuitively, frequency charts of the positive, negative, and net effects of aging on the upgrading of the consumption structure are presented (Figures 1-3). Figures 1, 2 show that both the positive and the negative effect are right-skewed with long right tails: the negative (obstructing) effect dies out at around 30% (Figure 2), while the positive (promoting) effect still has mass at around 50% (Figure 1), indicating that the promoting role of aging in the upgrading of the consumption structure is somewhat larger. Figure 3 compares the two distributions and shows that the negative effect is visibly more concentrated at small values than the positive effect.

The time characteristic analysis of the aging net effect

Table 5 exhibits the time distribution characteristics of the effect of population aging on the upgrading of the consumption structure.
As shown in Table 5, at low aging levels the obstructing role of aging on consumption-structure upgrading was larger: below the 25th percentile, the net effect of aging was negative in half of the years. However, as the aging level kept rising, the promoting role of aging became more dominant: at the 75th percentile the net effect was positive, showing that as population aging kept deepening it became favorable, on the whole, to the upgrading of the consumption structure.

The regional characteristic analysis of the aging net effect

Table 6 exhibits the regional distribution of the effect of population aging on the upgrading of the consumption structure, reporting net-effect results for east, middle, and west China with provinces grouped by geographical orientation. Comparing the three areas, the net effect of aging was largest in the eastern areas, at 9.93%, followed by 9.07% in the western areas and 3.16% in the middle areas. On this basis, the specific net effects for all 31 provinces can be obtained. Among them, Zhejiang Province had the largest net effect, 32.22%. Twenty-six provinces, more than four-fifths, had positive net effects, while five provinces, fewer than one-fifth, had negative ones. On the whole, the net effect remained low in five provinces including Xinjiang, Jiangxi, Hebei, Hainan, and Heilongjiang; in other words, the net effect of aging remained negative or low in the northeast and southwest. According to the 7th National Population Census, the national share of population aged 65 and over was 13.5%. Among the provinces with a low net effect, the shares in Liaoning (17.42%), Jilin (15.61%), Heilongjiang (15.61%), Hubei (14.59%), and Hebei (13.92%) were above the national level, while those in Shanxi (12.9%), Jiangxi (11.89%), Tibet (5.67%), and Hainan (10.43%) were below it. On the whole, except for Tibet, the aging degree of these provinces remained relatively high, yet they did not reap the silver-hair economic dividend brought by aging. One reason is that the northeast, along with Shanxi, has long been China's heavy-industry base, energy base, and granary; in the early years after the founding of the People's Republic it contributed enormously to China's development and was once the most developed area, so the northeast has some economic foundation. However, heavy industry still occupies the dominant position there, the northeast has never made a breakthrough in economic transformation, and its service industry is underdeveloped, so it cannot effectively develop the elderly market. This may restrict it from reaping the silver-hair economic dividend.

Bilateral effect estimation with different aging degrees

The empirical analysis above shows that the promoting role of population aging in the upgrading of the consumption structure was greater than the obstructing role.
On this basis, this paper further classified aging degrees and explored how the continued deepening of aging influences the upgrading of the consumption structure. By the international standard, a country enters the aging society when the share of population aged 65 and over exceeds 7%, the deeply aged society when it surpasses 14%, and the super-aged society when it exceeds 21%. Since the maximum share of population aged 65 and over in the sample did not exceed 21%, this paper divided aging degrees into 0-7%, 7-14%, and 14-21%. The empirical results in Table 7 show that within the 0-7% interval, aging positively promoted the upgrading of the consumption structure by 1%; as aging deepened to the 14-21% interval, the promotion reached 9.05%, showing that as the aging degree keeps deepening, the promoting role of aging on the upgrading of the consumption structure keeps strengthening.

Bilateral effect under different urbanization degrees

Generally speaking, a higher urbanization level in an area, on the one hand, attracts young people, driving consumption growth, improving the regional consumption structure, and in turn offsetting the negative effect brought by aging. On the other hand, a higher urbanization level signals comprehensive improvement in regional economic development, the tertiary industry, and the development of products for the elderly, offering older adults more convenient, safer, and richer consumption experiences that stimulate their consumption growth and upgrade the consumption structure for the elderly. To verify this conjecture, this paper divided the urbanization degree into three levels and tested the effect of aging on the upgrading of the consumption structure under each. As illustrated in Table 8, at a low urbanization level the promoting role of aging was only 1%; when the regional urbanization level reached 30-70%, aging promoted the upgrading of the consumption structure by 8.72%; and when it surpassed 70%, the promotion was 9.05%. Thus the promoting role of aging strengthens as urbanization rises, and raising the urbanization rate can effectively enhance aging's promotion of consumption and reduce its inhibiting role.

Robustness test

The old-age dependency ratio is the ratio of the elderly (non-working-age) population to the working-age population, also called the elderly burden coefficient; it indicates how many elderly people each unit of working-age population supports and thus reveals the degree of social aging to some extent.
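In formula terms (the standard definition, consistent with the description above; the 15-64 working-age bounds are an assumption, as the source does not state them):

\[
\mathrm{OADR} = \frac{P_{65+}}{P_{15\text{-}64}} \times 100\%,
\]

where P_{65+} is the population aged 65 and over and P_{15-64} is the working-age population.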
To verify the robustness of the estimates, the share of population aged 65 and over in the original specification was replaced by the old-age dependency ratio, and the bilateral effect of aging on the upgrading of the consumption structure was estimated again. To save space, the paper directly reports the bilateral effects after variance decomposition, with the old-age dependency ratio as the main explanatory variable, in Table 9. The estimates indicate that the old-age dependency ratio likewise exerts positive and negative effects on the upgrading of the consumption structure, with a positive-effect coefficient of 0.158 and a negative-effect coefficient of 0.0473, the same pattern as the baseline result, verifying the earlier findings. As for the net effect, the negative effect of the old-age dependency ratio was smaller than the positive effect: the bilateral effects explained 91.68% of the upgrading of the consumption structure, with the positive effect accounting for 91.76% and the negative effect for 8.24%, showing that the positive role of the old-age dependency ratio was dominant and pushed the upgrading of the consumption structure above the frontier level. Table 10 reports the positive, negative, and net effects of the old-age dependency ratio on the upgrading of the consumption structure: the positive effect promoted the upgrading by 13.61%, the negative effect reduced it by 4.42%, and the comprehensive net effect put the actual upgrading level 9.19% above the frontier.

Conclusion and advice

From the analysis above, this paper drew the following conclusions. (1) Population aging exerted both positive and negative effects on the upgrading of the consumption structure, and the positive effect was greater than the negative effect. (2) From the perspective of the time effect, under the combined action of the positive and negative effects of aging, the consumption structure was upgraded by 7.861% on average every year. (3) The effect of aging on the upgrading of the consumption structure showed regional heterogeneity: the positive promoting role was largest in the eastern areas, followed by the western areas, with the middle areas smallest, partly because the middle and western groupings include the northeastern and southwestern provinces, where the net effect is low. (4) With the continuous deepening of aging and the rise of the urbanization level, the promoting effect of aging on the upgrading of the consumption structure strengthened accordingly.

Accordingly, the paper puts forward the following advice. First, it is necessary to develop the economy actively and promote the improvement of the consumption structure according to local circumstances.
By estimating the net effect of the aging population in different provinces, it could be found that the provinces with a relatively significant positive effect of the aging population were mainly areas with a higher economic development level, including Zhejiang, Shanghai, Guangdong, and Jiangsu. Hence, under continuously deepening population aging, it is necessary to facilitate consumption growth, construct the domestic circulation, and promote consumption upgrading. Under these circumstances, it is essential to develop the national economy: only by maintaining faster and better economic development can consumption upgrading be effectively facilitated. The dramatic drop in the negative effect of the aging population brought by the economic crisis in 2008 also verified the importance of economic development. Second, the economic development and industrial upgrading of each area should be promoted in a targeted way according to local circumstances. On the basis of the provincial estimates, the net effect of the aging population in the southwest and northeast is dominated by low or negative values. The southwest, where the economy is relatively lagging, should develop its regional characteristic economy. Meanwhile, the northeast and Shanxi, which are equipped with a favorable industrial foundation, should drive the development of the tertiary industry and realize economic transformation. Hubei can depend on its high-tech industrial strength to realize the organic combination of the pension industry and the high-tech industry and help develop the silver-hair economy. Third, it is essential to develop a well-functioning elderly consumption market and explore the consumption potential of older groups. China has a large population base and a fast aging speed (28). The "silver-hair economy" will inevitably become the key to future service industry development. The empirical results also indicated that with the continuous deepening of the aging degree, the promotion role of the aging population for the upgrade of the consumption structure kept enhancing, showing that the aging population contains huge consumption growth potential. Hence, elderly care products and services, spiritual care, tourism, and leisure products for older adults could be developed with pertinence. Meanwhile, based on medical healthcare, it is essential to develop products that cross-integrate medical care with pensions, food, daily use, and accommodation, so as to promote the growth of other consumption expenditures alongside medical healthcare. Lastly, it is necessary to accelerate the urbanization process and rely on the Internet to comprehensively stimulate consumption growth. The estimation results in Table 2 showed that the urbanization level, the Internet penetration rate, and the development level of the tertiary industry had a significant positive promotion role for the upgrade of the consumption structure. The empirical results also indicated that with the continuous enhancement of the urbanization level, the promotion role of the aging population for the upgrade of the consumption structure was enhanced. As a result, in the future development process, elderly consumption could be stimulated by continuously improving the urbanization level and the development level of the tertiary industry.
Meanwhile, Internet utilization can enable consumption to develop rapidly through digitalization and networking (29, 30), so as to provide safe, convenient, and reliable services for older adults. Depending on Internet technology, safer and more sustainable consumption channels could be created to develop characteristic online purchase and door-to-door consumption services for older adults and to stimulate elderly consumption growth on the basis of a sound consumption environment.

Data availability statement

The original contributions presented in the study are included in the article/supplementary material; further inquiries can be directed to the corresponding author/s.
""" Config generator """ import sys import abc import os import six from starttls_policy_cli import constants from starttls_policy_cli import policy from starttls_policy_cli import util class ConfigGenerator(object): # pylint: disable=useless-object-inheritance """ Generic configuration generator. The two primary public functions: generate() print_instruct() """ __metaclass__ = abc.ABCMeta def __init__(self, policy_dir, enforce_testing=False): self._policy_dir = policy_dir self._enforce_testing = enforce_testing self._policy_filename = os.path.join(self._policy_dir, constants.POLICY_FILENAME) self._config_filename = os.path.join(self._policy_dir, self.default_filename) self._policy_config = None def _load_config(self): if self._policy_config is None: self._policy_config = policy.Config(filename=self._policy_filename) self._policy_config.load() return self._policy_config def _write_config(self, result, output): six.print_(result, file=output) def _expired_warning(self): """Warns user about policy list expiration. """ six.print_("\nACTION REQUIRED: your policy list at {config_location} has expired! " "Generating empty config.\n" "Check to see whether your update mechanism " "(cronjob, systemd timer) is working.\n" .format(config_location=self._policy_filename), file=sys.stderr) def generate(self): """Generates and dumps MTA configuration file to `policy_dir`. """ policy_list = self._load_config() if util.is_expired(policy_list.expires): self._expired_warning() result = self._generate_expired_fallback(policy_list) else: result = self._generate(policy_list) with open(self._config_filename, "w") as config_file: self._write_config(result, config_file) def manual_instructions(self): """Prints manual installation instructions to stdout. """ six.print_("{line}Manual installation instructions for {mta_name}{line}{instructions}" .format(line="\n" + ("-" * 50) + "\n", mta_name=self.mta_name, instructions=self._instruct_string())) @abc.abstractmethod def _generate(self, policy_list): """Creates configuration file. Returns a unicode string (text to write to file).""" @abc.abstractmethod def _generate_expired_fallback(self, policy_list): """Creates configuration file for expired policy list. Returns a unicode string (text to write to file).""" @abc.abstractmethod def _instruct_string(self): """Explain how to install the configuration file that was generated.""" @abc.abstractproperty def mta_name(self): """The name of the MTA this generator is for.""" @abc.abstractproperty def default_filename(self): """The expected default filename of the generated configuration file.""" class PostfixGenerator(ConfigGenerator): """Configuration generator for postfix. """ def _generate(self, policy_list): policies = [] max_domain_len = len(max(policy_list, key=len)) for domain, tls_policy in sorted(six.iteritems(policy_list)): policies.append(self._policy_for_domain(domain, tls_policy, max_domain_len)) return "\n".join(policies) def _generate_expired_fallback(self, policy_list): return "# Policy list is outdated. Falling back to opportunistic encryption." 
def _instruct_string(self): filename = self._config_filename abs_path = os.path.abspath(filename) return ("\nFirst, run:\n\n" "postmap {abs_path}\n\n" "Then, you'll need to point your Postfix configuration to {filename}.\n" "Check if `postconf smtp_tls_policy_maps` includes this file.\n" "If not, run:\n\n" "postconf -e \"smtp_tls_policy_maps=$(postconf -h smtp_tls_policy_maps)" " hash:{abs_path}\"\n\n" "And finally:\n\n" "postfix reload\n").format(abs_path=abs_path, filename=filename) def _policy_for_domain(self, domain, tls_policy, max_domain_len): line = ("{0:%d} " % max_domain_len).format(domain) mode = tls_policy.mode if mode == "enforce" or self._enforce_testing and mode == "testing": line += " secure match=" line += ":".join(tls_policy.mxs) elif mode == "testing": line = "# " + line + "undefined due to testing policy" return line @property def mta_name(self): return "Postfix" @property def default_filename(self): return "postfix_tls_policy"
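A minimal usage sketch for the generator above; the module path starttls_policy_cli.configure and the /etc/starttls-policy directory are illustrative assumptions, not part of the original module.

# Hypothetical driver for PostfixGenerator; module path and policy dir are assumed.
from starttls_policy_cli.configure import PostfixGenerator

generator = PostfixGenerator("/etc/starttls-policy", enforce_testing=False)
# Writes postfix_tls_policy into the policy dir, falling back to an
# opportunistic-encryption stub if the policy list has expired.
generator.generate()
# Prints the postmap/postconf/reload steps for manual installation.
generator.manual_instructions()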
<filename>Trees/main.py
import trees

data = []
labels = ['Outlook', 'Temperatures', 'Humidity', 'Wind', 'Play']

# Read the tab-separated training samples.
with open("data.txt", "r") as f:
    for line in f:
        data.append(line.strip().split("\t"))

# createTree implementations in this style commonly consume the labels
# list in place, so pass a copy and keep the original for classification.
my_tree = trees.createTree(data, labels[:])

in_v = ['sunny', 'mild', 'normal', 'strong']
print(trees.classify(my_tree, labels, in_v))
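The script assumes data.txt holds one tab-separated sample per line, with columns matching the labels list; a plausible file (values are illustrative, the actual dataset is not shipped here) might look like:

sunny	hot	high	weak	no
rain	mild	normal	weak	yes
overcast	cool	normal	strong	yes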
<reponame>tikivn/redux-miniprogram-bindings export declare const isFunction: (value: unknown) => value is Function; export declare const isPlainObject: <T extends Record<string, unknown> = Record<string, unknown>>(value: unknown) => value is T; export declare const getType: (value: unknown) => string; export declare const getKeys: { (o: object): string[]; (o: {}): string[]; }; export declare const hasOwnProperty: (v: string | number | symbol) => boolean; export declare const warn: (message: string) => never;
<filename>infra/Pos/Slotting/Impl/Ntp.hs {-# LANGUAGE DataKinds #-} {-# LANGUAGE TypeFamilies #-} -- | NTP-based implementation of slotting. module Pos.Slotting.Impl.Ntp ( -- * State NtpSlottingState , NtpSlottingVar -- * Mode , NtpMode , NtpWorkerMode -- * MonadSlots, redirects, etc. , mkNtpSlottingVar -- * Methods , ntpGetCurrentSlot , ntpGetCurrentSlotBlocking , ntpGetCurrentSlotInaccurate , ntpCurrentTime -- * Workers , ntpWorkers ) where import Universum import qualified Control.Concurrent.STM as STM import Control.Lens (makeLenses) import Control.Monad.Trans.Control (MonadBaseControl) import Data.List ((!!)) import Data.Time.Units (Microsecond) import Formatting (int, sformat, shown, stext, (%)) import Mockable (Catch, CurrentTime, Delay, Fork, Mockables, Throw, currentTime, delay) import NTP.Client (NtpClientSettings (..), ntpSingleShot, startNtpClient) import NTP.Example () import Serokell.Util (sec) import System.Wlog (WithLogger, logDebug, logInfo, logWarning) import qualified Pos.Core.Constants as C import Pos.Core.Configuration (HasConfiguration) import Pos.Core.Slotting (unflattenSlotId) import Pos.Core.Types (EpochIndex, SlotId (..), Timestamp (..)) import Pos.Infra.Configuration (HasInfraConfiguration) import qualified Pos.Slotting.Configuration as C import Pos.Slotting.Impl.Util (approxSlotUsingOutdated, slotFromTimestamp) import Pos.Slotting.MemState (MonadSlotsData, getCurrentNextEpochIndexM, getCurrentNextEpochSlottingDataM, waitCurrentEpochEqualsM) ---------------------------------------------------------------------------- -- TODO ---------------------------------------------------------------------------- -- TODO: it's not exported from 'node-sketch' and it's too hard to do -- it because of the mess in 'node-sketch' branches. -- -- It should be exported and used here, I think. type NtpMonad m = ( MonadIO m , MonadBaseControl IO m , WithLogger m , Mockables m [ Fork , Throw , Catch ] , MonadMask m ) ---------------------------------------------------------------------------- -- State ---------------------------------------------------------------------------- -- | Data needed for the slotting algorithm to work. data NtpSlottingState = NtpSlottingState { -- | Slot which was returned from getCurrentSlot last time. _nssLastSlot :: !SlotId -- | Margin (difference between global time and local time) which -- we got from NTP server last time. , _nssLastMargin :: !Microsecond -- | Time (local) for which we got margin in last time. , _nssLastLocalTime :: !Timestamp } type NtpSlottingVar = TVar NtpSlottingState makeLenses ''NtpSlottingState mkNtpSlottingVar :: ( NtpMonad m , Mockables m [ CurrentTime , Delay ] , HasConfiguration , HasInfraConfiguration ) => m NtpSlottingVar mkNtpSlottingVar = do let _nssLastMargin = 0 _nssLastLocalTime <- Timestamp <$> currentTime -- current time isn't quite valid value, but it doesn't matter (@pva701) let _nssLastSlot = unflattenSlotId 0 res <- newTVarIO NtpSlottingState {..} -- We don't want to wait too much at the very beginning, -- 1 second should be enough. 
let settings = (ntpSettings res) { ntpResponseTimeout = 1 & sec } res <$ singleShot settings where singleShot settings = unless C.isDevelopment $ do logInfo $ "Waiting for response from NTP servers" ntpSingleShot settings ---------------------------------------------------------------------------- -- Mode ---------------------------------------------------------------------------- type NtpMode ctx m = ( MonadIO m , MonadThrow m , WithLogger m , MonadSlotsData ctx m , Mockables m [ CurrentTime , Delay ] , HasConfiguration , HasInfraConfiguration ) type NtpWorkerMode m = (HasInfraConfiguration, NtpMonad m) ---------------------------------------------------------------------------- -- MonadSlots implementation ---------------------------------------------------------------------------- ntpCurrentTime :: (NtpMode ctx m) => NtpSlottingVar -> m Timestamp ntpCurrentTime var = do lastMargin <- view nssLastMargin <$> atomically (STM.readTVar var) Timestamp . (+ lastMargin) <$> currentTime ---------------------------------------------------------------------------- -- Getting current slot ---------------------------------------------------------------------------- data SlotStatus = CantTrust Text -- ^ We can't trust local time. | OutdatedSlottingData !EpochIndex -- ^ We don't know recent -- slotting data, last known -- current epoch is attached. | CurrentSlot !SlotId -- ^ Slot is calculated successfully. ntpGetCurrentSlot :: (NtpMode ctx m) => NtpSlottingVar -> m (Maybe SlotId) ntpGetCurrentSlot var = ntpGetCurrentSlotImpl var >>= \case CurrentSlot slot -> pure $ Just slot OutdatedSlottingData currentEpochIndex -> do logWarning $ sformat ("Can't get current slot, because slotting data"% " is outdated. Last known current epoch = "%int) currentEpochIndex Nothing <$ printSlottingData CantTrust t -> do logWarning $ "Can't get current slot, because we can't trust local time, details: " <> t Nothing <$ printSlottingData where -- Here we could print all the slotting data printSlottingData = do (sd, _) <- getCurrentNextEpochSlottingDataM logWarning $ "Slotting data: " <> show sd ntpGetCurrentSlotInaccurate :: (NtpMode ctx m) => NtpSlottingVar -> m SlotId ntpGetCurrentSlotInaccurate var = do res <- ntpGetCurrentSlotImpl var case res of CurrentSlot slot -> pure slot CantTrust _ -> _nssLastSlot <$> atomically (STM.readTVar var) OutdatedSlottingData _ -> ntpCurrentTime var >>= approxSlotUsingOutdated ntpGetCurrentSlotImpl :: (NtpMode ctx m) => NtpSlottingVar -> m SlotStatus ntpGetCurrentSlotImpl var = do NtpSlottingState {..} <- atomically $ STM.readTVar var t <- Timestamp . 
(+ _nssLastMargin) <$> currentTime
    case canWeTrustLocalTime _nssLastLocalTime t of
      Nothing -> do
        (currentEpochIndex, _) <- getCurrentNextEpochIndexM
        res <- max _nssLastSlot <<$>> slotFromTimestamp t
        let setLastSlot s =
              atomically $ STM.modifyTVar' var (nssLastSlot %~ max s)
        whenJust res setLastSlot
        pure $ maybe (OutdatedSlottingData currentEpochIndex) CurrentSlot res
      Just reason -> pure $ CantTrust reason
  where
    -- We can trust getCurrentTime if it is:
    -- • not bigger than 'time for which we got margin (last time)
    -- + NTP delay (+ some eps, for safety)'
    -- • not less than 'last time - some eps'
    canWeTrustLocalTime :: Timestamp -> Timestamp -> Maybe Text
    canWeTrustLocalTime t1@(Timestamp lastLocalTime) t2@(Timestamp t) = do
        let ret = sformat ("T1: "%shown%", T2: "%shown%", reason: "%stext) t1 t2
        if | t > lastLocalTime + C.ntpPollDelay + C.ntpMaxError ->
             Just $ ret $ "curtime is bigger than last local: " <>
                show C.ntpPollDelay <> ", " <> show C.ntpMaxError
           | t < lastLocalTime - C.ntpMaxError ->
             Just $ ret $ "curtime is less than last - error: " <>
                show C.ntpMaxError
           | otherwise -> Nothing

ntpGetCurrentSlotBlocking :: (NtpMode ctx m) => NtpSlottingVar -> m SlotId
ntpGetCurrentSlotBlocking var = ntpGetCurrentSlotImpl var >>= \case
    CantTrust _ -> do
        delay C.ntpPollDelay
        ntpGetCurrentSlotBlocking var
    OutdatedSlottingData current -> do
        waitCurrentEpochEqualsM (current + 1)
        ntpGetCurrentSlotBlocking var
    CurrentSlot slot -> pure slot

----------------------------------------------------------------------------
-- Workers
----------------------------------------------------------------------------

-- | Workers necessary for NTP slotting.
ntpWorkers :: NtpWorkerMode m => NtpSlottingVar -> [m ()]
ntpWorkers = one . ntpSyncWorker

-- Worker for synchronization of local time and global time.
ntpSyncWorker :: NtpWorkerMode m => NtpSlottingVar -> m ()
ntpSyncWorker = void . startNtpClient . ntpSettings

ntpHandlerDo :: (MonadIO m, WithLogger m)
             => NtpSlottingVar
             -> (Microsecond, Microsecond)
             -> m ()
ntpHandlerDo var (newMargin, transmitTime) = do
    logDebug $ sformat ("Callback on new margin: "%int%" mcs") newMargin
    let realTime = Timestamp $ transmitTime + newMargin
    atomically $ STM.modifyTVar var ( set nssLastMargin newMargin
                                    . set nssLastLocalTime realTime)

ntpSettings :: (HasInfraConfiguration, MonadIO m, WithLogger m)
            => NtpSlottingVar -> NtpClientSettings m
ntpSettings var = NtpClientSettings
    { -- list of servers addresses
      ntpServers         = [ "time.windows.com"
                           , "clock.isc.org"
                           , "ntp5.stratum2.ru"]
    -- got time margin callback
    , ntpHandler         = ntpHandlerDo var
    -- logger name modifier
    , ntpLogName         = "ntp"
    -- delay between making requests and response collection;
    -- it also means that handler will be invoked with this lag
    , ntpResponseTimeout = C.ntpResponseTimeout
    -- how often to send requests to the servers
    , ntpPollDelay       = C.ntpPollDelay
    -- way to summarize results received from different servers.
    , ntpMeanSelection   = \l -> let len = length l in sort l !! ((len - 1) `div` 2)
    }
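To make the canWeTrustLocalTime window above concrete, here is a small Python rendering of the same check; the two constants stand in for C.ntpPollDelay and C.ntpMaxError, and their values are assumptions for illustration, not taken from the configuration.

from typing import Optional

NTP_POLL_DELAY = 300_000_000  # microseconds between NTP polls (assumed value)
NTP_MAX_ERROR = 1_000_000     # tolerated clock error in microseconds (assumed value)

def can_we_trust_local_time(last_local_time: int, now: int) -> Optional[str]:
    """Return None if `now` is trustworthy, else the reason it is not."""
    # Too far ahead: more time passed than one poll interval plus slack.
    if now > last_local_time + NTP_POLL_DELAY + NTP_MAX_ERROR:
        return "current time is bigger than last local time plus poll delay"
    # Clock went backwards beyond the allowed error.
    if now < last_local_time - NTP_MAX_ERROR:
        return "current time is less than last local time minus max error"
    return None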
def check(cls):
    """Run the exercise checks and translate the outcome into a result object."""
    cls._tries += 1
    try:
        args = cls._get_injected_args()
        # First verify the learner actually attempted the problem,
        # then run the real correctness check.
        cls._check_whether_attempted(*args)
        cls._do_check(*args)
    except NotAttempted as e:
        return ProblemStatement(cls._problem + ' ' + str(e))
    except (Incorrect, AssertionError) as e:
        return TestFailure(str(e))
    else:
        return Correct(cls._correct_message())
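A sketch of how the result objects returned by check() might be consumed; the Exercise class and the printing logic are illustrative assumptions, only the three result types come from the snippet above.

# Hypothetical caller; Exercise stands in for a class providing the
# _get_injected_args/_do_check hooks that check() relies on.
result = Exercise.check()
if isinstance(result, Correct):
    print("Passed:", result)
elif isinstance(result, TestFailure):
    print("Failed:", result)
else:
    # ProblemStatement: the learner has not attempted the problem yet.
    print("Not attempted:", result)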
An expedition launched to track the drift of tsunami debris is using remote sensing to compare the actual event to computer predictions of the trajectory while improving officials' ability to warn residents about impending landfall. The tsunami that followed on the heels of the March 11, 2011, earthquake in northern Japan produced as much as 25 million tons of debris. Much of the debris was swept into the ocean. What stayed afloat drifted apart under the influence of winds and currents, most of it traveling eastward. The debris is predicted to reach the west coast of the United States and Hawaiʻi within the coming years, but its composition and how much is still floating on the surface are largely unknown. One thing is certain, however: the debris is hazardous to navigation, to marine life and, when washed ashore, to coastlines.

Expedition launched to track debris

To track where this debris is headed and provide appropriate warnings, a team of scientists and conservationists from the University of Hawaiʻi's Mānoa and Hilo campuses, Scripps Institution of Oceanography and the Ocean Recovery Alliance quickly created a plan to survey the debris field and mark it with satellite-tracked drifting buoys. Contributing their expertise to the effort are two University of Hawaiʻi participants, International Pacific Research Center Senior Researcher Nikolai Maximenko and Scientific Computer Programmer Jan Hafner. The team launched an expedition at the end of November 2011 from Honolulu to Midway Atoll and beyond. Armed with equipment shipped from California by Horizon Lines, navigational software provided by Nobeltec, a computer model of probable trajectories and observations of actual debris by a Russian training ship crew, the team surveyed the probable pathways of tsunami debris moving toward the Northwest Hawaiian Islands. The expedition deployed 11 drifting buoys, designed to simulate the motion of different types of debris, in a line between Midway and the leading edge of the tsunami debris field. Data from these satellite-tracked drifters, used in conjunction with computer models, allow remote monitoring of the movement of the debris field, giving scientists and operational agencies a better awareness of the status of the debris field and of the region's current system. Four hundred numbered wooden blocks were also deployed along the route, often near floating objects. Boaters, fishermen and beach-goers who find the blocks and contact the scientists as instructed on the blocks will help increase understanding of the motion of debris and currents in this remote region. Henry "Hank" Carson, a UH Hilo postdoctoral scientist, is coordinating the wooden block information collection.

Debris is passing to the north of Midway

Among the most important results of the expedition to date is the recognition that tsunami debris has recently not advanced toward Midway, but instead has been flowing eastward well to the north of the atolls. Analysis of the ocean-current field shows why. In recent weeks, the general flow around all the Hawaiian Islands has been from the southwest, producing a front located 300-400 miles northwest of Midway. This front and its associated northeastward jet keep the tsunami debris north of the islands…at least for the time being. Although this flow has prevented tsunami debris from approaching the islands, it carries a lot of "ordinary" debris (mainly old plastic) from the oceanic garbage patch located between Hawaiʻi and California.
The expedition documented 175 such objects, many photographed and collected for more thorough laboratory examination. The sightings of non-tsunami debris match reports at Kure and Midway Islands of items washing up on the southern beaches of the atolls. Some of the items could be tracked to the main Hawaiian Islands. Systematic examination of the samples (including water samples) has not revealed any significant radiation.

More information

Visit the tsunami debris tracking project. See summaries of research by Maximenko and Hafner on the tsunami debris.

Adapted from a news release by Gisela E. Speidel, International Pacific Research Center
<gh_stars>1-10 package org.javers.repository.jql; import org.javers.core.metamodel.object.GlobalIdFactory; import org.javers.core.metamodel.type.TypeMapper; class QueryCompiler { private final GlobalIdFactory globalIdFactory; private final TypeMapper typeMapper; QueryCompiler(GlobalIdFactory globalIdFactory, TypeMapper typeMapper) { this.globalIdFactory = globalIdFactory; this.typeMapper = typeMapper; } void compile(JqlQuery query) { query.compile(globalIdFactory, typeMapper); if(query.isVoOwnerQuery()) { VoOwnerFilter filter = query.getVoOwnerFilter(); globalIdFactory.touchValueObjectFromPath(filter.getOwnerEntity(), filter.getPath()); } } }
<gh_stars>1000+ /* * services/authzone.h - authoritative zone that is locally hosted. * * Copyright (c) 2017, NLnet Labs. All rights reserved. * * This software is open source. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * Neither the name of the NLNET LABS nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** * \file * * This file contains the functions for an authority zone. This zone * is queried by the iterator, just like a stub or forward zone, but then * the data is locally held. */ #ifndef SERVICES_AUTHZONE_H #define SERVICES_AUTHZONE_H #include "util/rbtree.h" #include "util/locks.h" #include "services/mesh.h" #include "services/rpz.h" struct ub_packed_rrset_key; struct regional; struct config_file; struct config_auth; struct query_info; struct dns_msg; struct edns_data; struct module_env; struct worker; struct comm_point; struct comm_timer; struct comm_reply; struct auth_rrset; struct auth_nextprobe; struct auth_probe; struct auth_transfer; struct auth_master; struct auth_chunk; /** * Authoritative zones, shared. */ struct auth_zones { /** lock on the authzone trees */ lock_rw_type lock; /** rbtree of struct auth_zone */ rbtree_type ztree; /** rbtree of struct auth_xfer */ rbtree_type xtree; /** do we have downstream enabled */ int have_downstream; /** number of queries upstream */ size_t num_query_up; /** number of queries downstream */ size_t num_query_down; /** first auth zone containing rpz item in linked list */ struct auth_zone* rpz_first; /** rw lock for rpz linked list, needed when iterating or editing linked * list. */ lock_rw_type rpz_lock; }; /** * Auth zone. Authoritative data, that is fetched from instead of sending * packets to the internet. */ struct auth_zone { /** rbtree node, key is name and class */ rbnode_type node; /** zone name, in uncompressed wireformat */ uint8_t* name; /** length of zone name */ size_t namelen; /** number of labels in zone name */ int namelabs; /** the class of this zone, in host byteorder. * uses 'dclass' to not conflict with c++ keyword class. 
 */
	uint16_t dclass;
	/** lock on the data in the structure
	 * For the node, parent, name, namelen, namelabs, dclass, you
	 * need to also hold the zones_tree lock to change them (or to
	 * delete this zone) */
	lock_rw_type lock;
	/** auth data for this zone
	 * rbtree of struct auth_data */
	rbtree_type data;
	/** zonefile name (or NULL for no zonefile) */
	char* zonefile;
	/** fallback to the internet on failure or ttl-expiry of auth zone */
	int fallback_enabled;
	/** the zone has expired (enabled by the xfer worker), fallback
	 * happens if that option is enabled. */
	int zone_expired;
	/** zone is a slave zone (it has masters) */
	int zone_is_slave;
	/** for downstream: this zone answers queries towards the downstream
	 * clients */
	int for_downstream;
	/** for upstream: this zone answers queries that unbound intends to
	 * send upstream. */
	int for_upstream;
	/** check ZONEMD records */
	int zonemd_check;
	/** reject absence of ZONEMD records */
	int zonemd_reject_absence;
	/** RPZ zones */
	struct rpz* rpz;
	/** store the env (worker thread specific) for the zonemd callbacks
	 * from the mesh with the results of the lookup, if nonNULL, some
	 * worker has already picked up the zonemd verification task and
	 * this worker does not have to do it as well. */
	struct module_env* zonemd_callback_env;
	/** for the zonemd callback, the type of data looked up */
	uint16_t zonemd_callback_qtype;
	/** zone has been deleted */
	int zone_deleted;
	/** deletelist pointer, unused normally except during delete */
	struct auth_zone* delete_next;
	/* not protected by auth_zone lock, must be last items in struct */
	/** next auth zone containing RPZ data, or NULL */
	struct auth_zone* rpz_az_next;
	/** previous auth zone containing RPZ data, or NULL */
	struct auth_zone* rpz_az_prev;
};

/**
 * Auth data. One domain name, and the RRs to go with it.
 */
struct auth_data {
	/** rbtree node, key is name only */
	rbnode_type node;
	/** domain name */
	uint8_t* name;
	/** length of name */
	size_t namelen;
	/** number of labels in name */
	int namelabs;
	/** the data rrsets, with different types, linked list.
	 * if the list is NULL the node would be an empty non-terminal,
	 * but in this data structure such nodes that represent an empty
	 * non-terminal are not needed; they just don't exist. */
	struct auth_rrset* rrsets;
};

/**
 * An auth data RRset
 */
struct auth_rrset {
	/** next in list */
	struct auth_rrset* next;
	/** RR type in host byteorder */
	uint16_t type;
	/** RRset data item */
	struct packed_rrset_data* data;
};

/**
 * Authoritative zone transfer structure.
 * Create and destroy needs the auth_zones* biglock.
 * The structure consists of different tasks. Each can be unowned (-1) or
 * owned by a worker (worker-num). A worker can pick up a task and then do
 * it. This means the events (timeouts, sockets) are for that worker.
 *
 * (move this to tasks).
 * They don't have locks themselves; the worker (that owns it) uses it,
 * also as part of callbacks, hence it has separate zonename pointers for
 * lookup in the main zonetree. If the zone has no transfers, this
 * structure is not created.
 */
struct auth_xfer {
	/** rbtree node, key is name and class */
	rbnode_type node;
	/** lock on this structure, and on the workernum elements of the
	 * tasks. First hold the tree-lock in auth_zones, find the auth_xfer,
	 * lock this lock. Then a worker can reassign itself to fill up
	 * one of the tasks.
	 * Once it has the task assigned to it, the worker can access the
	 * other elements of the task structure without a lock, because that
	 * is necessary for the eventloop and callbacks from that.
	 */
	lock_basic_type lock;
	/** zone name, in uncompressed wireformat */
	uint8_t* name;
	/** length of zone name */
	size_t namelen;
	/** number of labels in zone name */
	int namelabs;
	/** the class of this zone, in host byteorder.
	 * uses 'dclass' to not conflict with c++ keyword class. */
	uint16_t dclass;

	/** task to wait for next-probe-timeout,
	 * once timed out, see if a SOA probe is needed, or already
	 * in progress */
	struct auth_nextprobe* task_nextprobe;

	/** task for SOA probe. Check if the zone can be updated */
	struct auth_probe* task_probe;

	/** Task for transfer. Transferring and updating the zone. This
	 * includes trying (potentially) several upstream masters. Downloading
	 * and storing the zone */
	struct auth_transfer* task_transfer;

	/** a notify was received, but a zone transfer or probe was already
	 * acted on.
	 * However, the zone transfer could signal a newer serial number.
	 * The serial number of that notify is saved below. The transfer and
	 * probe tasks should check this once done to see if they need to
	 * restart the transfer task for the newer notify serial.
	 * Hold the lock to access this member (and the serial). */
	int notify_received;
	/** true if the notify_received has a serial number */
	int notify_has_serial;
	/** serial number of the notify */
	uint32_t notify_serial;
	/** the list of masters for checking notifies. This list is
	 * empty on start, and a copy of the list from the probe_task when
	 * it is done looking them up. */
	struct auth_master* allow_notify_list;

	/* protected by the lock on the structure, information about
	 * the loaded authority zone. */
	/** is the zone currently considered expired? after expiry also older
	 * serial numbers are allowed (not just newer) */
	int zone_expired;
	/** do we have a zone (if 0, no zone data at all) */
	int have_zone;
	/** current serial (from SOA), if we have no zone, 0 */
	uint32_t serial;
	/** retry time (from SOA), time to wait with next_probe
	 * if no master responds */
	time_t retry;
	/** refresh time (from SOA), time to wait with next_probe
	 * if everything is fine */
	time_t refresh;
	/** expiry time (from SOA), time until zone data is not considered
	 * valid any more, if no master responds within this time, either
	 * with the current zone or a new zone. */
	time_t expiry;
	/** zone lease start time (start+expiry is expiration time).
	 * this is renewed every SOA probe and transfer. On zone load
	 * from zonefile it is also set (with probe set soon to check) */
	time_t lease_time;
};

/**
 * The next probe task.
 * This task consists of waiting for the probetimeout. It is a task because
 * it needs an event in the eventtable. Once the timeout has passed, that
 * worker can (potentially) become the auth_probe worker, or if another worker
 * is already doing that, do nothing. The task becomes unowned.
 * The probe worker, if it detects nothing has to be done, picks up this task
 * if unowned.
 */
struct auth_nextprobe {
	/* Worker pointer. NULL means unowned. */
	struct worker* worker;
	/* module env for this task */
	struct module_env* env;

	/** increasing backoff for failures */
	time_t backoff;
	/** Timeout for next probe (for SOA) */
	time_t next_probe;
	/** timeout callback for next_probe or expiry(if that is sooner).
	 * it is on the worker's event_base */
	struct comm_timer* timer;
};

/**
 * The probe task.
 * Send a SOA UDP query to see if the zone needs to be updated (or similar,
 * potential, HTTP probe query) and check serial number.
 * If yes, start the auth_transfer task. If no, make sure auth_nextprobe
 * timeout wait task is running.
* Needs to be a task, because the UDP query needs an event entry. * This task could also be started by eg. a NOTIFY being received, even though * another worker is performing the nextprobe task (and that worker keeps * waiting uninterrupted). */ struct auth_probe { /* Worker pointer. NULL means unowned. */ struct worker* worker; /* module env for this task */ struct module_env* env; /** list of upstream masters for this zone, from config */ struct auth_master* masters; /** for the hostname lookups, which master is current */ struct auth_master* lookup_target; /** are we looking up A or AAAA, first A, then AAAA (if ip6 enabled) */ int lookup_aaaa; /** we only want to do lookups for making config work (for notify), * don't proceed with UDP SOA probe queries */ int only_lookup; /** we have seen a new lease this scan, because one of the masters * replied with the current SOA serial version */ int have_new_lease; /** once notified, or the timeout has been reached. a scan starts. */ /** the scan specific target (notify source), or NULL if none */ struct auth_master* scan_specific; /** scan tries all the upstream masters. the scan current target. * or NULL if not working on sequential scan */ struct auth_master* scan_target; /** if not NULL, the specific addr for the current master */ struct auth_addr* scan_addr; /** dns id of packet in flight */ uint16_t id; /** the SOA probe udp event. * on the workers event base. */ struct comm_point* cp; /** is the cp for ip6 or ip4 */ int cp_is_ip6; /** timeout for packets. * on the workers event base. */ struct comm_timer* timer; /** timeout in msec */ int timeout; }; /** * The transfer task. * Once done, make sure the nextprobe waiting task is running, whether done * with failure or success. If failure, use shorter timeout for wait time. */ struct auth_transfer { /* Worker pointer. NULL means unowned. */ struct worker* worker; /* module env for this task */ struct module_env* env; /** xfer data that has been transferred, the data is applied * once the transfer has completed correctly */ struct auth_chunk* chunks_first; /** last element in chunks list (to append new data at the end) */ struct auth_chunk* chunks_last; /** list of upstream masters for this zone, from config */ struct auth_master* masters; /** for the hostname lookups, which master is current */ struct auth_master* lookup_target; /** are we looking up A or AAAA, first A, then AAAA (if ip6 enabled) */ int lookup_aaaa; /** once notified, or the timeout has been reached. a scan starts. */ /** the scan specific target (notify source), or NULL if none */ struct auth_master* scan_specific; /** scan tries all the upstream masters. the scan current target. * or NULL if not working on sequential scan */ struct auth_master* scan_target; /** what address we are scanning for the master, or NULL if the * master is in IP format itself */ struct auth_addr* scan_addr; /** the zone transfer in progress (or NULL if in scan). It is * from this master */ struct auth_master* master; /** failed ixfr transfer, retry with axfr (to the current master), * the IXFR was 'REFUSED', 'SERVFAIL', 'NOTIMPL' or the contents of * the IXFR did not apply cleanly (out of sync, delete of nonexistent * data or add of duplicate data). Flag is cleared once the retry * with axfr is done. 
*/ int ixfr_fail; /** we saw an ixfr-indicating timeout, count of them */ int ixfr_possible_timeout_count; /** we are doing IXFR right now */ int on_ixfr; /** did we detect the current AXFR/IXFR serial number yet, 0 not yet, * 1 we saw the first, 2 we saw the second, 3 must be last SOA in xfr*/ int got_xfr_serial; /** number of RRs scanned for AXFR/IXFR detection */ size_t rr_scan_num; /** we are doing an IXFR but we detected an AXFR contents */ int on_ixfr_is_axfr; /** the serial number for the current AXFR/IXFR incoming reply, * for IXFR, the outermost SOA records serial */ uint32_t incoming_xfr_serial; /** dns id of AXFR query */ uint16_t id; /** the transfer (TCP) to the master. * on the workers event base. */ struct comm_point* cp; /** timeout for the transfer. * on the workers event base. */ struct comm_timer* timer; }; /** list of addresses */ struct auth_addr { /** next in list */ struct auth_addr* next; /** IP address */ struct sockaddr_storage addr; /** addr length */ socklen_t addrlen; }; /** auth zone master upstream, and the config settings for it */ struct auth_master { /** next master in list */ struct auth_master* next; /** master IP address (and port), or hostname, string */ char* host; /** for http, filename */ char* file; /** use HTTP for this master */ int http; /** use IXFR for this master */ int ixfr; /** this is an allow notify member, the master can send notifies * to us, but we don't send SOA probes, or zone transfer from it */ int allow_notify; /** use ssl for channel */ int ssl; /** the port number (for urls) */ int port; /** if the host is a hostname, the list of resolved addrs, if any*/ struct auth_addr* list; }; /** auth zone master zone transfer data chunk */ struct auth_chunk { /** next chunk in list */ struct auth_chunk* next; /** the data from this chunk, this is what was received. * for an IXFR that means results from comm_net tcp actions, * packets. also for an AXFR. For HTTP a zonefile chunk. */ uint8_t* data; /** length of allocated data */ size_t len; }; /** * Create auth zones structure */ struct auth_zones* auth_zones_create(void); /** * Apply configuration to auth zones. Reads zonefiles. * @param az: auth zones structure * @param cfg: config to apply. * @param setup: if true, also sets up values in the auth zones structure * @param is_rpz: set to 1 if at least one RPZ zone is configured. * @param env: environment for offline verification. * @param mods: modules in environment. * @return false on failure. */ int auth_zones_apply_cfg(struct auth_zones* az, struct config_file* cfg, int setup, int* is_rpz, struct module_env* env, struct module_stack* mods); /** initial pick up of worker timeouts, ties events to worker event loop * @param az: auth zones structure * @param env: worker env, of first worker that receives the events (if any) * in its eventloop. */ void auth_xfer_pickup_initial(struct auth_zones* az, struct module_env* env); /** * Cleanup auth zones. This removes all events from event bases. * Stops the xfr tasks. But leaves zone data. * @param az: auth zones structure. */ void auth_zones_cleanup(struct auth_zones* az); /** * Delete auth zones structure */ void auth_zones_delete(struct auth_zones* az); /** * Write auth zone data to file, in zonefile format. */ int auth_zone_write_file(struct auth_zone* z, const char* fname); /** * Use auth zones to lookup the answer to a query. * The query is from the iterator. And the auth zones attempts to provide * the answer instead of going to the internet. * * @param az: auth zones structure. 
 * @param qinfo: query info to lookup.
 * @param region: region to use to allocate the reply in.
 * @param msg: reply is stored here (if one).
 * @param fallback: if true, fallback to making a query to the internet.
 * @param dp_nm: name of delegation point to look for. This zone is used
 *	to answer the query.
 *	If the dp_nm is not found, fallback is set to true and false returned.
 * @param dp_nmlen: length of dp_nm.
 * @return 0: failure (an error of some sort, like servfail).
 *	if 0 and fallback is true, fallback to the internet.
 *	if 0 and fallback is false, like getting servfail.
 *	If true, an answer is available.
 */
int auth_zones_lookup(struct auth_zones* az, struct query_info* qinfo,
	struct regional* region, struct dns_msg** msg, int* fallback,
	uint8_t* dp_nm, size_t dp_nmlen);

/**
 * Answer query from auth zone. Create authoritative answer.
 * @param az: auth zones structure.
 * @param env: the module environment.
 * @param qinfo: query info (parsed).
 * @param edns: edns info (parsed).
 * @param buf: buffer with query ID and flags, also for reply.
 * @param repinfo: reply information for a communication point.
 * @param temp: temporary storage region.
 * @return false if not answered
 */
int auth_zones_answer(struct auth_zones* az, struct module_env* env,
	struct query_info* qinfo, struct edns_data* edns,
	struct comm_reply* repinfo, struct sldns_buffer* buf,
	struct regional* temp);

/**
 * Find the auth zone that is above the given qname.
 * Return NULL when there is no auth_zone above the given name, otherwise
 * returns the closest auth_zone above the qname that pertains to it.
 * @param az: auth zones structure.
 * @param name: query to look up for.
 * @param name_len: length of name.
 * @param dclass: class of zone to find.
 * @return NULL or auth_zone that pertains to the query.
 */
struct auth_zone* auth_zones_find_zone(struct auth_zones* az,
	uint8_t* name, size_t name_len, uint16_t dclass);

/** find an auth zone by name (exact match by name or NULL returned) */
struct auth_zone* auth_zone_find(struct auth_zones* az, uint8_t* nm,
	size_t nmlen, uint16_t dclass);
/** find an xfer zone by name (exact match by name or NULL returned) */
struct auth_xfer* auth_xfer_find(struct auth_zones* az, uint8_t* nm,
	size_t nmlen, uint16_t dclass);

/** create an auth zone. returns wrlocked zone. caller must have wrlock
 * on az. returns NULL on malloc failure */
struct auth_zone* auth_zone_create(struct auth_zones* az, uint8_t* nm,
	size_t nmlen, uint16_t dclass);

/** set auth zone zonefile string. caller must have lock on zone */
int auth_zone_set_zonefile(struct auth_zone* z, char* zonefile);

/** set auth zone fallback. caller must have lock on zone.
 * fallbackstr is "yes" or "no". false on parse failure. */
int auth_zone_set_fallback(struct auth_zone* z, char* fallbackstr);

/** see if the auth zone for the name can fallback
 * @param az: auth zones
 * @param nm: name of delegation point.
 * @param nmlen: length of nm.
 * @param dclass: class of zone to look for.
 * @return true if fallback_enabled is true. false if not.
 *	if the zone does not exist, fallback is true (more lenient)
 *	also true if zone does not do upstream requests.
 */
int auth_zones_can_fallback(struct auth_zones* az, uint8_t* nm,
	size_t nmlen, uint16_t dclass);

/** process notify for auth zones.
 * first checks the access list. Then processes the notify. This starts
 * the probe sequence or it notes the serial number (if any).
 * @param az: auth zones structure.
 * @param env: module env of the worker that is handling the notify.
it will * pick up the task probe (or transfer), unless already in progress by * another worker. * @param nm: name of the zone. Uncompressed. from query. * @param nmlen: length of name. * @param dclass: class of zone. * @param addr: source address of notify * @param addrlen: length of addr. * @param has_serial: if true, the notify has a serial attached. * @param serial: the serial number, if has_serial is true. * @param refused: is set to true on failure to note refused access. * @return fail on failures (refused is false) and when access is * denied (refused is true). True when processed. */ int auth_zones_notify(struct auth_zones* az, struct module_env* env, uint8_t* nm, size_t nmlen, uint16_t dclass, struct sockaddr_storage* addr, socklen_t addrlen, int has_serial, uint32_t serial, int* refused); /** process notify packet and read serial number from SOA. * returns 0 if no soa record in the notify */ int auth_zone_parse_notify_serial(struct sldns_buffer* pkt, uint32_t *serial); /** for the zone and if not already going, starts the probe sequence. * false if zone cannot be found. This is like a notify arrived and was * accepted for that zone. */ int auth_zones_startprobesequence(struct auth_zones* az, struct module_env* env, uint8_t* nm, size_t nmlen, uint16_t dclass); /** read auth zone from zonefile. caller must lock zone. false on failure */ int auth_zone_read_zonefile(struct auth_zone* z, struct config_file* cfg); /** find the apex SOA RRset, if it exists. NULL if no SOA RRset. */ struct auth_rrset* auth_zone_get_soa_rrset(struct auth_zone* z); /** find serial number of zone or false if none (no SOA record) */ int auth_zone_get_serial(struct auth_zone* z, uint32_t* serial); /** Find auth_zone SOA and populate the values in xfr(soa values). */ int xfr_find_soa(struct auth_zone* z, struct auth_xfer* xfr); /** compare auth_zones for sorted rbtree */ int auth_zone_cmp(const void* z1, const void* z2); /** compare auth_data for sorted rbtree */ int auth_data_cmp(const void* z1, const void* z2); /** compare auth_xfer for sorted rbtree */ int auth_xfer_cmp(const void* z1, const void* z2); /** Create auth_xfer structure. * Caller must have wrlock on az. Returns locked xfer zone. * @param az: zones structure. * @param z: zone with name and class * @return xfer zone or NULL */ struct auth_xfer* auth_xfer_create(struct auth_zones* az, struct auth_zone* z); /** * Set masters in auth xfer structure from config. * @param list: pointer to start of list. The malloced list is returned here. * @param c: the config items to copy over. * @param with_http: if true, http urls are also included, before the masters. * @return false on failure. 
*/ int xfer_set_masters(struct auth_master** list, struct config_auth* c, int with_http); /** xfer nextprobe timeout callback, this is part of task_nextprobe */ void auth_xfer_timer(void* arg); /** callback for commpoint udp replies to task_probe */ int auth_xfer_probe_udp_callback(struct comm_point* c, void* arg, int err, struct comm_reply* repinfo); /** callback for task_transfer tcp connections */ int auth_xfer_transfer_tcp_callback(struct comm_point* c, void* arg, int err, struct comm_reply* repinfo); /** callback for task_transfer http connections */ int auth_xfer_transfer_http_callback(struct comm_point* c, void* arg, int err, struct comm_reply* repinfo); /** xfer probe timeout callback, part of task_probe */ void auth_xfer_probe_timer_callback(void* arg); /** xfer transfer timeout callback, part of task_transfer */ void auth_xfer_transfer_timer_callback(void* arg); /** mesh callback for task_probe on lookup of host names */ void auth_xfer_probe_lookup_callback(void* arg, int rcode, struct sldns_buffer* buf, enum sec_status sec, char* why_bogus, int was_ratelimited); /** mesh callback for task_transfer on lookup of host names */ void auth_xfer_transfer_lookup_callback(void* arg, int rcode, struct sldns_buffer* buf, enum sec_status sec, char* why_bogus, int was_ratelimited); /* * Compares two 32-bit serial numbers as defined in RFC1982. Returns * <0 if a < b, 0 if a == b, and >0 if a > b. The result is undefined * if a != b but neither is greater or smaller (see RFC1982 section * 3.2.). */ int compare_serial(uint32_t a, uint32_t b); /** * Generate ZONEMD digest for the auth zone. * @param z: the auth zone to digest. * omits zonemd at apex and its RRSIG from the digest. * @param scheme: the collation scheme to use. Numbers as defined for ZONEMD. * @param hashalgo: the hash algo, from the registry defined for ZONEMD type. * @param hash: the result buffer. * @param buflen: size of the result buffer, must be large enough. or the * routine fails. * @param resultlen: size of the hash in the result buffer of the result. * @param region: temp region for allocs during canonicalisation. * @param buf: temp buffer during canonicalisation. * @param reason: failure reason, returns a string, NULL on success. * @return false on failure. */ int auth_zone_generate_zonemd_hash(struct auth_zone* z, int scheme, int hashalgo, uint8_t* hash, size_t buflen, size_t* resultlen, struct regional* region, struct sldns_buffer* buf, char** reason); /** ZONEMD scheme definitions */ #define ZONEMD_SCHEME_SIMPLE 1 /** ZONEMD hash algorithm definition for SHA384 */ #define ZONEMD_ALGO_SHA384 1 /** ZONEMD hash algorithm definition for SHA512 */ #define ZONEMD_ALGO_SHA512 2 /** returns true if a zonemd hash algo is supported */ int zonemd_hashalgo_supported(int hashalgo); /** returns true if a zonemd scheme is supported */ int zonemd_scheme_supported(int scheme); /** * Check ZONEMD digest for the auth zone. * @param z: auth zone to digest. * @param scheme: zonemd scheme. * @param hashalgo: zonemd hash algorithm. * @param hash: the hash to check. * @param hashlen: length of hash buffer. * @param region: temp region for allocs during canonicalisation. * @param buf: temp buffer during canonicalisation. * @param reason: string returned with failure reason. * @return false on failure. 
*/ int auth_zone_generate_zonemd_check(struct auth_zone* z, int scheme, int hashalgo, uint8_t* hash, size_t hashlen, struct regional* region, struct sldns_buffer* buf, char** reason); /** * Perform ZONEMD checks and verification for the auth zone. * This includes DNSSEC verification if applicable. * @param z: auth zone to check. Caller holds lock. wrlock. * @param env: with temp region, buffer and config. * @param mods: module stack for validator env. * @param result: if not NULL, result string strdupped in here. * @param offline: if true, there is no spawned lookup when online is needed. * Those zones are skipped for ZONEMD checking. * @param only_online: if true, only for ZONEMD that need online lookup * of DNSKEY chain of trust are processed. */ void auth_zone_verify_zonemd(struct auth_zone* z, struct module_env* env, struct module_stack* mods, char** result, int offline, int only_online); /** mesh callback for zonemd on lookup of dnskey */ void auth_zonemd_dnskey_lookup_callback(void* arg, int rcode, struct sldns_buffer* buf, enum sec_status sec, char* why_bogus, int was_ratelimited); /** * Check the ZONEMD records that need online DNSSEC chain lookups, * for them spawn the lookup process to get it checked out. * Attaches the lookup process to the worker event base and mesh state. * @param az: auth zones, every zones is checked. * @param env: env of the worker where the task is attached. */ void auth_zones_pickup_zonemd_verify(struct auth_zones* az, struct module_env* env); #endif /* SERVICES_AUTHZONE_H */
def unit_of_measurement(self): if self.forecast_day is not None: return FORECAST_SENSOR_TYPES[self.kind][self._unit_system] return SENSOR_TYPES[self.kind][self._unit_system]
HARARE, Zimbabwe (CNN) -- Zimbabwe's inflation rate has soared in the past three months and is now at 11.2 million percent, the highest in the world, according to the country's Central Statistical Office. Official figures dated Monday show inflation has surged from the rate of 2.2 million percent recorded in May, despite the government's price controls. The country's finance minister confirmed the new figure in an interview but said the rising inflation rate was not confined to Zimbabwe alone. "While our case has been aggravated by the illegal sanctions imposed by the Western powers, rising food prices are a world phenomenon because of the use of bio-fuel," said Samuel Mumbengegwi. "But we will continue to fight inflation by making sure that prices charged are realistic." In February, the price of a loaf of bread in the country was less than 200,000 Zimbabwe dollars. On Monday, that same loaf of bread cost 1.6 trillion Zimbabwe dollars. Analysts have said the Zimbabwean government's official inflation rate figures are conservative. Last week, one of Zimbabwe's leading banks, Kingdom Bank, said the country's inflation rate was now more than 20 million percent. The locally-owned bank predicted tougher times ahead for Zimbabwe in the absence of donor support and foreign investment in an economy that has been in freefall for almost a decade. Once considered the breadbasket of Africa, Zimbabwe has been in the throes of an economic meltdown ever since the country embarked on a chaotic land reform program that has decimated commercial agriculture. Analysts say the crisis has worsened following President Robert Mugabe's disputed reelection in the June 27 presidential run-off. His challenger Morgan Tsvangirai boycotted the race over widespread allegations of violence and voter intimidation. The economic crisis has destroyed Zimbabwe's currency and made it difficult for Zimbabweans to buy basic commodities, electricity, fuel, and medicines. Many Zimbabweans have left the country amid rising unemployment and deepening poverty. Last week a summit in South Africa of regional African leaders failed to persuade Zimbabwe's political parties to agree to form a government of national unity, which observers view as the best way to end Zimbabwe's record recession.
What once was available for sale only through doctors' offices, this powerful digestive supplement is now available to the public. Purely Scientific stunned the competition by releasing an all-new Digestive Enzyme Solution formulation intended to stand out in the crowd of other 'similar' products. There is a plethora of digestive enzyme products on the market, but this premiere product is the only one which effectively targets three highly desired benefits in an all-in-one solution. While individual products exist which target one, or maybe two, of these goals, an all-in-one easy-to-consume capsule that targets all three has never been done before. Purely Scientific gave their scientists a challenge: create an all-in-one solution that effectively targets the three most commonly desired digestive benefits: 1) a powerful digestive enzyme essentials formulation to optimize overall digestion, 2) support for common gluten and lactose intolerance issues, and 3) a generous supply of the best probiotics to turbo-charge the other two results. Designed originally to be sold only to doctors, Purely Scientific's researchers went to work formulating this product using only the purest, highest quality ingredients. Based upon the company's philosophy of basing all formulations only on scientific findings and clinical studies, with no fillers whatsoever, the formulation went through rigorous testing. Only gold-standard, proven, patented ingredients were utilized, at clinically proven dosages. The scientists succeeded with their challenge. This formulation was previously sold to patients only through doctors' offices. Doctors across the country were delighted with the patient health outcomes achieved from this digestive enzyme and probiotics supplement. Patients showed remarkable improvement with such diverse diseases and conditions as IBS, Crohn's, ulcerative colitis, gluten sensitivity, lactose sensitivity, as well as more generalized digestive conditions with symptoms of bloating, discomfort, and indigestion. Purely Scientific's spokesperson notes, "Of course, individuals and individual responses vary and any one person's response cannot be guaranteed. However, given the large clinical studies and success already achieved, someone experiencing digestive issues would be wise to give this premiere digestive enzyme solution a try."
Does the concept of an intelligent designer make sense?

Human beings explain features of the world around them in two main ways. One way is to supply naturalistic explanations that appeal to features of the natural world, such as natural events, forces and laws. The explanations of physics and chemistry fall into this category. The other way is to offer intentional explanations – explanations that appeal to the beliefs and desires of more or less rational agents. Why is there a tree in this spot? Because Ted wanted to see a tree from his bedroom window, and so planted a sapling here, correctly supposing it would grow into a handsome tree.

When we are unable to explain something naturalistically, it is, of course, tempting to look for an intentional explanation instead. When we could not offer naturalistic explanations for why the heavenly bodies moved about as they did, we supposed that they must be, or must be moved by, agents - gods of some sort. When we could not otherwise explain diseases and natural disasters, we put them down to the actions of malevolent agents, such as witches and demons. When we could not provide naturalistic explanations for the growth of plants and the cycle of the seasons, we again invoked agents – sprites, fairies, and gods of various sorts.

As our scientific understanding of the world has increased, the need to invoke witches, fairies, demons and other such agents to account for features of the natural world has diminished. However, when we ask why the natural world exists at all, and what explains why it has the fundamental laws it does, such naturalistic explanations are not available. So an explanation in terms of the activity of some sort of transcendent agent might seem attractive. But does such an explanation even make sense?

Suppose I claim that there exists a non-spatial mountain. It’s a mountain – with a sharp summit flanked by valleys and steep crags. Only it is not located or extended in space at all. It does not have spatial dimensions. This mountain transcends our spatial world. You might well ask me why I suppose there is any such mountain. And if I cannot give you good reasons, you will rightly be sceptical. But actually, isn’t there a rather more fundamental problem with my claim that such a mountain exists? Can’t we know, before we get to the question of whether there is any evidence for the existence of my non-spatial mountain, that there can be no such thing? For the very idea of such a mountain makes no sense. My hypothetical mountain has a summit and valleys and steep cliffs, but these are all features that require spatial extension. A summit requires that one part of the mountain be higher than another. A valley must be lower than the surrounding terrain. The concepts of a mountain, summit, valleys, and so on are concepts that can only sensibly be applied within a spatial context. Strip that context away and we end up talking nonsense.

But if we now turn to the concept of a transcendent designer, does that make any more sense? The concept of an agent has its home within a temporal setting. An agent is someone or thing that performs actions as a result of its various beliefs and desires. But actions are events that happen at particular moments in time. And beliefs and desires are psychological states that have a temporal duration.
Now when we suppose that the spatio-temporal universe was created by some sort of agent, we are presumably supposing it was designed by a non-temporal agent – an agent that does not (or at least did not then) exist in time. For there was not yet any time for the agent to exist in. But if desires are psychological states with temporal duration, how, then, could this agent possess the desire to create the universe? And how did it perform the act of creation if there was not yet any time in which actions might be performed? It is hard to see how talk of a non-temporal agent makes any more sense than talk of a non-spatial mountain. We could sidestep these puzzles by supposing that God exists, and has always existed, in time. This provides God with the necessary temporal dimension in which he might possess the desire to produce a universe, draw up a design, and perform the act of creation. But it raises a host of other bizarre questions, such as: why did God wait so long before creating the universe (presumably, if God did not himself have a beginning, an infinitely long time)? And what was he doing in the meantime?
import matplotlib.pyplot as plt  # module-level import required by this method


def show_nth_sample(self, n):
    """Display the n-th sample of the dataset as an image."""
    # squeeze() drops singleton axes so imshow gets a 2-D or HxWx3 array.
    x = self.get_nth_sample(n).squeeze()
    plt.imshow(x)
    plt.show()
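For context, a minimal sketch of the kind of container class this method might belong to; the SampleViewer name, the samples attribute and the get_nth_sample accessor are illustrative assumptions, not part of the original snippet:

import matplotlib.pyplot as plt
import numpy as np


class SampleViewer:
    """Hypothetical host class; only show_nth_sample mirrors the snippet above."""

    def __init__(self, samples):
        # samples: array-like of shape (N, H, W) or (N, H, W, 1)
        self.samples = np.asarray(samples)

    def get_nth_sample(self, n):
        # Assumed accessor: returns the n-th sample, possibly carrying a
        # singleton channel axis that squeeze() later removes.
        return self.samples[n]

    def show_nth_sample(self, n):
        x = self.get_nth_sample(n).squeeze()
        plt.imshow(x)
        plt.show()


# Example usage with random grayscale images:
# SampleViewer(np.random.rand(10, 28, 28, 1)).show_nth_sample(0)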
import numpy as np
import scipy.sparse as SP
from numpy.testing import assert_equal, assert_array_almost_equal
# assert_true, types_compatible, MatlabObject and text_type come from the
# surrounding scipy.io.matlab test module; exact import paths vary by version.


def _check_level(label, expected, actual):
    """Recursively compare a MATLAB round-trip value against the original."""
    # Sparse matrices: compare their dense forms approximately.
    if SP.issparse(expected):
        assert_true(SP.issparse(actual))
        assert_array_almost_equal(actual.todense(),
                                  expected.todense(),
                                  err_msg=label,
                                  decimal=5)
        return
    # Check types are compatible before comparing values.
    assert_true(types_compatible(expected, actual),
                "Expected type %s, got %s at %s" %
                (type(expected), type(actual), label))
    # Scalars and other plain values: compare directly.
    if not isinstance(expected, (np.void, np.ndarray, MatlabObject)):
        assert_equal(expected, actual)
        return
    # Array-like values: shapes must match exactly.
    assert_true(expected.shape == actual.shape,
                msg='Expected shape %s, got %s at %s' %
                (expected.shape, actual.shape, label))
    ex_dtype = expected.dtype
    if ex_dtype.hasobject:  # array of objects: recurse into each element
        if isinstance(expected, MatlabObject):
            assert_equal(expected.classname, actual.classname)
        for i, ev in enumerate(expected):
            level_label = "%s, [%d], " % (label, i)
            _check_level(level_label, ev, actual[i])
        return
    if ex_dtype.fields:  # record arrays: recurse into each field
        for fn in ex_dtype.fields:
            level_label = "%s, field %s, " % (label, fn)
            _check_level(level_label, expected[fn], actual[fn])
        return
    if ex_dtype.type in (text_type,  # string
                         np.unicode_,
                         np.bool_):
        assert_equal(actual, expected, err_msg=label)
        return
    # Everything else: plain numeric arrays, compared approximately.
    assert_array_almost_equal(actual, expected, err_msg=label, decimal=5)
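A minimal sketch of how a helper like this is typically driven in round-trip tests; the in-memory savemat/loadmat round trip below is an illustrative assumption, not code taken from the original test module:

from io import BytesIO

import numpy as np
from scipy.io import savemat, loadmat

# Round-trip a small array through the MATLAB v5 format and compare.
expected = np.arange(6.0).reshape(2, 3)
buf = BytesIO()
savemat(buf, {'a': expected})
buf.seek(0)
actual = loadmat(buf)['a']
_check_level("variable a", expected, actual)  # raises AssertionError on mismatch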
import {ICacheProvider, ICacheProviderOptions, CacheProvider} from '@sarahjs/core';

/**
 * Controls cache lifetime.
 */
export class MemoryProvider extends CacheProvider implements ICacheProvider {
    private caches: any;
    name: string;

    constructor(options?: ICacheProviderOptions) {
        super();
        this.caches = {};
        // Guard the optional options object before dereferencing it.
        if (options) {
            this.name = options.providerName;
            if (options.compareFn) {
                this.compare = options.compareFn;
            }
        }
    }

    private compare(singleHash: string) {
        let value = null;
        Object.keys(this.caches).forEach((singleStoredHash) => {
            if (singleStoredHash === singleHash) value = this.caches[singleStoredHash];
        });
        return value;
    }

    public async set(requestedData: {val: any, hash: string}[], ttl?: number): Promise<any | any[]> {
        for (let i = 0; i < requestedData.length; i++) {
            const singleRequestData = requestedData[i];
            if (!this.compare(singleRequestData.hash)) {
                this.caches[singleRequestData.hash] = singleRequestData.val;
            }
            if (ttl) {
                // Fire-and-forget expiry; errors are intentionally swallowed.
                this.invalidateCache(singleRequestData.hash, ttl)
                    .then(() => {
                        // no-op
                    })
                    .catch((err) => {
                        // no-op
                    });
            }
        }
        return requestedData.map((single) => single.val);
    }

    public async get(requestedHashes: string[]): Promise<any | any[]> {
        const foundCache = [];
        requestedHashes.forEach((singleRequest) => {
            const isFound = this.compare(singleRequest);
            if (isFound) foundCache.push(this.caches[singleRequest]);
        });
        return foundCache;
    }

    public async invalidateCache(requestedHash: any, ttl: number) {
        setTimeout(() => {
            this.removeFromCache(requestedHash);
        }, ttl);
    }

    private removeFromCache(requestedHash: string) {
        this.caches[requestedHash] = null;
    }
}
import * as React from 'react'; import { RenderInlineProps } from "slate-react"; import * as css from './PlaceholderPlugin.scss'; import cx from 'classnames'; import { uuiMod } from "@epam/uui-core"; export class PlaceholderBlock extends React.Component<RenderInlineProps> { render() { const { attributes, node } = this.props; const src = node.data.get('name'); return <span { ...attributes } className={ cx(css.placeholderBlock, this.props.isFocused && uuiMod.focus) }> { src } </span>; } }
import phonenumber_field.modelfields
from django.db import migrations, models

import fluentcms_contactform.appsettings


class Migration(migrations.Migration):

    dependencies = [
        ("fluent_contents", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="ContactFormData",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID", serialize=False, auto_created=True, primary_key=True
                    ),
                ),
                (
                    "submit_date",
                    models.DateTimeField(auto_now_add=True, verbose_name="Submit date"),
                ),
                (
                    "ip_address",
                    models.GenericIPAddressField(null=True, verbose_name="IP address", blank=True),
                ),
                ("internal_note", models.TextField(verbose_name="Internal Notes", blank=True)),
                (
                    "is_archived",
                    models.BooleanField(
                        default=False,
                        help_text="Mark the form as archived when the e-mail has been handled.",
                        db_index=True,
                        verbose_name="Archived",
                    ),
                ),
                ("name", models.CharField(max_length=200, verbose_name="Name")),
                ("email", models.EmailField(max_length=200, verbose_name="Email")),
                (
                    "phone_number",
                    phonenumber_field.modelfields.PhoneNumberField(
                        max_length=128, null=True, verbose_name="Phone number", blank=True
                    ),
                ),
                ("subject", models.CharField(max_length=200, verbose_name="Subject", default="")),
                ("message", models.TextField(verbose_name="Message")),
            ],
            options={
                "ordering": ("submit_date",),
                "verbose_name": "Contact form data",
                "verbose_name_plural": "Contact form data",
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name="ContactFormItem",
            fields=[
                (
                    "contentitem_ptr",
                    models.OneToOneField(
                        parent_link=True,
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        to="fluent_contents.ContentItem",
                        on_delete=models.CASCADE,
                    ),
                ),
                (
                    "form_style",
                    models.CharField(
                        max_length=100,
                        verbose_name="Form",
                        choices=fluentcms_contactform.appsettings.FORM_STYLE_CHOICES,
                    ),
                ),
                (
                    "email_to",
                    models.EmailField(
                        help_text="The email address where submitted forms should be sent to.",
                        max_length=200,
                        verbose_name="Email to",
                    ),
                ),
                ("success_message", models.TextField(verbose_name="Thank you message")),
            ],
            options={
                "db_table": "contentitem_fluentcms_contactform_contactformitem",
                "verbose_name": "Contact form",
                "verbose_name_plural": "Contact form",
            },
            bases=("fluent_contents.contentitem",),
        ),
    ]
package com.mangooa.common.platform.group;

/**
 * Group type.
 *
 * @author <NAME>
 * @since 1.0.0
 **/
public enum GroupType {

	/**
	 * Global group, accessible to all tenants.
	 */
	GLOBAL_SCOPE,

	/**
	 * Application group.
	 */
	APP_SCOPE;

}
export interface Block {
	role: string;
	value:
		| midHeaderNode
		| pageNode
		| quoteNode
		| textNode
		| rowNode
		| columnNode
		| imageNode
		| codeNode
		| enumListNode
		| todoListNode
		| bullListNode
		| toggleListNode
		| calloutNode
		| bookmarkNode
		| fileNode
		| blockEquationNode
		| embedNode
		| simpleTableNode;
}

export type NodeTitle = (
	| [string]
	| [
			string,
			(
				| (
						| [/** wolai internal link */ "BiLink", string, string]
						| [/** ordinary hyperlink */ "Link", string]
						| [/** inline code */ "<>"]
						| [/** bold */ "B"]
				  )[]
				| [/** do nothing */]
			),
	  ]
)[];

export interface Node {
	id: string;
	active: boolean;
	attributes: {
		title?: NodeTitle;
	};
	created_by: string;
	created_time: number;
	edited_by: string;
	edited_time: number;
	page_id: string;
	parent_id: string;
	parent_type: string;
	permissions: any[];
	setting: {};
	sub_nodes: string[];
	text_content: string;
	type: string;
	ver: number;
	workspace_id: string;
}

export interface midHeaderNode extends Node {
	type: "midHeader";
	attributes: { title: NodeTitle };
}

export interface pageNode extends Node {
	type: "page";
	attributes: { title: NodeTitle };
}

export interface quoteNode extends Node {
	type: "quote";
	attributes: { title: NodeTitle };
}

export interface textNode extends Node {
	type: "text";
	attributes: { title: NodeTitle };
}

export interface rowNode extends Node {
	type: "row";
	attributes: {};
}

export interface columnNode extends Node {
	type: "column";
}

export interface imageNode extends Node {
	type: "image";
	attributes: {
		dimensions: {
			width: number;
			height: number;
		}[];
		original: {
			width: number;
			height: number;
		}[][];
		img: string[][];
		source: string;
		bucket: string[][];
		title: NodeTitle;
	};
}

export interface codeNode extends Node {
	type: "code";
	attributes: { title: NodeTitle; lineBreak: false; ligatures: false; language: "HTML"; line_number: false };
}

export interface todoListNode extends Node {
	type: "todoList";
	attributes: { title: NodeTitle; checked?: "no" | "yes" };
}

export interface bullListNode extends Node {
	type: "bullList";
	attributes: { title: NodeTitle; checked?: "no" | "yes" };
}

export interface enumListNode extends Node {
	type: "enumList";
	attributes: { title: NodeTitle; checked?: "no" | "yes" };
}

export interface toggleListNode extends Node {
	type: "toggleList";
	attributes: { title: NodeTitle; checked?: "no" | "yes" };
}

export interface calloutNode extends Node {
	type: "callout";
	attributes: { title: NodeTitle };
}

export interface bookmarkNode extends Node {
	type: "bookmark";
	attributes: {
		title: NodeTitle;
		/** the real URL the bookmark points to */
		source: string;
		rich_media: {
			/** description of the web page */
			description?: string;
			hostname?: string;
			icons: { href: string }[];
			/** thumbnail */
			thumbnail: { href: string }[];
			title: string;
		}[];
	};
}

export interface fileNode extends Node {
	type: "file";
	attributes: {
		title: NodeTitle;
		alias: string[];
		file: string[];
		bucket: string[][];
	};
}

export interface blockEquationNode extends Node {
	type: "blockEquation";
	attributes: {
		title: NodeTitle;
	};
}

/** embed block */
export interface embedNode extends Node {
	type: "embed";
	attributes: {
		title: NodeTitle;
		/** address of the embedded content */
		source: string;
		/** address of the embedded content */
		embedLink: string;
	};
}

export interface simpleTableNode extends Node {
	type: "simpleTable";
	attributes: {
		title: NodeTitle;
		cells: string;
		cellsJson: {
			column: {
				id: string;
				attributes: {
					title: NodeTitle;
				};
				setting: {};
			}[];
			flex: number;
		}[];
	};
}

export interface pageChunkRes {
	code: number;
	data: {
		block: {
			[id: string]: Block;
		};
		position: unknown;
	};
	message: string;
}

const t = [
	{
		column: [
			{ id: "b1Bh3ZixtGpqabK5H9Ydd8", attributes: { title: [["这是简单表格", [["B"]]]] }, setting: {} },
			{
				id: "iHjmeSHDhTtyUnVEgfsTWS",
				attributes: { title: [["测试页面", [["BiLink", "cVskxFpJphYGSeukC7CdVx", "jf3mMBrXqF8vdnqQqVjg5X"]]]] },
				setting:
{}, }, { id: "cAc1GmjYiSnn8ynQ7WsAMt", attributes: { title: [[""]] }, setting: {} }, { id: "hDqmduG6xS8eXUjn3pwCN3", attributes: { title: [[""]] }, setting: {} }, ], flex: 213, }, { column: [ { id: "jjEHZn7sH8ugFio7EK5Z6v", attributes: { title: [["第二列"]] }, setting: {} }, { id: "kvMoYCoLqbcn72hM3ouGw2", attributes: { title: [[""]] }, setting: {} }, { id: "jciogQDoDgdJ4wELDjcvap", attributes: { title: [[""]] }, setting: {} }, { id: "3az2Bs1gpeNCuCm61B5RPE", attributes: { title: [[""]] }, setting: {} }, ], flex: 100, }, { column: [ { id: "2CnQmWwweUqLyjhQz44SU8", attributes: { title: [[""]] }, setting: {} }, { id: "4s8UdCfJRSE7nHU3HQP7nq", attributes: { title: [[""]] }, setting: {} }, { id: "i8PeeR8pDDNdE8Z61PdbcZ", attributes: { title: [[""]] }, setting: {} }, { id: "t2CgiRkcq1T9FoB3wjwijD", attributes: { title: [[""]] }, setting: {} }, ], flex: 100, }, ];
/* Blocks until there are no remaining async messages. */
void sched_no_async_msg( void )
{
	pthread_mutex_lock(&mutex_async_msg_count);
	/* Re-check the predicate in a loop: pthread_cond_wait() may wake
	   spuriously, so a single if-check is not sufficient. */
	while( async_msg != 0 ) {
		pthread_cond_wait(&cond_no_remain_async_msg, &mutex_async_msg_count);
	}
	pthread_mutex_unlock(&mutex_async_msg_count);
}
/*
Command cloudchaser is a minimal build sentry for Flitter. It will check
permissions based on environment variables and then return via exit code.
This is designed to be run from the context of a git push.

	Usage: cloudchaser <revision> <sha>
*/
package main
/** * Notifies reception of exposure capabilities and settings. * * @param supportedShutterSpeeds supported manual shutter speed values * @param supportedManualIsoSensitivities supported manual ISO sensitivity values * @param supportedMaxIsoSensitivities supported maximum ISO sensitivity values * @param mode exposure mode * @param shutterSpeed manual shutter speed * @param manualIsoSensitivity manual ISO sensitivity * @param maxIsoSensitivity maximum ISO sensitivity * @param autoExposureMeteringMode auto exposure metering mode */ abstract void onExposureSettings(@NonNull EnumSet<CameraExposure.ShutterSpeed> supportedShutterSpeeds, @NonNull EnumSet<CameraExposure.IsoSensitivity> supportedManualIsoSensitivities, @NonNull EnumSet<CameraExposure.IsoSensitivity> supportedMaxIsoSensitivities, @NonNull CameraExposure.Mode mode, @NonNull CameraExposure.ShutterSpeed shutterSpeed, @NonNull CameraExposure.IsoSensitivity manualIsoSensitivity, @NonNull CameraExposure.IsoSensitivity maxIsoSensitivity, @NonNull CameraExposure.AutoExposureMeteringMode autoExposureMeteringMode);
The crystal structure of KSHV ORF57 reveals dimeric active sites important for protein stability and function

Kaposi’s sarcoma-associated herpesvirus (KSHV) is a γ-herpesvirus closely associated with Kaposi’s sarcoma, primary effusion lymphoma and multicentric Castleman disease. Open reading frame 57 (ORF57), a viral early protein of KSHV, promotes splicing, stability and translation of viral mRNA and is essential for viral lytic replication. Previous studies demonstrated that dimerization of ORF57 stabilizes the protein, which is critical for its function. However, the detailed structural basis of dimerization had not been elucidated. In this study, we report the crystal structures of the C-terminal domain (CTD) of ORF57 (ORF57-CTD) in both dimeric form at 3.5 Å and monomeric form at 3.0 Å. Both structures reveal that ORF57-CTD binds a single zinc ion through the consensus zinc-binding motif at the bottom of each monomer. In addition, the N-terminal residues 167–222 of ORF57-CTD protrude as a long “arm” that holds the globular domain of the neighboring monomer, while the C-terminal residues 445–454 are locked into the globular domain in cis and the globular domains interact in trans. In vitro crosslinking and nuclear translocation assays showed that either deletion of the “arm” region or substitution of key residues at the globular interface led to severe dimer dissociation. Introduction of point mutations into the zinc-binding motif also led to sharp degradation of KSHV ORF57 and its other herpesvirus homologues. These data indicate that the “arm” region, the residues at the globular interface and the zinc-binding motif are all equally important for ORF57 protein dimerization and stability. Consistently, a recombinant KSHV carrying a zinc-binding motif disrupted by point mutation exhibited a significant reduction in the RNA levels of the ORF57 downstream genes ORF59 and K8.1 and in infectious virus production. Taken together, this study presents the first structure of KSHV ORF57-CTD and provides new insights into the understanding of ORF57 protein dimerization and stability, which would shed light on the potential design of novel therapeutics against KSHV infection and related diseases.

Introduction

Kaposi's sarcoma-associated herpesvirus (KSHV, also known as human herpesvirus 8, HHV8) is a human tumor virus belonging to the lymphotropic gammaherpesvirus subfamily. KSHV was first discovered in Kaposi's sarcoma, a tumor of endothelial origin, and was later identified as the etiological agent of two lymphoproliferative disorders, primary effusion lymphoma (PEL) and multicentric Castleman's disease (MCD). Like other herpesviruses, KSHV establishes life-long persistent latent infection. Endogenous and exogenous stress can trigger KSHV reactivation and viral gene expression in a cascade fashion. During reactivation, the viral lytic switch protein RTA (replication and transcription activator) activates the expression of viral early genes including MTA (mRNA transcript accumulation) encoded by open reading frame 57 (ORF57). ORF57, or MTA, translocates into the cell nucleus and promotes lytic replication. Thus, an ORF57-deletion virus shows defective expression of viral lytic genes and abortive viral replication, indicating that ORF57 is indispensable for KSHV lytic replication. During lytic replication, ORF57 promotes the accumulation of different viral mRNAs and their gene expression through distinct mechanisms.
ORF57 stabilizes polyadenylated nuclear (PAN) RNA by binding cooperatively with PABPC1 to a 9-nt core of the MRE (MTA responsive element) within PAN RNA. ORF57 also enhances ORF59 mRNA stabilization by preventing RBM15-mediated hyperpolyadenylation and nuclear retention. Moreover, ORF57 directly interacts with SRSF3, thereby preventing SRSF3 from binding to the K8β intron and increasing K8β splicing. ORF57 also binds to the MTA responsive element (MRE) of vIL-6 to prevent the miR-1293-Ago2 RISC from associating with vIL-6 RNA, thereby enhancing vIL-6 translation. A recent report shows that ORF57 interacts with PACT and PKR to antagonize host antiviral defenses during viral lytic infection and virus production. In addition, ORF57 might sequester the hTREX complex to promote the double-strand break response and DNA damage. As an important viral protein, the domain organization and functional elements of ORF57 have been extensively studied. ORF57 comprises two domains with distinct putative structures: the highly disordered N-terminal domain (NTD) from residue 1 to 166, and the helix-rich C-terminal domain (CTD) from residue 167 to 455. Nuclear localization of ORF57 is mediated by the three nuclear localization signals (NLSs) within the NTD. The flexible NTD facilitates binding diversity and interacts with a number of cellular factors. The NTD is highly phosphorylated; the phosphorylation sites include S95/S97 identified by mass spectrometry and S21/T32/S43 identified by in vitro phosphorylation assay. The NTD also contains a caspase-7 cleavage site targeted as a cellular antiviral response, and blockade of caspase-7 cleavage promotes the expression of a subset of viral lytic genes. In contrast to the NTD, whose protein sequence is not highly conserved, the CTD is relatively conserved in putative secondary structure among herpesvirus homologues, including ICP27 of HSV1 (herpes simplex virus type 1), UL69 of HCMV (human cytomegalovirus), IE4 of VZV (varicella-zoster virus), EB2 of EBV (Epstein-Barr virus) and mORF57 of MHV68 (murine gammaherpesvirus 68). The CTD of KSHV ORF57 mediates self-interaction, a common feature found in the other homologues. So far, only the structure of the C-terminal domain of HSV1 ICP27 (aa 241–512) and the solution structure of a partial HVS ORF57 (aa 103–120)–ALYREF (aa 54–155) complex have been solved. Despite extensive studies on the important functions of ORF57 during KSHV lytic replication, the structural basis for the characterized ORF57 functions remains unclear. In a previous study, Majerciak et al. identified three dimerization-essential residues, E287, E288 and W292, by synthetic peptide competition assays and point mutations, and demonstrated that ORF57-CTD dimerization prevents protein degradation. However, the structural relationship between ORF57 dimerization and stability was not fully explored. In this study, we resolved the crystal structures of ORF57-CTD in dimeric form at 3.5 Å and in monomeric form at 3.0 Å by X-ray diffraction. Each CTD monomer has an N-terminal arm and a C-terminal globular domain bearing a zinc-binding motif. The arm from one monomer hooks the globular domain of a neighboring monomer in an anti-parallel manner, exposing the aligned globular-domain residues to each other. The electrostatic interactions of the interface residues between the two anti-parallel monomers further stabilize the dimerization.
Functional studies demonstrated that deletion or point mutation of any one of three structural elements (zinc-binding motif, arm and globular interface) prevents ORF57 dimerization and destabilizes ORF57.

Protein purification, crystallization, dehydration and data collection

ORF57-CTD (residues 167 to 455) is a theoretical 32-kDa protein with an isoelectric point (pI) of approximately 8.5. Previous studies indicated that the majority of ORF57-CTD exists as dimers. The expression and purification processes are described in the experimental procedures. Two kinds of ORF57 crystals obtained in our experiments were used to collect X-ray diffraction data separately (S1 Fig). The spindle-shaped crystal grew in a reservoir containing 0.8 M Li₂SO₄ and 0.1 M BIS-TRIS propane, with the other crystallization conditions shown in Table 1. The cubic crystal grew in a high-salt buffer containing 0.01 M MgCl₂, 0.05 M HEPES (pH 7.0) and 4.0 M LiCl. After data processing, the cubic crystal proved to contain only monomers in the crystal asymmetric unit, while in the spindle-shaped crystal ORF57-CTD forms a dimer in the asymmetric unit and the resolution was relatively lower (Table 2). As the cubic crystal grew in a buffer with a very high salt concentration, it is possible that the dimer dissociated into monomers during crystallization through weakened hydrophobic interactions. The monomeric structure was refined with R and Rfree values of 22.5% and 26.9%, respectively (Table 2). As shown in Fig 1B, monomeric ORF57-CTD forms a globular domain consisting of a bundle of 11 α-helices connected by unstructured loops of various sizes. The N-terminus is an extended "arm" region lacking any α-helices and containing only two small, unstable η-helices (3₁₀-helices). The dimeric crystal belongs to space group P6₄22, and the final structure (Fig 1C and S2 Movie) was refined at a resolution of 3.5 Å with R and Rfree values of 25.9% and 30.6%, respectively (Table 2).

[Fig 1 legend, condensed: (B) Monomeric structure shown as a cartoon model colored with a rainbow spectrum (α, α-helices; β, β-sheet; η, η-helices); the monomer harbors two additional η-helices at the N-terminus, and each monomer contains a zinc cation (Zn²⁺, grey sphere). (C) Dimeric structure of ORF57-CTD displayed in two views rotated 90°; helices are shown as cylinders and β-sheets as arrows; monomers A and B are colored with a rainbow spectrum and gray, respectively, with the numbered helices of the globular domain and the "arm" region detailed in monomer A. (D) Electrostatic surface of the monomer within the dimerized ORF57-CTD (scaled color in PyMOL); red, negative charge.]

The electron densities for residues V168–R189, P211–D220 and S455 are not visible in the monomeric structure. In the dimeric structure, only residues V168–L175 and S455 are unmodeled because of poor densities. The two monomers in the dimeric structure are nearly identical, with a root-mean-square deviation (RMSD) of atomic positions of only 0.528 Å when superimposed; however, when the dimer and monomer structures are superimposed, the N-terminus of a monomer within the dimer extends in a different direction from that of the free monomer (S2 Fig). The dimeric structure is formed by the extended N-terminal "arm" region contacting the globular domain of the neighboring monomer; it adopts a coffee-bean-like conformation with approximate dimensions of 69.0 × 50.9 × 50.0 Å in the asymmetric unit, with a deep groove extending around the interface of the two globular domains.
Each monomer contains a zinc ion chelated by four residues: the cysteines at positions 333, 427 and 432, and the histidine at position 423, which together form a consensus zinc-binding motif. The N-terminal "arm" region of one monomer protrudes onto the globular domain of the other, anti-parallel monomer, while the C-terminal end is enclosed within the globular domain of the same monomer.

The "arm" region of ORF57-CTD docks on the surface of the globular domain in trans

The N-terminal arm of ORF57-CTD (residues 167–222) contains a number of highly hydrophobic residues (especially I, V and F) (Fig 1D and 1E and S3A Fig). The arm extends from the globular domain of its own monomer to dock, like a barb, in a canyon on the surface of the globular domain of the neighboring monomer (Fig 1C and Fig 1D). Accordingly, the arm region in the dimeric structure lacks the two small, unstable η-helices seen in the monomer. PDBePISA analysis comparing the interactions at the dimer interface showed that the "arm" region makes the major contribution, mainly (>75%) through its hydrophobic residues, suggesting that the "arm" region very likely participates in dimerization (S5 Fig).

The C-terminus of ORF57-CTD inserts into the globular domain in cis

The C-terminus of ORF57-CTD (residues 445 to 454) shows a medium degree of conservation among herpesvirus homologues (S4 Fig). In the structure of ORF57-CTD, the C-terminal end inserts into the globular domain of the same monomer (Fig 2A). The C-terminus is rich in hydrophobic residues (Fig 2A and S3B Fig) and forms a series of hydrogen bonds and hydrophobic contacts with the surrounding residues (Fig 2B and 2C). The hydrophilic interactions between the C-terminus and the surrounding residues mainly come from the nitrogen and oxygen atoms of the backbones (Fig 2B and 2C). Specifically, the backbone nitrogen atom of F451 forms a hydrogen bond with the OE2 atom of E288, and Y452 is stabilized by residues K239, I241 and C202. The amino group of F450 also forms a hydrogen bond with OE2 of E288 (Fig 2C). G448, which is also rather conserved (Fig 2C and S8 Fig), forms a hydrogen bond between its oxygen atom and the NH1 group of R270. F445, N446 and K447 are not conserved (Fig 2C and S8 Fig). The carbonyl oxygen atoms of F445 and K447 interact with the OG1 atoms of T281 and T273, respectively. As a single residue within the C-terminus, N446 provides the most interactions: its ND2 atom contacts the oxygen atoms of A443 and D234, its OD1 atom forms a hydrogen bond with the NZ atom of K239, and its oxygen atom is stabilized by K239 and L449 as described above. These interactions lock the C-terminus into the globular domain in cis.

Comparison with the structure of HSV1 ICP27

A DALI search identified two structures (PDB ID: 4YXP and 5BQK) similar to our structures of ORF57-CTD. Both are in fact structures of HSV1 ICP27-CTD, determined independently by two research groups. Although ORF57-CTD and ICP27-CTD have low protein sequence identity (~15%), their structures are quite similar (Fig 2D), with an RMSD of 2.8 Å over 174 superimposed Cα atoms, suggesting that their conserved roles in self-interaction likely rely on their structures. However, there are two notable differences. First, in contrast to the ICP27 "arm" domain, which contains two helices, the ORF57 "arm" lacks any notable α-helix. Second, the C-terminal residues 500–512 (YVHGKYFYCNSLF) of ICP27-CTD dock on the neighboring monomer in trans and are required for dimerization.
In contrast, the C-terminal residues 445–454 (NKGLFFYPL) of ORF57-CTD are locked within the same monomer in cis and are thereby not directly involved in dimerization.

The structural determinants of ORF57 dimer formation

Based on these structural features, we concluded that each ORF57 monomer consists of two structurally distinct domains: (1) the "arm" region covering residues 167–219 and (2) the globular domain consisting of 11 helices covering the rest of ORF57-CTD. The connection between the two monomers is mediated by multiple hydrophilic interactions, including hydrogen bonds and salt bridges, which form two major interaction surfaces (S5 Fig). One interface is formed between the "arm" region and the surface of the globular domain, and the other between the two globular domains (Fig 3A). The arm-globular interface comprises a large number of residues and bonds (Fig 3B), whereas the globular-globular interface involves fewer residues and bonds (Fig 3C). To determine the roles of each interface in ORF57 dimerization, we constructed an arm-null mutant, Δ219, expressing only the globular domain, and an internal arm-deletion mutant, Δ167-219, in the context of full-length ORF57 (Fig 4A), and demonstrated that the arm-null ORF57 is labile and deficient in dimerization (Fig 4B and 4C). As shown in S6A Fig, the FLAG-tagged arm-null Δ219 mutant displayed little protein expression, and only in HEK293 cells treated with the proteasome inhibitor MG132, when compared with the wt ORF57 fusion (compare lanes 3–4 to 1–2), although similar RNA levels were seen for all three constructs (S6B Fig). This indicates that deletion of the arm renders the arm-deletion mutant highly unstable. To overcome this problem, we fused ORF57 to GFP after mutating the cryptic splice sites that lead to alternative splicing of ORF57-GFP, which partially stabilized the ORF57 arm-null mutant (S6C Fig). The capability of these proteins to form dimers in the presence or absence of the DSS crosslinker was initially compared by Western blot analysis of lysates from HEK293 cells transfected with the corresponding expression vectors. As expected, the full-length ORF57 and the arm-bearing Δ166 mutant efficiently formed homodimers, as seen in the previous publication, but the arm-null Δ219 and Δ167-219 mutants failed to form homodimers (Fig 4B, compare lanes 5–6 to 1–4 and lanes 9–10 to 7–8), indicating the important role of the "arm" region in ORF57 dimerization. The lack of dimerization of the arm-null Δ219 and Δ167-219 mutants was further confirmed in COS-1 cells by nuclear translocation assays. As shown in Fig 4C, by cotransfecting the GFP-tagged arm-null Δ219 or arm-bearing Δ166 with FLAG-tagged wt ORF57, we found that the cytoplasmic arm-bearing Δ166, but not the cytoplasmic arm-null Δ219, could dimerize with wt ORF57 in the cytoplasm and was subsequently translocated into the nuclear compartment (80% vs 22%, Fig 4C). This observation was strongly supported in a separate experiment using a mutant ORF57 with an internal arm deletion, which remains a nuclear protein. In the corresponding nuclear translocation assays, the internal-arm-deletion mutant also showed no dimerization with a GFP-tagged, cytoplasmic mutant ORF57 mt1+2+3, which contains the arm and an intact C-terminal domain (Fig 4C); we saw only 6% of double-positive cells displaying some degree of nuclear translocation.
Altogether, our data indicate that lack of the arm region prevents heterodimerization of ORF57-GFP with ORF57-FLAG.

[Fig 3 legend, condensed: (A) The two dimer interfaces: (1) the interaction between the "arm" region and the outer surface of the other monomer, and (2) the mutual interaction of the two globular domains at the interface (dashed box). (B and C) Hydrogen bonds between the two monomers contribute to dimerization: the interface between the "arm" region (green) and the globular domain (red) is mediated by a large number of residues within the "arm" region and on the surface of the globular domain (B), whereas the interface between the globular domains (colored green and red, respectively) is limited to several residues (C). Distances between individual residues are shown in Å.]

Contributions of the interface residues to ORF57 dimerization

The globular domain of ORF57-CTD consists of 11 helices and forms the contact interface in the center of the dimer (Fig 3A and 3C). The globular-globular interaction is mediated by a few hydrogen bonds between several residues (Fig 3C and S5 Fig). To determine their contributions to dimerization, we constructed a set of point mutations in the context of full-length ORF57 (Fig 5A). Specifically, we mutated R271, which is involved in the interaction with E431, to alanine; the neighboring R270 was simultaneously mutated to alanine to avoid possible charge compensation after the R271 mutation. In addition, we simultaneously mutated two neighboring arginines (R325 and R327) to alanine to prevent R325 from interacting with A275 and R327 from engaging its contact at the interface.

[Fig 4 legend, condensed: (B) Dimer formation was monitored by Western blotting using anti-GFP (lanes 1–6) or anti-FLAG (lanes 7–10) antibodies; circular symbols, monomer; square symbols, dimer; diamond symbol, nonspecific protein band. (C) COS-1 cells seeded on glass cover slips were cotransfected with a GFP-tagged arm-null mutant (ORF57Δ219-GFP) and an empty vector or FLAG-tagged full-length ORF57 (ORF57-FLAG); the arm-bearing, dimerization-competent mutant (ORF57Δ166-GFP) was used as a positive control. The FLAG-tagged internal arm-deletion mutant (ORF57Δ167-219-FLAG) was cotransfected with a dimerization-competent full-length ORF57-GFP containing three disrupted nuclear localization signals (ORF57mt1+2+3-GFP). Localization of GFP-tagged proteins (green) was determined by direct fluorescence microscopy; FLAG-tagged proteins (red) were visualized by indirect immunofluorescence staining with anti-FLAG M2 antibody; cell nuclei (blue) were counterstained with Hoechst 33342 dye. Indicated in the merged panels are % cells with nuclear translocation from 50 double-color positive cells; N/A, not applicable.]

In these studies, we used the dimerization-incompetent W292P mutant and the dimerization-competent K345A mutant as controls. All mutants were cloned and expressed as C-terminal FLAG-tagged fusion proteins (Fig 5A). An equal amount of the wild-type or mutant ORF57 expression vector was transfected into HEK293 cells separately. The levels of the expressed ORF57 proteins and the corresponding mRNAs were determined by Western and Northern blotting, respectively. As shown in Fig 5B, all ORF57 interface mutants, like the dimerization-incompetent mutant W292P, displayed significantly reduced protein levels in comparison with wild-type ORF57 and the dimerization-competent K345A, although no obvious changes were observed in their RNA levels by Northern blotting.
This indicates that each interface mutant was transcribed efficiently but that the mutant protein was unstable. To test whether the mutant proteins were degraded via a proteasome-mediated pathway, we next compared their expression in the presence or absence of the proteasome inhibitor MG132 (Fig 5C) and observed a partial restoration of expression, similar to the W292P mutant. To further examine whether the protein instability of each interface mutant was caused by loss of its dimerization activity, as seen for the W292P mutant, we performed the in vitro crosslinking assay after expression of each mutant in HEK293 cells. To eliminate the possibility that the lack of dimer detection was due to the low expression level of the unstable mutant proteins, we performed the same assay with protein expressed in HEK293 cells in the presence of MG132 and, after carefully normalizing the amount of protein expressed from the individual expression vectors, confirmed that the lack of dimerization of all interface mutants, as seen for W292P, was not due to their protein expression levels (Fig 5D, compare lanes 3–10 and 15–16 to lanes 1–2, 11–12 and 13–14). To further verify the inability of the ORF57 interface mutants to form dimers, we performed the nuclear translocation assay in COS-1 cells by cotransfecting the individual interface mutants with a GFP-tagged full-length ORF57 bearing three disrupted nuclear localization signals (NLSs) (ORF57 mt1+2+3) (Fig 5A). Mutation of all three NLSs impairs ORF57 nuclear localization and accumulates the GFP-tagged protein in the cytoplasm as a dysfunctional but dimerization-competent protein. Thus, the GFP-tagged ORF57 mt1+2+3 could be translocated into the nucleus through dimerization upon coexpression with wild-type ORF57 containing functional NLSs, with 88% efficiency. Although both R270A/R271A and R325A/R327A were nuclear proteins, they, like the dimerization-incompetent mutant W292P, were unable to form a dimer with ORF57 mt1+2+3 and were thus incapable (only 2% and 4% efficiency, respectively) of translocating the GFP-tagged cytoplasmic ORF57 mt1+2+3 into the nucleus when coexpressed.
In contrast, the dimerization-competent K345A retained its ability to translocate the GFP-tagged mutant into the nucleus by dimerization (66% efficiency), close to what we saw for wild-type ORF57 (Fig 5E and S7 Fig).

[Fig 5 legend, condensed: (B) HEK293 cells transfected with the indicated FLAG-tagged ORF57 expression vectors were harvested at 24 h after transfection for extraction of total protein and RNA; ORF57 protein levels were determined by Western blotting with anti-FLAG antibody (top), with cellular β-tubulin as a loading control, and RNA levels were determined by Northern blotting (bottom), with 18S rRNA to ensure equal RNA loading. (C) ORF57 interface mutants are partially degraded through the proteasome pathway: transfected HEK293 cells were treated at 20 h after transfection with MG132 for an additional 4 h or with DMSO (vehicle) as a control, and cell lysates were examined for ORF57 by anti-FLAG Western blotting, with β-tubulin as a loading control. (D) ORF57 mutants with point mutations of the globular-globular interface residues are incapable of protein dimerization: HEK293 cells were transfected with equal amounts (2 μg) of vectors expressing wild-type (WT) or globular-domain-mutant ORF57-FLAG and treated at 24 h after transfection with MG132 for 6 h (lanes 1–12); alternatively, the expression of wild-type and mutant proteins was adjusted by transfecting variable amounts of plasmid DNA, 0.5 μg for WT and 2 μg for mutant (lanes 13–16); the cells were harvested in PBS and exposed to the chemical crosslinker DSS (50 μM final concentration) or DMSO (vehicle) for 30 min, and dimer formation was monitored by Western blotting with an anti-FLAG antibody; M, monomer; D, dimer. (E) COS-1 cells seeded on glass cover slips were cotransfected with GFP-tagged ORF57 mt1+2+3 (deficient in all three nuclear localization signals) and a FLAG-tagged full-length ORF57 interface mutant; the empty vector served as a negative control and ORF57 WT as a positive control, with the previously reported dimerization-incompetent ORF57-W292P and dimerization-competent ORF57-K345A as additional controls; localization of GFP-tagged proteins (green) was detected by direct fluorescence microscopy, FLAG-tagged proteins (red) were visualized by indirect immunofluorescence staining with an anti-FLAG M2 antibody, and cell nuclei (blue) were counterstained with Hoechst 33342 dye; indicated in the merged panels are % cells with nuclear translocation from 50 double-color positive cells; N/A, not applicable.]

In conclusion, the interface residues R270/R271 and R325/R327 in the CTD globular domain are essential for ORF57 dimerization. Mutation of these residues prevents dimer formation and consequently induces protein instability. However, the K345A mutation, in the vicinity of residues R325, R327 and Y349 (Fig 3C and S11 Fig), had no effect on ORF57 stability, dimer formation or functionality, indicating that this residue is non-essential for ORF57 dimer formation via the C-terminal globular domain.

The zinc-binding motif is important for the protein stability of ORF57 and its homologues

A zinc-binding motif is a relatively small structural motif characterized by the coordination of one or more zinc ions that stabilize the protein. As shown in Fig 6A, the zinc-binding motif coordinated by residues C333, H423, C427 and C432 was identified within ORF57-CTD, with H423, C427 and C432 located in a compact helix-loop-helix motif (helices 10 to 11). Specifically, H423 and C432 are positioned on helices 10 and 11, respectively, while C427 resides on the interconnecting loop. The fourth residue, C333, within the loop linking helices 5 and 6, is positioned approximately 100 residues upstream of the other three (H423, C427 and C432) in the sequence. The zinc ion is chelated in a tetrahedral geometry, in which the Zn-S bond lengths range from 2.2 to 2.4 Å and the Zn-N bond length is about 2.1 Å. Alignment analysis indicated that this CHCC motif is highly conserved among ORF57 homologues from different herpesviruses (Fig 6B and S8 Fig). Because this motif is buried inside the globular domain, we hypothesized that it may contribute to stability and correct protein folding. To test this hypothesis, we mutated the cysteines at positions 333, 427 and 432 to serine, and the histidine at position 423 to leucine, separately or in combination, in the context of full-length ORF57.
The expression plasmids were transiently transfected into HEK293 cells, and the expression levels of the mutants were determined by Western blotting for ORF57 protein and real-time PCR for ORF57 RNA. The wild-type ORF57 protein exhibited a much higher protein level than its mutants (Fig 6C, top left panel), although no significant differences were observed at the transcriptional level (Fig 6C, lower left bar graph). To examine whether the CHCC-motif mutant had a short half-life, we compared the CHCC mutant with wt ORF57 in HEK293 cells in the presence of cycloheximide (CHX) for the indicated times (S9 Fig); the Western blot results revealed that the CHCC mutant protein displays a shorter half-life than wt ORF57. To further investigate whether the zinc-binding motif is involved in ORF57 dimerization, we used the same methods described in Figs 4 and 5 and found that the CHCC mutant can still weakly dimerize in chemical cross-linking assays but lacks the nuclear translocation activity for the cytoplasmic mutant ORF57mt1+2+3 (Fig 6C, right panel). Altogether, these data suggest an important role of the CHCC motif in ORF57-CTD folding and stability. Since the zinc-binding motif is highly conserved among ORF57 homologues (Fig 6B and S8 Fig), we further investigated possible roles of the zinc-binding motifs in the protein stability of the homologues from other herpesviruses. By converting the cysteines to serines and the histidine to leucine in the four residues of the zinc-binding motifs within ICP27 (herpes simplex virus type 1), EB2 (Epstein-Barr virus, EBV), UL69 (human cytomegalovirus, HCMV) and mORF57 (murine herpesvirus 68, MHV68), as described for KSHV ORF57 (Fig 6B and S8 Fig), we demonstrated that the predicted zinc-binding motifs in ICP27, EB2, UL69 and mORF57 are all important for high-level protein expression but make no significant contribution to transcription (Fig 6D-6G). These data provide compelling evidence that the four residues of the zinc-binding motif in each homologue are important for protein stability.

The zinc-binding motif of ORF57 is important for viral gene expression during lytic replication

Given that the zinc-binding motif is highly conserved and serves a similar protein-stabilizing function among different herpesviruses, it is possible that this motif is critical for the function of ORF57 in stabilizing viral mRNA and promoting viral gene expression during KSHV lytic replication. To test this hypothesis, we substituted the four residues within the zinc-binding motif of ORF57, as described in Fig 6C, in the context of the KSHV genome. The resistant stable cell lines were exposed to doxycycline (DOX) to reactivate KSHV lytic replication, and the cells were harvested at the indicated time points for analysis. When compared with the wild-type KSHV virus, the mutant virus expressed a much lower amount of ORF57 protein during lytic induction (Fig 7A). Remarkably, the mutant virus also exhibited a severe reduction of K8.1 and ORF59 RNAs, two major targets of ORF57 (Fig 7B), accompanied by a significant reduction of KSHV DNA replication and virus production (Fig 7C and 7D). The deficiency in the expression of lytic gene products and in viral replication caused by the zinc-binding motif mutation is quite similar to that caused by ORF57 deletion. Taken together, these results demonstrate that the identified zinc-binding motif is critical for the expression and function of ORF57 in promoting KSHV lytic replication.
Discussion

In the present study, we resolved the 3D structure of the KSHV ORF57-CTD for the first time. In our experimental system, direct expression of the full-length ORF57 in Escherichia coli gave an extremely low yield. After optimization of the conditions, the N-terminal truncation mutant retaining residues 167 to 455 (ORF57-CTD) became relatively stable and was successfully crystallized. Finally, we obtained a dimeric structure at 3.5 Å and a monomeric structure at 3.0 Å through X-ray crystallography. The ORF57-CTD monomer comprises 11 helices, which is generally in agreement with the secondary structure predicted by biochemical analyses.

[Fig 6 legend, condensed: The zinc-binding motif is important for the accumulation of KSHV ORF57 protein and its herpesvirus homologues. (A) The ORF57-CTD structure in a cartoon model with rainbow spectrum; the zinc (Zn²⁺) ion (grey sphere) and the surrounding cysteine (C333, C427, C432) and histidine (H423) residues are highlighted and displayed as sticks. (B) Multiple alignment of protein sequences performed with Clustal Omega, including the C-terminal domains of KSHV-ORF57, ICP27 (herpes simplex virus types 1 and 2, HSV1-ICP27 and HSV2-ICP27), ORF4 (varicella-zoster virus, VZV-ORF4), EB2 (Epstein-Barr virus, EBV-EB2), UL69 (human cytomegalovirus, HCMV-UL69) and mORF57 (murine gammaherpesvirus 68, MHV68-mORF57); conserved residues are labeled in red, identical residues highlighted in green, and the zinc-binding residues marked by red stars. (C) Disruption of the zinc-binding motif reduces the stability and dimerization of KSHV ORF57: HEK293 cells were transfected with KSHV ORF57 expression vectors carrying the indicated serine-for-cysteine and leucine-for-histidine substitutions in the zinc-binding motif and harvested at 40 h post transfection; the cells were split for Western blotting (top left panel) and RNA analysis (lower left bar graph); WT, wild type; CHCC Mut, combined substitutions of all four residues in the zinc-binding motif; the combined mutant was tested for dimerization by in vitro crosslinking and nuclear translocation assays (right two panels) as described in Fig 5D and 5E.]

The solved structure of the ORF57 protein is quite different from the folded structure in the I-TASSER database by Taylor, in which the dimer is formed between the core domains. The distinguishing features of ORF57-CTD are the "arm" region, which extends to the neighboring monomer in trans, and the C-terminal residues 445–454 (NKGLFFYPL), which insert into the globular domain in cis. The conformation of the "arm" region of ORF57 is similar to that of the N-terminal "arm" region of ICP27-CTD from HSV-1. However, the N-terminal "arm" region of ICP27-CTD forms two α-helices, while the "arm" region of ORF57-CTD lacks any major structural motif. Interestingly, the C-terminal end of ICP27-CTD, which also has an "arm" region extending to the neighboring monomer in trans, is essential for dimerization, whereas the C-terminal end of ORF57-CTD inserts itself into the globular domain in cis, restricting itself from interacting with another monomer (Fig 2D). This suggests that the residues at the C-terminal end might not be commonly used for dimerization among all ORF57 homologues.

[Fig 7 legend, fragment: Results (mean ± SD) are representative of three independent experiments; * p < 0.05, ** p < 0.005, *** p < 0.0005, **** p < 0.00005. NS, not significant.]
Self-association and homodimer formation are common features of eukaryotic proteins. Structural and biophysical studies show that dimer formation plays an important role in protein function. Homodimerization improves protein stability, regulates enzyme activities and increases interaction complexity. In general, most dimerization is mediated by noncovalent interactions, of which contact interaction and domain swapping are the two major forms. Contact interaction is mainly mediated by several α-helices in one monomer, which form stable contacts with the other monomer. In our case, the homodimerization of ORF57-CTD is mediated by contact interactions at both the arm-globular and globular-globular interfaces. One striking difference between this study and the mutagenesis data on ICP27 is that dimerization of KSHV ORF57 is more vulnerable to amino acid substitutions at the intermolecular interfaces than that of ICP27. This difference could be attributable to ICP27 dimerization being stabilized by an extensive network of intermolecular contacts, while ORF57 dimerization is stabilized only by its arm and a few binding contacts at the interface between the globular domains (S5 Fig). Moreover, the surface electrostatic potential of the "arm" region that mediates the intermolecular interactions also differs to some degree between ORF57-CTD and ICP27-CTD. The "arm" region of ORF57-CTD is rich in negatively charged residues compared with that of ICP27, indicating different binding stabilities for the dimerization of the two proteins (S12 Fig). We demonstrated that disruption of even a single hydrogen bond (R270A+R271A) at the globular interface is sufficient to abolish ORF57 dimerization (Fig 5). Alternatively, these mutations (R270A+R271A) may completely change the local structures, doing more than just disrupting a single hydrogen bond. In contrast, introduction of single substitutions along the binding surface of ICP27 had no effect on dimer formation. More interestingly, the vulnerability of the KSHV ORF57 structure in dimer formation could be revealed by point mutations even in other regions (E287, E288, W292) not responsible for intermolecular contacts. In the current structure, the E287, E288 and W292 residues are located inside helix α4 in the center of the globular domain and interact with residues 407, 410, 412, 270 and 309 within the monomer, but they do not interact with any residue of the other monomer (S11A Fig). Thus, mutation of these residues may cause the whole structure to collapse. The interaction of the highly conserved residue E288 with the C-terminal-end residues (Fig 2C) presumably contributes to maintaining the ORF57-CTD structure. One exception is the K345A mutation from our previous study. In this report, we found that K345 is located in helix α6 and that its side chain protrudes to the surface of the dimeric structure and does not mediate interaction with the other monomer (S11B Fig). A previous study reported that several specially designed small compounds inhibit the activity of KSHV protease (KSHV Pr) by disrupting dimer formation. KSHV Pr, which plays an important role in capsid maturation, is structurally and functionally conserved among all three human herpesvirus subfamilies. Monomeric KSHV Pr is inactive and partially disordered, but KSHV Pr becomes active in the dimeric state. As the C-terminal domain of KSHV Pr plays a key role in dimerization, the designed compounds bind to the dimer interface of KSHV Pr and disrupt dimer formation to achieve inhibition.
HIV protease (HIV Pr) is also a homodimeric enzyme essential for viral assembly during virus production; its dimeric structure contains a four-stranded antiparallel β-sheet formed by the N- and C-termini of each monomer. Previous research designed a non-peptide-based linker to connect the N-termini of the two monomers, resulting in dimer dissociation and providing a potent strategy against HIV infection. Like KSHV Pr and HIV Pr, ORF57 is also dimerized, and disrupting dimer formation results in swift protein degradation. Therefore, our structure provides novel clues for developing compounds that target the dimer interface of ORF57 and ultimately reduce viral replication efficiency. The zinc-binding CHCC motif is a classical protein-folding motif found in some pathogenic viruses and in eukaryotic cells. The motif is mainly found in the nucleocapsid proteins of some retroviruses. The nucleocapsid proteins of HIV-1 and HIV-2 contain two CHCC motifs that are critical for packaging the viral RNA. Glycoproteins of hantaviruses also form two CHCC motifs, which are critical for viral replication and assembly. ORF57-CTD and ICP27-CTD harbor a zinc ion through this CHCC motif, which is highly conserved among herpesviruses. The zinc ion chelated by the histidine and cysteines modulates the stability of ORF57; accordingly, modifying the zinc-binding motif in ORF57, EB2, UL69 and mORF57 resulted in a dramatic reduction in protein levels. In this report, we demonstrated that disruption of the CHCC motif in ORF57 reduces the dimerization capability of ORF57 and induces protein instability. Thus, we conclude that the zinc ion is important for the protein folding and stability of ORF57. Notably, by presenting the first KSHV ORF57 structure and revealing a novel anti-parallel dimeric fold, a consensus zinc-binding motif and a conformation of the C-terminus that differs from ICP27, our study indicates that different herpesvirus homologues might dimerize through different mechanisms. Perhaps the most remarkable advance from solving the first ORF57 structure in this study is that it provides many novel clues for studying ORF57 binding partners and for designing small-molecule drugs against KSHV infection and oncogenesis by interfering with ORF57 dimerization and its interactions with partner proteins.

Cloning, expression and purification of ORF57

The DNA fragment encoding residues 167 to 455 of KSHV ORF57 (GenBank ID: ABD28908.1) was cloned into the pET SUMO vector (Invitrogen, catalog no. K300-01) in frame with the N-terminal 6×His tag and the SUMO fusion protein. The SUMO (small ubiquitin-related modifier) protein was derived from Smt3 of Saccharomyces cerevisiae, and fusion with SUMO increased ORF57-CTD expression and solubility. The protein was expressed in the BL21 (DE3) strain of Escherichia coli, supplemented with 50 μg/ml kanamycin, and cultivated at 16˚C in Terrific broth for 18 h after induction (at OD₆₀₀ = 0.8) with 0.5 mM IPTG (isopropyl β-D-1-thiogalactopyranoside). For purification, cells were collected at 4˚C and 3,500 × g for 30 min; the pellet was resuspended in lysis buffer (30 mM Tris-HCl, 500 mM NaCl, 1 mM PMSF) and lysed by high-pressure homogenization.
After centrifugation at 16,000 × g for 40 min at 4˚C, the supernatant containing the His-tagged protein was loaded onto an immobilized Ni-NTA column (GE Healthcare) pre-equilibrated with binding buffer (30 mM Tris-HCl, 500 mM NaCl, 10 mM imidazole); the Ni-NTA affinity column was then washed with washing buffer (30 mM Tris-HCl, 500 mM NaCl, 120 mM imidazole), and the recombinant protein was finally eluted with elution buffer (30 mM Tris-HCl, 500 mM NaCl, 300 mM imidazole). Afterwards, the ORF57-CTD fusion protein was digested with ULP1 (ubiquitin-like-specific protease 1) overnight, and the protein mixture was concentrated with a 10-kDa centrifugal filter (Millipore). The concentrated recombinant protein was then subjected to gel filtration chromatography (Superdex 75; GE Healthcare) in a column equilibrated with buffer containing 50 mM MES (pH 6.0) and 300 mM NaCl, using an ÄKTA Purifier system (Amersham). Protein purity was assessed by SDS-PAGE with Coomassie brilliant blue staining, and the highest-purity fractions were concentrated with a 10-kDa centrifugal filter for further crystal screening. The concentration of the ORF57-CTD protein was determined from the UV absorbance at 595 nm after staining with Coomassie Brilliant Blue G250 at a ratio of 1:1000 (Bio-Rad). The ORF57-CTD protein was either immediately subjected to the following experiments or quickly frozen in liquid nitrogen for long-term storage at -80˚C.

Crystallization, data collection, structure determination and refinement

The ORF57-CTD protein was concentrated to 15 mg/ml and crystallized by the sitting-drop vapor diffusion method at 4˚C. Crystallization screens were performed with Hampton Research and Qiagen kits. The spindle-shaped crystal grew in a 1:1 mixture of protein and a solution of 0.1 M BIS-TRIS propane (pH 7.0), 0.8 M lithium sulfate monohydrate and other components (Table 1). The crystal size was optimized by gradually changing the salt concentration, pH and temperature. The spindle-shaped crystal had very poor anisotropic diffraction, with a resolution around 10 Å at room temperature or at 100 K after rapid freezing in liquid nitrogen. To improve resolution, the crystals were soaked in different cryo-agents; a better diffraction pattern showed that soaking the crystal in 25% (vol/vol) ethylene glycol for a few seconds improved the diffraction quality from 10 Å to 6 Å. Furthermore, keeping the crystals in the reservoir exposed to air for several minutes and then soaking them in 25% ethylene glycol before freezing surprisingly improved the diffraction to 3.5 Å. Microseeding was performed to further improve the diffraction resolution. Seed stocks were prepared by grinding a crystal on glass and pulverizing it into microscopic particles. Microseeds were collected in 10 μl of crystallization buffer and diluted to 10⁻⁷ for a working seed solution. The cubic crystal was obtained by adding 0.1 μl of seed stock into a 2-μl solution mixture (1 μl protein and 1 μl reservoir); the reservoir contained 0.01 M MgCl₂·6H₂O, 0.05 M HEPES sodium (pH 7.0) and 4.0 M LiCl. Both crystals were dehydrated by exposure to air for a few minutes, soaked in cryo-protectant (reservoir solution supplemented with 25% ethylene glycol) for a few seconds, and finally flash-frozen in liquid nitrogen for data collection.
Considering the native ORF57-CTD expressed in Terrific broth lacks of phase angels and to solve the phase ambiguity problem, Seleno-methionine (SeMet) was used in basic M9 medium to label ORF57-CTD-SUMO protein and screen for heavy metals in the expression of ORF57-CTD. Surprisingly ORF57-CTD-SUMO expressed highly in the presence of Zn 2+ . Subsequently, Se-labeled ORF57-CTD was successfully crystallized in the conditions for native cubic crystal. All of the diffraction data were collected at beamline BL17U1 of Shanghai Synchrotron Radiation Facility (SSRF). The monomer (cubic crystal) data set at 3 Å was collected at the peak wavelength of Zn atom (1.2834 Å) and the 3.5 Å dimer (spindle shaped crystal) data set was collected at the wavelength 0.97893 Å. All the data were integrated and scaled with HKL2000. The monomeric structure was solved by single-wavelength anomalous-dispersion (SAD) method using Zn ion as the anomalous scattering atom and then it was used as the searching model to determine the dimeric structure by molecular replacement (MR). Phenix was applied for structure refinement. Structural deviations were corrected manually by Coot . The data collection and refinement statistics are listed in Table 1. Structural figures were produced by Pymol . Chemical protein cross-linking Disuccinimidyl suberate (DSS, Thermo Fisher Scientific) was always freshly made by dissolving in DMSO as a 50 mM stock solution and diluted 1:100 with DMSO to working concentration of 500 μM (Thermo Fisher Scientific). Experimental procedures were described previously . Briefly, HEK293 cells plated in a 6-well plate (5×10 5 cells/well) were transfected with 0.5-2 μg of ORF57 expression vectors. To increase the protein level, in some cases, the cells were treated with 20 μM of proteasome inhibitor MG132 for 4-6 hours before harvesting. Twenty four hours after transfection the cells were harvested, washed with PBS and resuspended in 200 μl of phosphate-buffered saline (PBS). The cross-linking was carried out by addition of 2 μl of 500 μM DSS into 20 μl of cell suspension (50 μM of DSS in final concentration) followed by 30 min incubation at room temperature. Cells treated in parallel with DMSO (vehicle) were used as a negative control. The cross-linking was stopped by quenching of remaining DSS for 15 min by addition of 1 μl of 200 mM Tris-HCl (pH = 7.4). Finally, an equal amount of 2×SDS-loading buffer was added to sample for Western blotting. Nuclear translocation assay COS-1 cells grown on glass cover slips in 6-well plates were transfected with mixture of 0.5 μg of ORF57-GFP and 1.0 μg of ORF57-FLAG expressing vectors. Twenty four hours after transfection the cells were fixed with 2% paraformaldehyde for 30 min, followed by 10 min quenching with 100 mM glycine in PBS and permeabilization with 0.5% Triton X-100 in PBS for 15 min. The slides were subsequently blocked in 3% blot qualified bovine serum albumin (Promega) in PBS containing 0.02% Tween-20 (blocking solution), and incubated with anti-FLAG M2 primary antibody (F3165, Sigma-Aldrich) diluted in blocking buffer for 2 h at 37˚C and further stained with anti-mouse Alexa Fluor 594 secondary antibody in blocking buffer for 30 min at 37˚C. Cell nuclei were stained for 15 min at RT with Hoechst 33342 (Thermo Fisher Scientific). Slides were washed, mounted and captured with a fluorescence microscope (Olympus). The nuclear translocation frequency was determined by observation of 50 ORF57-GFP/ ORF57-FLAG double positive cells. 
A positive translocation was defined by the cells exhibiting higher, nuclear GFP signal intensity over the cytoplasmic count. Protein stability and half-life assays To inhibit proteasome-mediated degradation, the HEK293 cells in 6-well plate (5×10 5 cells/ well) were transfected in parallel with ORF57 wt or mutant expression vectors. Twenty hours after transfection the cells were treated with proteasome inhibitor MG132 (20 μM final) for additional 4 hours. The cells treated with DMSO (vehicle) were used as negative control. To determine the half-life of the wt ORF57 or CHCC mutant protein, HEK293 cells in 6-well plate (5×10 5 cells/well) were transfected with 0.5 μg of wt ORF57 or 2 μg of CHCC mutant expression vector. Forty hours after transfection, the cells were treated with 50 μM CHX in final concentration to inhabit protein synthesis and the cell lysates were collected at the indicated time for Western blot. The half-life (t 1/2 ) of wt ORF57 or CHCC mutant protein was determined by a line plot analysis as described . Northern blotting analysis HEK-293 cells transfected with 2 μg plasmids were harvested 24h after transfection and lysed in TRIzol reagent (Invitrogen). Total RNA samples were separated in 1% agarose and transferred onto nylon membrane, hybridization was performed using a 32 P-labeled probe (S1 Table) of ORF57 as described . Construction of BAC16 ORF57 mutant virus The KSHV genome was modified by using a two-step "scarless" homologous recombination procedure in GS1783 E. coli strain harboring BACmid containing full-length KSHV genome that was described previously . For CHCC mutant virus, the cysteine and histidine were replaced by serine and leucine, respectively. The Kan r I-SceI cassette was amplified from the pEP-Kan-S plasmid using primers described in S2 Table. The first round inserted two mutation sites, the second and third rounds inserted one mutation site separately. The recombination procedure was performed as previous described . The selected clone were sequenced and amplified. BAC DNA isolation and analysis BAC16 was isolated by NucleoBond Xtra Midi kit (MACHEREY-NAGEL), modified regions were PCR amplified and sequenced. To further verify the integrity of recombinant BACmids, the purified BAC DNA was digested with XhoI and separated on a 0.8% agarose gel in 0.5 × TAE under the following conditions: 80 V/cm for 4 h. Transfection of KSHV BAC16 and establishment of stable cell line The iSLK-PURO cells were grown to *80% confluence in a 6-well plate followed by transfection with 10 ug of BAC DNA using FuGENE HD Transfection Reagent (Promega). Cells were selected after 48 hours in a medium which contained 250 μg/ml G418 (Sigma-Aldrich), 1 μg/ ml puromycin (Sigma-Aldrich), 1000 μg/ml hygromycin B (Millipore). Three weeks later, iSLK-PURO cells infected with recombinant virus were established. RNA isolation and qPCR iSLK-PURO cells were treated with DOX (Clontech) to induce RTA expression and KSHV lytic cycle reactivation. To quantify virus genes expression, total RNA was extracted with Trireagent (Sigma-Aldrich) according to manufacturer's instructions. 0.5 ug of total RNA was reverse transcribed by using PrimeScript RT reagent Kit with gDNA Eraser, cDNA was quantified by using SYBR Fast qPCR Mix (Takara) and operated on LightCycler 480II (Roche). The relative quantification of gene expression was calculated by using the comparative threshold cycle (CT) method (2 −ΔΔ CT), cellular level of GAPDH RNA was used as a reference. 
The transcription level of wild type and mutant genes of ORF57, UL69, EB2, mORF57 were detected by the same way. The qPCR primers are listed in S3 Table. Virus replication and production assay iSLK-wt and CHCC mutant cells were induced with 1ug/ml of DOX and incubated at 37˚C for 72, 96 and 120 h, the KSHV genome was purified from the supernatant (200 ul of 2×10 6 cells) by TIANamp Blood DNA Kit (TIANGEN) and quantified by qPCR using K9 primers (S3 Table) for KSHV copy numbers. The supernatant (500ul) of each group at 72 hours was incubated with HEK293T cells plated in 6-well plate for 24 hours, and the percentage of GFP positive cells were calculated by FACS assay. Accession numbers The accession numbers for the crystal structures of ORF57-dimer and ORF57-monomer have been deposited in the Protein Data Bank (accession numbers are 5zb3 and 5zb1, respectively). The diagrams illustrates the polar intermolecular interactions between "arm" (green box) and globular (yellow box) domains (a) and between two globular domains (b) in the ORF57 dimer and ICP27 dimer (PDB ID: 4yxp).The numbered yellow boxes represent individual α-helixes. The dash lines of ORF57 and ICP27 show hydrogen bonds (blue lines) or salt bridges (red lines) between interacting residues. Interface interaction analyses of ORF57 and ICP27 were done by using PDBe-PISA (http://www.ebi.ac.uk/msd-srv/prot_int/cgi-bin/piserver) and the interface interaction residues of ORF57 are also listed in Supplemental Table S4. Multiple alignment of the protein sequences was performed by Clustal Omega for ICP27 (herpes simplex virus type 1 and type 2, HSV1-ICP27 and HSV2-ICP27), ORF4 (varicella-zoster virus, VZV-ORF4), EB2 (Epstein-Barr virus, EBV-EB2), UL69 (human cytomegalovirus, HCMV-UL69), and mORF57 (murine gamma herpesvirus 68, MHV68-mORF57), with the conserved residues in red surrounded by blue boxes, identical residues in red, and the residues of the zinc-binding motif in red stars. The secondary structural elements of ORF57-CTD were analyzed by ESPript3 (http://espript.ibcp.fr/ESPript/cgi-bin/ESPript.cgi), with the indicated αhelix (coil), η-helix (coil), β-sheet (arrows), and turn (T) above the alignment. (PPTX) S1 Movie. Crystal structure of ORF57 monomer with 3Å resolution showed as a cartoon in rainbow spectrum coloring with dark blue for the N-terminus and red color for the C-terminus. The black sphere marks the zinc atom. (MP4) S2 Movie. Crystal structure of ORF57 dimer with 3.5Å resolution. One monomer is showed as a cartoon in rainbow spectrum coloring with dark blue for the N-terminus and red color for the C-terminus. The second monomer is colored in grey. The black spheres mark the zinc atoms in each monomer. (MP4) S1
def post_connect(self, gateway:str, device:str): self.device = device self.setup_routes(gateway) if self.dns_handler is not None: self.dns_handler.SetDns(device, self.srv_options)
 #ifndef __EFFEKSEERRENDERER_GL_MATERIALLOADER_H__ #define __EFFEKSEERRENDERER_GL_MATERIALLOADER_H__ #include "../RendererUrho3D/EffekseerUrho3D.RendererImplemented.h" namespace Effekseer { class Material; class CompiledMaterialBinary; } // namespace Effekseer namespace EffekseerUrho3D { class MaterialLoader : public ::Effekseer::MaterialLoader { private: bool canLoadFromCache_ = false; std::string currentPath_; ::Effekseer::FileInterface* fileInterface_ = nullptr; ::Effekseer::DefaultFileInterface defaultFileInterface_; ::Effekseer::MaterialRef LoadAcutually(::Effekseer::MaterialFile& materialFile, ::Effekseer::CompiledMaterialBinary* binary); public: MaterialLoader(bool canLoadFromCache = false); virtual ~MaterialLoader(); ::Effekseer::MaterialRef Load(const char16_t* path) override; ::Effekseer::MaterialRef Load(const void* data, int32_t size, Effekseer::MaterialFileType fileType) override; void Unload(::Effekseer::MaterialRef data) override; }; } // namespace EffekseerRendererGL #endif // __EFFEKSEERRENDERER_GL_MODELLOADER_H__
def add_pos_neg_features(df, vocab_pos, vocab_neg, n=10): df_pos = df.where(df.positive==True) df_neg = df.where(df.positive==False) df_pos_terms = add_top_features(df_pos, vocab_pos, n) df_neg_terms = add_top_features(df_neg, vocab_neg, n) return df_pos_terms.unionAll(df_neg_terms)
// Normalize returns a new unit vector in the same direction as a. func (v Vector3) Normalize() Vector3 { n2 := v.Norm2() if n2 == 0 { return *NewVector3(0, 0, 0) } return v.Times(1 / math.Sqrt(n2)) }
In the early twentieth century, some of the most interesting voices in Irish public life, including Socialist leader James Connolly, expressed their support for the idea of an international language. A constructed international auxiliary language (differing from natural languages, which develop over time), Esperanto was the brainchild of Polish inventor L. L. Zamenhof. In 1887, under the pseudonym Doktoro Esperanto (Doctor Hopeful) he published Unua Libro, in which he introduced and described this new international language. Zamenhof did not believe that his constructed language would replace existing national tongues, but that it could exist alongside them and make human communication easier. The father of this ambitious project was twelve times nominated for the Nobel Peace Prize, and there are streets named in his honour all over the world, including in Israel, Italy, Brazil, Catalonia, the UK and Poland. Zamenhof’s vision of international parity was certainly a romantic one, telling one gathering in 1905: In our meeting there are no strong or weak nations, privileged or unfavoured ones, nobody is humiliated, nobody is harassed; we all support one another upon a neutral foundation, we all have the same rights, we all feel ourselves the members of the same nation, like the members of the same family, and for the first time in the history of human race, we -the members of different peoples- are one beside the other not as strangers, not like competitors, but like brothers who do not enforce their language, but who understand one another, trustfully, conceitedly, and we shake our hands with no hypocrisy like strangers, but sincerely, like people. Writing to the Freeman’s Journal in 1902, E.E Fournier expressed a belief that “it is high time that the attention of the Irish people should be directed to a language which appears to have completely solved the problem of providing an international means of communication without prejudice to the use and study of an existing national language.” Anyone curious about “a movement so full of possibilities for good” was encouraged to attend classes at the offices of the Celtic Association, 97 Stephen’s Green. Fournier, a distinguished intellectual and physicist, was at the very forefront of the Celtic Revival in Ireland and an early champion of Esperanto. Even earlier that this in 1899, James Connolly used the pages of his weekly The Workers’ Republic to outline his own belief in the need for a universal language, though not one that stood in conflict with existing languages: I believe the establishment of a universal language to facilitate communication between the peoples is highly to be desired. But I incline also to the belief that this desirable result would be attained sooner as the result of a free agreement which would accept one language to be taught in all primary schools, in addition to the national language, than by the attempt to crush out the existing national vehicles of expression. Connolly was by no means alone in the global socialist movement in his support for a universal language, the very idea of a language not imposed by imperialism or colonialism but rather built by people themselves had great appeal to working class leaders. As Peter Glover Forster notes in his history of the language, the “democratising spirit” of the language appealed to the labour movement, and publications like Der Arberiter Esperantist (Germany, 1911) and Le Travailleur Esperantiste (France, 1912) reflected this. 
In the world’s first socialist state, the Soviet Esperanto Union (SEU) was founded in 1921, believing in the revolutionary capability of a world language. As well as socialists, language academics were centrally important to the Esperanto movement across Europe, something that is reflected in contemporary newspaper reports on the meetings of the Dublin Esperanto Club which was driven by the prior mentioned E.E Fournier. He wrote keenly in the Irish press on international conferences and efforts to advance the language, heralding the first International Esperanto Conference in 1905. In 1907, La Irlanda Esperanto-Asocio was born, with 1916 proclamation signatory Joseph Mary Plunkett among its committee members. When the 1912 British Esperanto Conference was held in Belfast, its delegates enjoyed a brief sojourn in Dublin, visiting the National Museum, Trinity College Dublin and more besides, as well as meeting local politicians and even the Lord Lieutenant of Ireland. The first Esperantist in the English speaking world had strong connections to Dublin, though born in Chesire in England. Richard H. Geoghegan’s (1866-1943) family were Irish, and had lived for many years at 41 Upper Rathmines Road before his father emigrated to England for work. Geoghegan was responsible for the first published translation of L. L. Zamenhof’s work. Post-independence, there remained an active community of Esperanto speakers in Ireland, something well documented by the Esperanto Association of Ireland. Their history notes the 1930s to have been something of a golden age for the language, in a city where there were now “six shops selling Esperanto books. Browne & Nolan, Nassau Street, opened a special Esperanto Section in its shop.” If there was an E.E Fournier of these times, it was Lorcán Ó hUiginn, a stenographer in the Dáil by trade who brought an incredible new spirit of life into the movement in Ireland. Ó hUiginn taught popular classes on the language in the city and distributed publications in the language. Today, the work of Fournier, Ó hUiginn and others like them is continued by Esperanto Association of Ireland. Worldwide, there are some 2 to 10 million speakers of the invented language today. It is thus much more than just a historical curiosity.
// Attempt to prerender an unsafe page. The prerender navigation should be // cancelled and should not affect the security state of the primary page. IN_PROC_BROWSER_TEST_P(SafeBrowsingPrerenderBrowserTest, UnsafePrerender) { const GURL initial_url = embedded_test_server()->GetURL("/title1.html"); ASSERT_TRUE(ui_test_utils::NavigateToURL(browser(), initial_url)); const GURL prerender_url = embedded_test_server()->GetURL(kEmptyPage); SetURLThreatType(prerender_url, GetThreatType()); PrerenderAndExpectCancellation(prerender_url); }
A day after suffering a minor heart attack, B.B. King has returned to his Las Vegas residence and is receiving home hospice care. The legendary blues guitarist, singer and bandleader is 89 years old. Yesterday, the following message from him was posted on his Facebook page. "I am in home hospice care at my residence in Las Vegas. Thanks to all for your well wishes and prayers. B.B. King." What appear to be King's last days have not been without controversy. As he was taken to the hospital this week, his daughter, Patty, charged his manager , Laverne Toney, with elder abuse. She claims that Toney, who has power of attorney over King, overruled Patty's wish to have him seek treatment. It was only after the police were called that a team of paramedics took him to the hospital. Last year, Patty accused Toney of elder abuse and burglary, alleging that she has taken up to $30 million of King's money and jewelry, including a ring valued at $250,000, and withheld his diabetes medication from him while on tour. No charges were filed. In a career spanning nearly 70 years, Riley B. King went from a disk jockey at WDIA in Memphis -- where he acquired the nickname "Blues Boy," which was shortened to "B.B." -- to arguably the most important global ambassador of the blues. Playing his trademark black Gibson ES-355, which he named "Lucille," his recordings, particularly those from the '50s through the '70s, have been incredibly influential to generations of blues musicians.
import async_hooks from "async_hooks"; import { enable } from "^jab"; //enable already called. enable(async_hooks); enable(async_hooks);
/** * A <I>location</I> is an integer that identifies the position in a * file of a particular piece of program source text. This class is * never instantiated. It contains static functions for manipulating * locations. Locations are produced by the * <code>CorrelatedReader</code> class (and by this class), they are * stored in ASTs, and they are passed to <code>ErrorSet</code> to * identify the program source that caused a particular error or * warning. * <p> There are three kinds of locations. * <ol> * <li> <i>Regular locations</i> encode the file, line, column, and * offset of a character read by <code>CorrelatedReader</code>. A * call to the <code>getLocation()</code> method of a * <code>CorrelatedReader</code> object returns the regular location * of the last character read from that <code>CorrelatedReader</code> * object. The file/line/column/offset of that location can be * extracted via the methods described below. * <br> Following emacs, line numbers begin at 1, column numbers at 0, * and offsets at 1. A newline character is considered the last * character on a line. * <li> <i>Whole file locations</i> encode just file information. * They are currently used for error messages that are global to a * file (eg. the error message given when a file that is expected to * contain a package declaration does not have one). We expect they * will also be used in ASTs produced from class files (since there is * no meaningful line or column information, and offset information * would not be useful in an error message). * <li><i>The null location</i> is a constant that plays a similar * role for locations that null plays for reference types. * </ol> * * <p> Interface reviewed at Sparta Meeting 1/8/97. * * @see javafe.util.CorrelatedReader * @see javafe.util.ErrorSet */ public class Location { //@ public model JMLDataGroup internalState; /** Private constructor. Never called. */ private Location() {} /** The null location, is the constant 0. */ public static final int NULL = 0; /********************************************************************** * Check if a location is a whole file location. * <p>Precondition: loc should be a valid location (ie a regular, * whole file, or dummy location). *********************************************************************/ //@ pure public static boolean isWholeFileLoc(int loc) { return LocationManagerCorrelatedReader.isWholeFileLoc(loc); } /********************************************************************** * Extracts the file corresponding to a location. * <p>Precondition: loc should be a regular location or a whole file location. *********************************************************************/ //@ requires loc != Location.NULL; //@ modifies \nothing; //@ ensures \result != null; public static GenericFile toFile(int loc) { return LocationManagerCorrelatedReader.locToFile(loc); } /********************************************************************** * Extracts the filename corresponding to a location. * <p>Precondition: loc should be a regular location or a whole file location. *********************************************************************/ //@ requires loc != Location.NULL; //@ modifies \nothing; //@ ensures \result != null; public static String toFileName(int loc) { return LocationManagerCorrelatedReader.locToFile(loc).getHumanName(); } /********************************************************************** * Extracts the offset corresponding to a location. * The first character in a stream is at offset 1. 
* <p>Precondition: loc should be a regular location. *********************************************************************/ //@ requires loc != Location.NULL; //@ modifies \nothing; public static int toOffset(int loc) { return LocationManagerCorrelatedReader.locToOffset(loc); } /********************************************************************** * Extracts the line number corresponding to a location. * The first line in a stream is numbered 1. * <p>Precondition: loc should be a regular location. *********************************************************************/ //@ requires loc != Location.NULL; //@ modifies \nothing; //@ ensures \result >= 1; public static int toLineNumber(int loc) { return LocationManagerCorrelatedReader.locToLineNumber(loc); } /********************************************************************** * Extracts the column corresponding to a location. * The first column on each line is numbered 0. * <p>Precondition: loc should be a regular location. *********************************************************************/ //@ requires loc != Location.NULL; //@ modifies \nothing; //@ ensures \result >= 0; public static int toColumn(int loc) { return LocationManagerCorrelatedReader.locToColumn(loc); } /********************************************************************** * Convert a location into a printable description suitable for use * in a warning or error message. * <p>Precondition: loc should be a valid location (ie a regular, * whole file, or dummy location). *********************************************************************/ public static /*@non_null*/String toString(int loc) { if( loc == NULL ) return "<dummy location>"; String name = "\"" + toFileName(loc) + "\""; if (isWholeFileLoc(loc)) return name; return name + ", line " + toLineNumber(loc) +", col " + toColumn(loc); } public static String toFileLineString(int loc) { String s = Location.toFileName(loc); if (!Location.isWholeFileLoc(loc)) s = s + ":" + Location.toLineNumber(loc); return s; } /********************************************************************** * Create a whole file location corresponding to the given GenericFile. * Calls to <code>toFile</code> on that location will return * this file. *********************************************************************/ //@ ensures \result != Location.NULL; public static int createWholeFileLoc(/*@ non_null @*/ GenericFile file) { return FileCorrelatedReader.createWholeFileLoc(file); } /** * Create a fake location described by description.<p> * * This should only be used by debugging code and in places where * it should never see the light of day. * * The resulting location is a whole-file location associated * with an unopenable file with human-name description. */ //@ ensures \result != Location.NULL; public static int createFakeLoc(/*@ non_null @*/ String description) { return FileCorrelatedReader.createWholeFileLoc( new UnopenableFile(description)); } //@ requires 0 <= streamId; //@ requires streamId < LocationManagerCorrelatedReader.allCorrStreams.elementCount; //@ requires line > 0; //@ requires col >= 0; //@ ensures \result != Location.NULL ; public static int make(int streamId, int line, int col) { return LocationManagerCorrelatedReader.makeLocation(streamId, line, col); } /** * Attempts to return a location <code>n</code> characters further * to the right of <code>loc</code> on the same line. Returns the * same location if it is not a regular location. 
* * Produces an assertion failure if that location does not exist * (e.g., the line is too short.). */ //@ requires n >= 0; //@ ensures loc != NULL ==> \result != NULL; public static int inc(int loc, int n) { if (isWholeFileLoc(loc) || loc==NULL) return loc; // Should be a regular location here: // This assertion is commented out because when we translate // Java's assert construct into a conditional throws clause, // under some circumstances, the translated (fictional) IfStmt // does not actually fit on the original line. E.g., // assert false; // becomes // if !false throw new java.lang.AssertionError(); // which obviously is longer than the original statement. // Assert.notFalse(toLineNumber(loc) == toLineNumber(loc+n)); //@ nowarn pre return loc+n; } //@ nowarn Post; /** * Returns the internal stream ID used for the stream associated * with location <code>loc</code>. */ //@ requires loc != Location.NULL; public static int toStreamId(int loc) { return LocationManagerCorrelatedReader.locToStreamId(loc); } /** * Returns the file associated with stream id <code>id</code>, * where <code>id</code> has previously been returned by * <code>toStreamId</code>. */ //@ requires 0 <= id && id < LocationManagerCorrelatedReader.allCorrStreams.elementCount; public static GenericFile streamIdToFile(int id) { return LocationManagerCorrelatedReader.streamIdToFile(id); } }
""" @author: <NAME> @created by cemakpolat at 2021-12-29 """ from flask import Blueprint, request, jsonify import util userapi = Blueprint('userapi', __name__) logger = util.get_logger() @userapi.route('/user/preferences', methods=["GET", "POST"]) def assign_user_preferences(): profile = request.form.to_dict() if profile: return jsonify({"message": "User data is stored", "type": "success"}) else: return jsonify({"message": "User data could not be stored", "type": "error"})
// buildResourceRegexp translates the resource indicator to regexp. func buildResourceRegexp(s string) (*regexp.Regexp, error) { hash := strings.Split(s, ":") for i, v := range hash { if v == "" || v == "*" { hash[i] = ".*" } } return regexp.Compile(strings.Join(hash, ":")) }
/** * Indicates whether a discovery procedure is currently in progress. * * @return 0: No discovery procedure in progress; * 1: Discovery procedure in progress. */ int ble_gap_disc_active(void) { return ble_gap_master.op == BLE_GAP_OP_M_DISC; }
Dynamic State Estimation in the Presence of Sensor Outliers Using MAP-Based EKF In this letter, we consider the problem of dynamic state estimation (DSE) in scenarios where sensor measurements are corrupted with outliers. For such situations, we propose a filter that utilizes maximum a posteriori estimation in the extended Kalman filtering framework to identify and discard the outlier-ridden measurements from a faulty sensor at any given time instant. However, during this process, all those measurements that are not affected by outliers are still utilized for state estimation. Using an illustrative example of dynamic target tracking, we demonstrate the effectiveness of the proposed estimator.
'''import playsound playsound.playsound('ex021.mp3')''' print('Curso em video python'. '-'.join())
const Validator = require('validator'); const Empty = require('./is_empty'); module.exports = function validateLoginInput(data:{logemail:any,logpassword:any}) { var errors:any; var error_logpassword = ""; var error_logemail = ""; data.logemail = !Empty(data.logemail) ? data.logemail : ''; data.logpassword = !Empty(data.logpassword) ? data.logpassword : ''; if(!Validator.isEmail(data.logemail)) { error_logemail = 'Email is invalid'; } if(Validator.isEmpty(data.logemail)) { error_logemail = 'Email is required'; } if(!Validator.isLength(data.logpassword, {min: 6, max: 30})) { error_logpassword = 'Password <PASSWORD>'; } if(Validator.isEmpty(data.logpassword)) { error_logpassword = 'Password <PASSWORD>'; } if(error_logemail != "" || error_logpassword != "") { errors = {email:error_logemail,password:<PASSWORD>} } return { errors, isValid: Empty(errors) } }
use crate::errors::Error; use serde::{self, Deserialize}; type DateTime = chrono::DateTime<chrono::FixedOffset>; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Event { /// Short code describing the event /// /// TODO: Reverse engineer the possible values and their meaning pub code: String, #[serde(deserialize_with = "rfc3339::deserialize")] pub date: DateTime, pub label: String, pub order: u8, } /// Represents one of the five possible timeline events #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct TimelineEvent { pub code: String, #[serde(default, deserialize_with = "rfc3339::deserialize_option")] pub date: Option<DateTime>, /// 1; 2; 3; 4 or 5 pub id: u8, #[serde(rename = "shortLabel")] pub label: String, /// If true, this event is "achieved" or "unlocked". /// This probably implies that previous events are too. #[serde(rename = "status")] pub achieved: bool, } #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Shipment { /// Date on which the shipment entered the logistic circuit #[serde(deserialize_with = "rfc3339::deserialize")] pub entry_date: DateTime, /// Estimated delivery date #[serde(rename = "estimDate")] #[serde(deserialize_with = "rfc3339::deserialize")] pub estimated_date: DateTime, /// Arbitrary-length list of events associated with a date #[serde(rename = "event")] pub events: Vec<Event>, /// Shipment ID. Corresponds to the ID used to fetch all of this data. #[serde(rename = "idShip")] pub id: String, /// # Example /// /// `"colissimo"` #[serde(rename = "product")] pub service: String, /// A five-step timeline of key events that the shipment has or will "achieve" pub timeline: [TimelineEvent; 5], } impl Shipment { pub fn timeline_step(&self) -> u8 { self.timeline .iter() .find(|event| event.achieved) .map(|event| event.id) .unwrap_or(0) } } #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct APIResponse { shipment: Option<Shipment>, return_message: Option<String>, } impl From<APIResponse> for Result<Shipment, Error> { fn from(api_response: APIResponse) -> Self { match api_response { APIResponse { shipment: Some(shipment), return_message: None, } => Result::Ok(shipment), APIResponse { shipment: None, return_message: Some(error), } => Result::Err(Error::Server(error)), _ => Result::Err(Error::Response), } } } mod rfc3339 { use super::*; use serde::{de::Error, Deserialize, Deserializer}; pub fn deserialize<'de, D: Deserializer<'de>>(deserializer: D) -> Result<DateTime, D::Error> { let time: &str = Deserialize::deserialize(deserializer)?; Ok(DateTime::parse_from_rfc3339(time).map_err(D::Error::custom)?) } pub fn deserialize_option<'de, D: Deserializer<'de>>( deserializer: D, ) -> Result<Option<DateTime>, D::Error> { #[derive(Deserialize)] struct Wrapper(#[serde(deserialize_with = "deserialize")] DateTime); Ok(Option::deserialize(deserializer)?.map(|Wrapper(datetime)| datetime)) } }
<filename>aws-cpp-sdk-mediaconvert/source/model/Mp4Settings.cpp /** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ #include <aws/mediaconvert/model/Mp4Settings.h> #include <aws/core/utils/json/JsonSerializer.h> #include <utility> using namespace Aws::Utils::Json; using namespace Aws::Utils; namespace Aws { namespace MediaConvert { namespace Model { Mp4Settings::Mp4Settings() : m_audioDuration(CmfcAudioDuration::NOT_SET), m_audioDurationHasBeenSet(false), m_cslgAtom(Mp4CslgAtom::NOT_SET), m_cslgAtomHasBeenSet(false), m_cttsVersion(0), m_cttsVersionHasBeenSet(false), m_freeSpaceBox(Mp4FreeSpaceBox::NOT_SET), m_freeSpaceBoxHasBeenSet(false), m_moovPlacement(Mp4MoovPlacement::NOT_SET), m_moovPlacementHasBeenSet(false), m_mp4MajorBrandHasBeenSet(false) { } Mp4Settings::Mp4Settings(JsonView jsonValue) : m_audioDuration(CmfcAudioDuration::NOT_SET), m_audioDurationHasBeenSet(false), m_cslgAtom(Mp4CslgAtom::NOT_SET), m_cslgAtomHasBeenSet(false), m_cttsVersion(0), m_cttsVersionHasBeenSet(false), m_freeSpaceBox(Mp4FreeSpaceBox::NOT_SET), m_freeSpaceBoxHasBeenSet(false), m_moovPlacement(Mp4MoovPlacement::NOT_SET), m_moovPlacementHasBeenSet(false), m_mp4MajorBrandHasBeenSet(false) { *this = jsonValue; } Mp4Settings& Mp4Settings::operator =(JsonView jsonValue) { if(jsonValue.ValueExists("audioDuration")) { m_audioDuration = CmfcAudioDurationMapper::GetCmfcAudioDurationForName(jsonValue.GetString("audioDuration")); m_audioDurationHasBeenSet = true; } if(jsonValue.ValueExists("cslgAtom")) { m_cslgAtom = Mp4CslgAtomMapper::GetMp4CslgAtomForName(jsonValue.GetString("cslgAtom")); m_cslgAtomHasBeenSet = true; } if(jsonValue.ValueExists("cttsVersion")) { m_cttsVersion = jsonValue.GetInteger("cttsVersion"); m_cttsVersionHasBeenSet = true; } if(jsonValue.ValueExists("freeSpaceBox")) { m_freeSpaceBox = Mp4FreeSpaceBoxMapper::GetMp4FreeSpaceBoxForName(jsonValue.GetString("freeSpaceBox")); m_freeSpaceBoxHasBeenSet = true; } if(jsonValue.ValueExists("moovPlacement")) { m_moovPlacement = Mp4MoovPlacementMapper::GetMp4MoovPlacementForName(jsonValue.GetString("moovPlacement")); m_moovPlacementHasBeenSet = true; } if(jsonValue.ValueExists("mp4MajorBrand")) { m_mp4MajorBrand = jsonValue.GetString("mp4MajorBrand"); m_mp4MajorBrandHasBeenSet = true; } return *this; } JsonValue Mp4Settings::Jsonize() const { JsonValue payload; if(m_audioDurationHasBeenSet) { payload.WithString("audioDuration", CmfcAudioDurationMapper::GetNameForCmfcAudioDuration(m_audioDuration)); } if(m_cslgAtomHasBeenSet) { payload.WithString("cslgAtom", Mp4CslgAtomMapper::GetNameForMp4CslgAtom(m_cslgAtom)); } if(m_cttsVersionHasBeenSet) { payload.WithInteger("cttsVersion", m_cttsVersion); } if(m_freeSpaceBoxHasBeenSet) { payload.WithString("freeSpaceBox", Mp4FreeSpaceBoxMapper::GetNameForMp4FreeSpaceBox(m_freeSpaceBox)); } if(m_moovPlacementHasBeenSet) { payload.WithString("moovPlacement", Mp4MoovPlacementMapper::GetNameForMp4MoovPlacement(m_moovPlacement)); } if(m_mp4MajorBrandHasBeenSet) { payload.WithString("mp4MajorBrand", m_mp4MajorBrand); } return payload; } } // namespace Model } // namespace MediaConvert } // namespace Aws
<reponame>LinkedModernismProject/web_code """Generic metaclass. XXX This is very much a work in progress. """ import types class MetaMethodWrapper: def __init__(self, func, inst): self.func = func self.inst = inst self.__name__ = self.func.__name__ def __call__(self, *args, **kw): return self.func(self.inst, *args, **kw) class MetaHelper: __methodwrapper__ = MetaMethodWrapper # For derived helpers to override def __helperinit__(self, formalclass): self.__formalclass__ = formalclass def __getattr__(self, name): # Invoked for any attr not in the instance's __dict__ try: raw = self.__formalclass__.__getattr__(name) except AttributeError: try: ga = self.__formalclass__.__getattr__('__usergetattr__') except (KeyError, AttributeError): raise AttributeError(name) return ga(self, name) if type(raw) != types.FunctionType: return raw return self.__methodwrapper__(raw, self) class MetaClass: """A generic metaclass. This can be subclassed to implement various kinds of meta-behavior. """ __helper__ = MetaHelper # For derived metaclasses to override __inited = 0 def __init__(self, name, bases, dict): try: ga = dict['__getattr__'] except KeyError: pass else: dict['__usergetattr__'] = ga del dict['__getattr__'] self.__name__ = name self.__bases__ = bases self.__realdict__ = dict self.__inited = 1 def __getattr__(self, name): try: return self.__realdict__[name] except KeyError: for base in self.__bases__: try: return base.__getattr__(name) except AttributeError: pass raise AttributeError(name) def __setattr__(self, name, value): if not self.__inited: self.__dict__[name] = value else: self.__realdict__[name] = value def __call__(self, *args, **kw): inst = self.__helper__() inst.__helperinit__(self) try: init = inst.__getattr__('__init__') except AttributeError: init = lambda: None init(*args, **kw) return inst Meta = MetaClass('Meta', (), {}) def _test(): class C(Meta): def __init__(self, *args): print("__init__, args =", args) def m1(self, x): print("m1(x=%r)" % (x,)) print(C) x = C() print(x) x.m1(12) class D(C): def __getattr__(self, name): if name[:2] == '__': raise AttributeError(name) return "getattr:%s" % name x = D() print(x.foo) print(x._foo) ## print x.__foo ## print x.__foo__ if __name__ == '__main__': _test()
import React from 'react' import { RiCloseLine } from 'react-icons/ri' import LogoImg from '../../images/logo.png' import Section from './section' import SelectDir from './selectDir' type Props = { focusIframe: () => void onClickClose: () => void } const Config = ({ focusIframe, onClickClose }: Props): JSX.Element => { const handleClickClose = () => { onClickClose() focusIframe() } const handleClickSelectDir = () => { window.api.openSelectDir() focusIframe() } const handleClickRemoveCache = () => { window.api.removeCache() focusIframe() } const handleClickRemoveCookie = () => { window.api.removeCookie() focusIframe() } const handleClickCheckUpdate = () => { window.api.checkUpdate() focusIframe() } return ( <div className="flex flex-col items-center px-7 py-8 fixed top-1/2 left-1/2 -transform-50 w-96 bg-white border border-gray-400 rounded-xl shadow-2xl"> <button className="fixed top-3 right-3 text-xl text-gray-800" onClick={handleClickClose} > <RiCloseLine /> </button> <img className="w-64 drag-none" src={LogoImg} alt="serizawa" /> <div className="w-full mt-6"> <SelectDir onClick={handleClickSelectDir} /> <Section title="キャッシュを削除" btnText="削除" btnBg="bg-red-400" btnHoverBg="bg-red-600" onClick={handleClickRemoveCache} /> <Section title="初期化(ログアウト)" btnText="初期化" btnBg="bg-red-400" btnHoverBg="bg-red-600" onClick={handleClickRemoveCookie} /> <Section title="更新を確認" btnText="確認" btnBg="bg-gray-600" btnHoverBg="bg-gray-800" onClick={handleClickCheckUpdate} /> </div> <span className="block mt-8 text-xs text-gray-700"> {`Developed by arrow2nd - v${process.env.VERSION}`} </span> </div> ) } export default Config
<reponame>anunez97/fergilnad-game-engine // Dummy AI #ifndef _DummyAI #define _DummyAI #include "BaseAI.h" // AI used purely for testing, will not do anything class DummyAI : public BaseAI { public: DummyAI() {}; DummyAI(const DummyAI& other) = delete; DummyAI& operator=(const DummyAI& other) = delete; ~DummyAI() {}; void Execute() {}; }; #endif _DummyAI
/** Builder enabling use of Java 8 SAMs */ public static class Builder { private CommitRollbackFunction commit; private CommitRollbackFunction rollback; private RecoveryFunction recovery; public Builder withCommit(CommitRollbackFunction commit){ this.commit = commit; return this; } public Builder withRollback(CommitRollbackFunction rollback){ this.rollback = rollback; return this; } public Builder withRecovery(RecoveryFunction recovery){ this.recovery = recovery; return this; } public CommitRollbackRecoveryCallback build(){ Objects.requireNonNull(commit, "Please call withCommit(...)"); Objects.requireNonNull(rollback, "Please call withRollback(...)"); //recovery is optional, since you can configure adapter to handle state internally return new CommitRollbackRecoveryCallback(){ private static final long serialVersionUID = 1L; @Override public void commit(String txid) throws Exception { commit.apply(txid); } @Override public void rollback(String txid) throws Exception { rollback.apply(txid); } @Override public String[] getTransactionsInNeedOfRecovery() { if(recovery == null){ return new String[0]; }else{ return recovery.getTransactionsInNeedOfRecovery(); } } }; } public static interface RecoveryFunction { String[] getTransactionsInNeedOfRecovery(); } public static interface CommitRollbackFunction { void apply(String txid) throws Exception; } }
package models import ( i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" ) // FilterGroup type FilterGroup struct { // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. additionalData map[string]interface{} // Filter clauses (conditions) of this group. All clauses in a group must be satisfied in order for the filter group to evaluate to true. clauses []FilterClauseable // Human-readable name of the filter group. name *string } // NewFilterGroup instantiates a new filterGroup and sets the default values. func NewFilterGroup()(*FilterGroup) { m := &FilterGroup{ } m.SetAdditionalData(make(map[string]interface{})); return m } // CreateFilterGroupFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value func CreateFilterGroupFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { return NewFilterGroup(), nil } // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. func (m *FilterGroup) GetAdditionalData()(map[string]interface{}) { if m == nil { return nil } else { return m.additionalData } } // GetClauses gets the clauses property value. Filter clauses (conditions) of this group. All clauses in a group must be satisfied in order for the filter group to evaluate to true. func (m *FilterGroup) GetClauses()([]FilterClauseable) { if m == nil { return nil } else { return m.clauses } } // GetFieldDeserializers the deserialization information for the current model func (m *FilterGroup) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res["clauses"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateFilterClauseFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]FilterClauseable, len(val)) for i, v := range val { res[i] = v.(FilterClauseable) } m.SetClauses(res) } return nil } res["name"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetStringValue() if err != nil { return err } if val != nil { m.SetName(val) } return nil } return res } // GetName gets the name property value. Human-readable name of the filter group. 
func (m *FilterGroup) GetName()(*string) { if m == nil { return nil } else { return m.name } } // Serialize serializes information the current object func (m *FilterGroup) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { if m.GetClauses() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetClauses())) for i, v := range m.GetClauses() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("clauses", cast) if err != nil { return err } } { err := writer.WriteStringValue("name", m.GetName()) if err != nil { return err } } { err := writer.WriteAdditionalData(m.GetAdditionalData()) if err != nil { return err } } return nil } // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. func (m *FilterGroup) SetAdditionalData(value map[string]interface{})() { if m != nil { m.additionalData = value } } // SetClauses sets the clauses property value. Filter clauses (conditions) of this group. All clauses in a group must be satisfied in order for the filter group to evaluate to true. func (m *FilterGroup) SetClauses(value []FilterClauseable)() { if m != nil { m.clauses = value } } // SetName sets the name property value. Human-readable name of the filter group. func (m *FilterGroup) SetName(value *string)() { if m != nil { m.name = value } }
Evaluation of aluminium, manganese, copper and selenium effects on human islets amyloid polypeptide hormone aggregation. Islet amyloid formation causes destruction of insulin-producing beta-cells of the pancreas. The subsequent lack of insulin leads to increased blood and urine glucose. In this research, the fluorimetric assay was used to examine the effects of aluminium and some nutritionally essential trace elements including, manganese, copper and selenium on amyloid formation of human peptide of amylin under near-physiological circumstances. Results obtained from in vitro study showed that after 120 h incubation by shaker incubator in 37 degrees C, copper and selenium at 8 microM inhibited amylin 8 microM from amyloid fibril formation by 22.1 and 11.3%, respectively (p<0.05) while the similar values of either aluminium and manganese promoted the formation of beta-pleated sheet structure by 19.3 and 13.2% respectively (p<0.05). If islet amyloid is cytotoxic to beta-cells then copper and selenium may be able to protect these cells against degeneration in diabetic patients especially in type 2 diabetes mellitus.
Ben Zobrist is leading the hit parade even before he steps to the plate. Zobrist knocked in the Cubs' first run in Game 2 against the Giants Saturday, perhaps thanks to a new walk-up song created by his wife, Christian singer Julianna Zobrist — a cover of Elton John's "Bennie and the Jets," with a tweak of the lyrics to give her husband some personalized plate music. Julianna posted a video of herself singing from the stands at Wrigley Field on Facebook. "Who heard it? An appropriate walk-up for my man, appropriately dubbed Benny the Jet. He's an old soul who plays old school baseball," she wrote. Ben has also used his wife's song "Alive." Most players have several options for walk-up and warm-up music, and players have the option to add new songs at any time if the songs are deemed appropriate. Here are the primary songs from the most current Cubs list. Jake Arrieta: "Super Duper," Angela Albert Almora Jr.: "Hasta que se Seque Malecon," Jacob Forever Javier Baez: "Informer," Snow Kris Bryant: "Warm It Up," Kris Kross Trevor Cahill: "N 2 Gether Now," Limp Bizkit Aroldis Chapman: "Wake Up," Rage Against The Machine Coghlan, Chris: Stone Cold Steve Austin entrance song Willson Contreras: "Que Suenen Los Tambores," Victor Manuelle Carl Edwards Jr.: "Big Poppa" (clean version), Notorious B.I.G. Dexter Fowler: "Jumpman," Drake and Future Tim Federowicz: "Old Thing Back," The Notorious B.I.G. featuring Ja Rule Justin Grimm: "Take It Outside," Brantley Gilbert Jason Hammel: "Alive," Pearl Jam Kyle Hendricks: "Sweet Emotion," Aerosmith Jason Heyward: "This Girl," Kungs vs Cookin on 3 Burners Munenori Kawasaki: "Crazy Design, Carlitos Wey," El Teke Teke Tommy La Stella: "Oh What a Night," Four Seasons John Lackey: "Friends in Low Places," Garth Brooks Jon Lester: "Gonna Know We Were Here," Jason Aldean Miguel Montero: "La Mordidita," Ricky Martin Mike Montgomery: "The Show Goes On" (clean version), Lupe Fiasco Joe Nathan: "These Days," Foo Fighters Spencer Patton: "Long Hot Summer Days," Turnpike Troubadours Clayton Richard: "Bring Em Out," T.I. Anthony Rizzo: "Intoxicated (Dante Remix)," Martin Solveig & GTA Hector Rondon: "Tu Me Quemas," Chino y Nacho featuring Genta De Zona David Ross: "Young Forever," Jay Z featuring Mr. Hudson Addison Russell: "No Problem," Chance The Rapper featuring 2 Chainz and Lil Wayne Joe Smith: "My Kinda Party," Jason Aldean Jorge Soler: "The Power," Snap Pedro Strop: "Penelope," Ella Mi Dice Matthew Szczur: "X Ambassadors," Renegades Travis Wood: "How I Got To Be This Way," Justin Moore Rob Zastryny: "The Buzz," Hermitude featuring Big K.R.I.T., Mataya and Young Tapz Ben Zobrist: "Alive," Julianna Zobrist
“For all I know, his desire to work out a solution is quite sincere,” Mr. Putin continued. “I met him recently on the sidelines of the G-20 summit in Los Cabos, Mexico, where we had a chance to talk. And though we talked mostly about Syria, I could still take stock of my counterpart. My feeling is that he is a very honest man, and that he sincerely wants to make many good changes. But can he do it? Will they let him do it?” Mr. Putin mentioned the American military establishment and the State Department as obstacles to a compromise, and he said he faced similar challenges working with Russia’s own generals and career diplomats. Newsletter Sign Up Continue reading the main story Please verify you're not a robot by clicking the box. Invalid email address. Please re-enter. You must select a newsletter to subscribe to. Sign Up You will receive emails containing news content , updates and promotions from The New York Times. You may opt-out at any time. You agree to receive occasional updates and special offers for The New York Times's products and services. Thank you for subscribing. An error has occurred. Please try again later. View all New York Times newsletters. With a reminder of Mr. Romney’s remark about Russia, Mr. Putin was asked if he could work with a Romney administration. “Yes, we can,” he said. “We’ll work with whichever president gets elected by the American people. But our effort will only be as efficient as our partners will want it to be.” He added a sharp rebuke, accusing Mr. Romney of using inflamed language for political gain. That criticism might seem curious coming from Mr. Putin. His own campaign for office made use of some pointed anti-American talk, including criticism of the missile plan, claims that Secretary of State Hillary Rodham Clinton had sent “a signal” to prompt antigovernment demonstrations after a disputed parliamentary election, and accusations that the American ambassador, Michael A. McFaul, was meddling in Russian domestic affairs. “As for Mr. Romney’s position, we understand that this is to a certain extent motivated by the election race,” Mr. Putin said in the televised interview. “But I also think that he was obviously wrong, because such behavior on the international arena is the same as using nationalism and segregation as tools of U.S. domestic policy. It has the same effect on the international arena when a politician, a person who aspires to lead a nation, especially a superpower like the U.S., proclaims someone to be an enemy.” He then circled back to Russia’s concerns about the missile defense program. “Our American partners keep telling us, ‘This is not directed against you.’ But what happens if Mr. Romney, who believes us to be America’s No. 1 foe, gets elected as president of the United States? In that case, the system will definitely be directed against Russia, as its infrastructure looks to be configured exactly for this purpose. “And you also have to think about its strategic character — it’s built not for a year or even a decade — and the chances that a man with Romney’s views could come to power are quite high. So what are we supposed to do to ensure our security?”
// RunRsh starts a remote shell session on the server func RunRsh(options *kubecmd.ExecOptions, f *clientcmd.Factory, cmd *cobra.Command, args []string) error { if len(args) != 1 { return cmdutil.UsageError(cmd, "rsh requires a single POD to connect to") } options.PodName = args[0] _, client, err := f.Clients() if err != nil { return err } options.Client = client namespace, _, err := f.DefaultNamespace() if err != nil { return nil } options.Namespace = namespace config, err := f.ClientConfig() if err != nil { return err } options.Config = config if err := options.Validate(); err != nil { return err } return options.Run() }
// Given an address (typically with 0 last octet), search for a matching address on our interfaces, and return its last octet // Used for checking/defaulting "myaddr" parameter of config entries. // If no matching interface is found, 0 is returned. static int last_addr_octet_on_net(struct sockaddr *sa) { struct sockaddr_in *mysin = (struct sockaddr_in *)sa; struct sockaddr_in6 *mysin6 = (struct sockaddr_in6 *)sa; struct ifaddrs *ifp, *ifp0; int octet = 0; char ipaddr[INET6_ADDRSTRLEN]; if (getifaddrs(&ifp0) < 0) { perror("getifaddrs"); return 0; } if (debug) fprintf(stderr,"Looking for interface matching subnet address %s\n", ip46_ntoa(sa, ipaddr, sizeof(ipaddr))); ifp = ifp0; while (ifp && (octet == 0)) { if ((sa->sa_family == AF_INET) && (ifp->ifa_addr->sa_family == AF_INET)) { struct sockaddr_in *sin = (struct sockaddr_in *)ifp->ifa_addr; u_int mask = ntohl(((struct sockaddr_in *)(ifp->ifa_netmask))->sin_addr.s_addr); if ((ntohl(mysin->sin_addr.s_addr) & mask) == (ntohl(sin->sin_addr.s_addr) & mask)) { if (debug) fprintf(stderr,"Found matching IP interface %s with address %s\n", ifp->ifa_name, ip46_ntoa(ifp->ifa_addr, ipaddr, sizeof(ipaddr))); octet = ntohl(sin->sin_addr.s_addr) & 0xff; } } else if ((sa->sa_family == AF_INET6) && (ifp->ifa_addr->sa_family == AF_INET6)) { struct sockaddr_in6 *sin6 = (struct sockaddr_in6 *)ifp->ifa_addr; if (memcmp(&mysin6->sin6_addr, &sin6->sin6_addr, sizeof(struct in6_addr)-1) == 0) { if (debug) fprintf(stderr,"Found matching IPv6 interface %s with address %s\n", ifp->ifa_name, ip46_ntoa(ifp->ifa_addr, ipaddr, sizeof(ipaddr))); octet = sin6->sin6_addr.s6_addr[15]; } } ifp = ifp->ifa_next; } freeifaddrs(ifp0); return octet; }
def _transform_standardize(self, x: pd.DataFrame, y: pd.Series) -> [pd.DataFrame, pd.Series]: assert self.settings['standardize'], "Standardize settings not found." features = self.settings['standardize']['input']['features'] means = self.settings['standardize']['input']['means'] stds = self.settings['standardize']['input']['stds'] if len(x.keys()) < len(features): indices = [[i for i, j in enumerate(features) if j == k][0] for k in x.keys()] features = [features[i] for i in indices] means = [means[i] for i in indices] stds = [stds[i] for i in indices] x[features] = (x[features] - means) / stds if self.mode == 'regression': y = (y - self.settings['standardize']['output']['mean']) / self.settings['standardize']['output']['std'] return x, y
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/*                                                                           */
/*                  This file is part of the class library                   */
/*       SoPlex --- the Sequential object-oriented simPlex.                  */
/*                                                                           */
/*    Copyright (C) 1996-2016 Konrad-Zuse-Zentrum                            */
/*                            fuer Informationstechnik Berlin                */
/*                                                                           */
/*  SoPlex is distributed under the terms of the ZIB Academic Licence.       */
/*                                                                           */
/*  You should have received a copy of the ZIB Academic License              */
/*  along with SoPlex; see the file COPYING. If not email to [email protected].  */
/*                                                                           */
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/**@file  slufactor.h
 * @brief Implementation of Sparse Linear Solver.
 */
#ifndef _SLUFACTOR_H_
#define _SLUFACTOR_H_

#include <assert.h>

#include "spxdefines.h"
#include "timerfactory.h"
#include "dvector.h"
#include "slinsolver.h"
#include "clufactor.h"

namespace soplex
{
/// maximum number of factorization updates allowed before refactorization.
#define MAXUPDATES      1000

/**@brief   Implementation of Sparse Linear Solver.
 * @ingroup Algo
 *
 * This class implements a #SLinSolver interface by using the sparse LU
 * factorization implemented in #CLUFactor.
 */
class SLUFactor : public SLinSolver, protected CLUFactor
{
public:

   //--------------------------------
   /**@name Types */
   //@{
   /// Specifies how to perform the \ref soplex::SLUFactor::change "change" method.
   enum UpdateType
   {
      ETA = 0,       ///< product-form (eta) update
      FOREST_TOMLIN  ///< Forest-Tomlin update
   };
   /// for convenience
   typedef SLinSolver::Status Status;
   //@}

private:

   //--------------------------------
   /**@name Private data */
   //@{
   DVector    vec;           ///< Temporary vector
   SSVector   ssvec;         ///< Temporary semi-sparse vector
   //@}

protected:

   //--------------------------------
   /**@name Protected data */
   //@{
   bool       usetup;        ///< TRUE iff update vector has been set up
   UpdateType uptype;        ///< the current \ref soplex::SLUFactor::UpdateType "UpdateType".
   SSVector   eta;           ///< update vector of the eta update
   SSVector   forest;        ///< update vector set up by solveRight4update() and solve2right4update()
   Real       lastThreshold; ///< pivoting threshold of last factorization
   //@}

   //--------------------------------
   /**@name Control Parameters */
   //@{
   /// minimum threshold to use.
   Real minThreshold;
   /// minimum stability to achieve by setting threshold.
   Real minStability;
   /// |x| < epsilon is considered to be 0.
   Real epsilon;
   /// Time spent in solves
   Timer* solveTime;
   Timer::TYPE timerType;
   /// Number of solves
   int solveCount;
   //@}

protected:

   //--------------------------------
   /**@name Protected helpers */
   //@{
   ///
   void freeAll();
   ///
   void changeEta(int idx, SSVector& eta);
   //@}

public:

   //--------------------------------
   /**@name Update type */
   //@{
   /// returns the current update type uptype.
   UpdateType utype() const
   {
      return uptype;
   }

   /// sets update type.
   /** The new UpdateType becomes valid only after the next call to
       method load().
   */
   void setUtype(UpdateType tp)
   {
      uptype = tp;
   }

   /// sets minimum Markowitz threshold.
   void setMarkowitz(Real m)
   {
      if( m < 0.01 )
         m = 0.01;

      if( m > 0.99 )
         m = 0.99;

      minThreshold = m;
      lastThreshold = m;
   }

   /// returns Markowitz threshold.
   Real markowitz()
   {
      return lastThreshold;
   }
   //@}

   //--------------------------------
   /**@name Derived from SLinSolver
      See documentation of \ref soplex::SLinSolver "SLinSolver" for a
      documentation of these methods.
   */
   //@{
   ///
   void clear();
   ///
   int dim() const
   {
      return thedim;
   }
   ///
   int memory() const
   {
      return nzCnt + l.start[l.firstUnused];
   }
   ///
   const char* getName() const
   {
      return (uptype == SLUFactor::ETA) ? "SLU-Eta" : "SLU-Forest-Tomlin";
   }
   ///
   Status status() const
   {
      return Status(stat);
   }
   ///
   Real stability() const;
   ///
   std::string statistics() const;
   ///
   Status load(const SVector* vec[], int dim);
   //@}

public:

   //--------------------------------
   /**@name Solve */
   //@{
   /// Solves \f$Ax=b\f$.
   void solveRight (Vector& x, const Vector& b);
   /// Solves \f$Ax=b\f$.
   void solveRight (SSVector& x, const SVector& b);
   /// Solves \f$Ax=b\f$.
   void solveRight4update(SSVector& x, const SVector& b);
   /// Solves \f$Ax=b\f$ and \f$Ay=d\f$.
   void solve2right4update(SSVector& x, Vector& y, const SVector& b, SSVector& d);
   /// Sparse version of solving two systems of equations.
   void solve2right4update(SSVector& x, SSVector& y, const SVector& b, SSVector& d);
   /// Solves \f$Ax=b\f$, \f$Ay=d\f$ and \f$Az=e\f$.
   void solve3right4update(SSVector& x, Vector& y, Vector& z,
                           const SVector& b, SSVector& d, SSVector& e);
   /// Sparse version of solving three systems of equations.
   void solve3right4update(SSVector& x, SSVector& y, SSVector& z,
                           const SVector& b, SSVector& d, SSVector& e);
   /// Solves one system of equations with the transposed basis matrix (dense version).
   void solveLeft(Vector& x, const Vector& b);
   /// Solves \f$Ax=b\f$.
   void solveLeft(SSVector& x, const SVector& b);
   /// Solves \f$Ax=b\f$ and \f$Ay=d\f$.
   void solveLeft(SSVector& x, Vector& y, const SVector& b, SSVector& d);
   /// Sparse version of solving two systems of equations with the transposed basis matrix.
   void solveLeft(SSVector& x, SSVector& two, const SVector& b, SSVector& rhs2);
   /// Solves \f$Ax=b\f$, \f$Ay=d\f$ and \f$Az=e\f$.
   void solveLeft(SSVector& x, Vector& y, Vector& z,
                  const SVector& b, SSVector& d, SSVector& e);
   /// Sparse version of solving three systems of equations with the transposed basis matrix.
   void solveLeft(SSVector& x, SSVector& y, SSVector& z,
                  const SVector& b, SSVector& d, SSVector& e);
   ///
   Status change(int idx, const SVector& subst, const SSVector* eta = 0);
   //@}

   //--------------------------------
   /**@name Miscellaneous */
   //@{
   /// time spent in factorizations
   Real getFactorTime() const
   {
      return factorTime->time();
   }
   /// reset FactorTime
   void resetFactorTime()
   {
      factorTime->reset();
   }
   /// number of factorizations performed
   int getFactorCount() const
   {
      return factorCount;
   }
   /// time spent in solves
   Real getSolveTime() const
   {
      return solveTime->time();
   }
   /// reset SolveTime
   void resetSolveTime()
   {
      solveTime->reset();
   }
   /// number of solves performed
   int getSolveCount() const
   {
      return solveCount;
   }
   /// reset timers and counters
   void resetCounters()
   {
      factorTime->reset();
      solveTime->reset();
      factorCount = 0;
      solveCount = 0;
   }
   /// prints the LU factorization to stdout.
   void dump() const;
   /// consistency check.
   bool isConsistent() const;
   //@}

   //------------------------------------
   /**@name Constructors / Destructors */
   //@{
   /// default constructor.
   SLUFactor();
   /// assignment operator.
   SLUFactor& operator=(const SLUFactor& old);
   /// copy constructor.
   SLUFactor(const SLUFactor& old);
   /// destructor.
   virtual ~SLUFactor();
   /// clone function for polymorphism
   inline virtual SLinSolver* clone() const
   {
      return new SLUFactor(*this);
   }
   //@}

private:

   //------------------------------------
   /**@name Private helpers */
   //@{
   /// used to implement the assignment operator
   void assign(const SLUFactor& old);
   //@}
};

} // namespace soplex

#endif // _SLUFACTOR_H_
import * as AWSXRay from "aws-xray-sdk-core";

declare module "fastify" {
  interface FastifyInstance {
    /**
     * AWSXRay custom instance
     */
    AWSXRay?: any;
  }

  interface FastifyRequest {
    /**
     * Request XRay Segment
     */
    segment?: AWSXRay.Segment;
  }
}

declare function fastifyXray(): void;

declare namespace fastifyXray {
  interface FastifyXrayOptions {
    defaultName: string;
    AWSXRay?: any;
  }
}

export = fastifyXray;
MUMBAI/NEW DELHI -- India is tapping Japanese bullet train technology to build a 500km high-speed rail line linking a western manufacturing hub to Mumbai, an endeavor Prime Minister Narendra Modi hopes will spur similar upgrades across the country.

The roughly 2 trillion yen ($17.5 billion) project will connect Ahmedabad, the largest city in Modi's home state of Gujarat, with India's largest city and financial center. The railway, to be served by 12 stations, will shorten the trip dramatically, from seven hours now to just two.

Modi, Abe celebrate

Japanese Prime Minister Shinzo Abe stood beside Modi at the groundbreaking ceremony for the project in Gujarat in September. Modi stressed that the project shows Japan is a powerful partner of India. Abe responded by saying "this is a historic day." At the ceremony venue, a sign in Japanese declaring the beginning of a new era in railway travel was erected. The day appeared to symbolize a break from the antiquated rail network dating back to the time of British rule.

Japan is eager to ensure that its companies get to build the railway, and is providing India a low-interest yen loan to cover roughly 80% of the project cost. The Japanese team that includes East Japan Railway, Kawasaki Heavy Industries and Hitachi is expected to win the order. Just prior to the talks between Modi and Abe, the state of Maharashtra approved the terminal station for the bullet train in Mumbai's Bandra Kurla complex.

New business hub in Mumbai?

The complex is far from central Mumbai, home to many historic heritage sites. But it is well connected by metro transit and is close to an international airport. More and more financial institutions and foreign companies are setting up shop there. The U.S. company WeWork, which provides shared office space, opened its first Mumbai location at the complex in early September. A local marketing staffer expressed hope that the bullet train will bring even more companies to the area. The local startup Clap Global, previously based in southern Mumbai, moved its office to the complex. Conglomerate Reliance Industries is building a multipurpose facility that will house office space, residences and a cinema, and many other projects are underway. The complex, conveniently situated for both air and bullet train travel, is drawing so much attention that it could replace Mumbai's current main business district in the city's south.

More projects

Meanwhile, the Modi government aims to bring bullet train service to other parts of the country as well. Potential routes include a Delhi-Kolkata link in the east, a Mumbai-Chennai link in the southeast, and a Bangalore-Chennai link in the south. Japan is bringing together public- and private-sector resources in an effort to win more orders, hoping that the success of the Mumbai-Ahmedabad project will build momentum. For many years, India has placed a railway technology officer at its embassies in the U.K., Germany and France; it recently decided to assign such an officer to its embassy in Tokyo.

But competition is intensifying. China is conducting feasibility studies on multiple potential routes in India, and the German government has released a statement highlighting German companies' interest in building railways in India. Berlin decided in June to fund a feasibility study.

India boasts one of the world's largest railway networks, totaling over 60,000km in length. But aging equipment has led to many accidents. The Modi government seeks to restore trust in the country's rail system by building a bullet train network that incorporates Japanese technology.

Funding problem

Besides high-speed rail lines, India is moving forward with various infrastructure projects, including the 1,500km Delhi-Mumbai Industrial Corridor. In addition to laying a freight rail line, the project envisions building industrial parks and residential buildings along the route. The country also has similar plans for a corridor between Chennai and Bangalore. Facing a severe power shortage, the country is launching renewable energy projects to increase generation capacity. Local airports and highways are also being planned.

The key challenge to India's infrastructure ambitions is finding the necessary funds. The Ahmedabad-Mumbai railway can tap Japanese funds, but the Indian government, saddled with a huge fiscal deficit, will not be able to foot the bill for many projects.
package ModifiedPackage;

public interface ModifiedDeclarationInterface extends Cloneable {
}
/*
 * Move a char into a scene.
 */
void actor_to_scene( PLAYER *ch, SCENE *pSceneIndex )
{
    PROP *prop;

    if ( pSceneIndex == NULL )
    {
        wtf_logf( "actor_to_scene: NULL.", 0 );
        pSceneIndex = get_scene( SCENE_VNUM_DEATH );
    }

    ch->in_scene      = pSceneIndex;
    ch->next_in_scene = pSceneIndex->people;
    pSceneIndex->people = ch;

    if ( !NPC(ch) )
        ++ch->in_scene->zone->nplayer;

    /* Carried, lit light sources illuminate the scene they are brought into. */
    for ( prop = ch->carrying; prop != NULL; prop = prop->next_content )
    {
        if ( prop->item_type == ITEM_LIGHT && IS_LIT(prop) )
            ++ch->in_scene->light;
    }

    return;
}
def check_weasyprint( self ):
    from weasyprint import HTML, CSS
    from weasyprint.fonts import FontConfiguration
    font_config = FontConfiguration()

    from weasyprint import default_url_fetcher

    files_loaded = []

    def log_url_fetcher(url):
        # Record every resource URL WeasyPrint loads, then delegate to the default fetcher.
        files_loaded.append( url )
        return default_url_fetcher(url)

    base_dir = os.path.join( ABSPATH, "..", "resources" )

    html = HTML(string='''
        <h1>The title</h1>
        <div class="blue-text">blauer Text</div>
        <span>mdi-check-outline: </span><span><i class="mdi mdi-check-outline"></i></span><span> Oder?</span>
    ''')
    css = CSS(string='''
        @import url(mpdf_styles.css);
        h1 { font-family: Arial,"Helvetica Neue",Helvetica,sans-serif }
    ''', font_config=font_config, url_fetcher=log_url_fetcher, base_url=base_dir )

    pdf_file_name = os.path.join( ABSPATH, 'files', 'weasyprint.pdf')
    html.write_pdf( pdf_file_name, stylesheets=[css], font_config=font_config)

    self.assertGreaterEqual(len(files_loaded), 5, "count is not >= 5")

    response = self.app.get( "api/dummy/pdf", query_string={ "name" : "test-4" } )
    self.assertEqual(response.status_code, 200, "status is not 200")

    self.check_pdf_data( response.json["data"], contents=2, pages=3, intern_check=True )
from math import log, factorial import re from .adjacency_graphs import ADJACENCY_GRAPHS from decimal import Decimal from src.probabilistic_models.probabilistic_model import * def calc_average_degree(graph): average = 0 for key, neighbors in graph.items(): average += len([n for n in neighbors if n]) average /= float(len(graph.items())) return average BRUTEFORCE_CARDINALITY = 10 MIN_GUESSES_BEFORE_GROWING_SEQUENCE = 10000 MIN_SUBMATCH_GUESSES_SINGLE_CHAR = 10 MIN_SUBMATCH_GUESSES_MULTI_CHAR = 50 MIN_YEAR_SPACE = 20 REFERENCE_YEAR = 2017 def nCk(n, k): """http://blog.plover.com/math/choose.html""" if k > n: return 0 if k == 0: return 1 r = 1 for d in range(1, k + 1): r *= n r /= d n -= 1 return r # ------------------------------------------------------------------------------ # search --- most guessable match sequence ------------------------------------- # ------------------------------------------------------------------------------ # # takes a sequence of overlapping matches, returns the non-overlapping sequence with # minimum guesses. the following is a O(l_max * (n + m)) dynamic programming algorithm # for a length-n password with m candidate matches. l_max is the maximum optimal # sequence length spanning each prefix of the password. In practice it rarely exceeds 5 and the # search terminates rapidly. # # the optimal "minimum guesses" sequence is here defined to be the sequence that # minimizes the following function: # # g = l! * Product(m.guesses for m in sequence) + D^(l - 1) # # where l is the length of the sequence. # # the factorial term is the number of ways to order l patterns. # # the D^(l-1) term is another length penalty, roughly capturing the idea that an # attacker will try lower-length sequences first before trying length-l sequences. # # for example, consider a sequence that is date-repeat-dictionary. # - an attacker would need to try other date-repeat-dictionary combinations, # hence the product term. # - an attacker would need to try repeat-date-dictionary, dictionary-repeat-date, # ..., hence the factorial term. # - an attacker would also likely try length-1 (dictionary) and length-2 (dictionary-date) # sequences before length-3. assuming at minimum D guesses per pattern type, # D^(l-1) approximates Sum(D^i for i in [1..l-1] # # ------------------------------------------------------------------------------ def most_guessable_match_sequence(password, matches, _exclude_additive=False): n = len(password) # partition matches into sublists according to ending index j matches_by_j = [[] for _ in range(n)] try: for m in matches: matches_by_j[m['j']].append(m) except TypeError: pass # small detail: for deterministic output, sort each sublist by i. for lst in matches_by_j: lst.sort(key=lambda m1: m1['i']) optimal = { # optimal.m[k][l] holds final match in the best length-l match sequence # covering the password prefix up to k, inclusive. # if there is no length-l sequence that scores better (fewer guesses) # than a shorter match sequence spanning the same prefix, # optimal.m[k][l] is undefined. 'm': [{} for _ in range(n)], # same structure as optimal.m -- holds the product term Prod(m.guesses # for m in sequence). optimal.pi allows for fast (non-looping) updates # to the minimization function. 'pi': [{} for _ in range(n)], # same structure as optimal.m -- holds the overall metric. 'g': [{} for _ in range(n)], } # helper: considers whether a length-l sequence ending at match m is better # (fewer guesses) than previously encountered sequences, updating state if # so. 
def update(m, l): k = m['j'] pi = estimate_guesses(m, password) if l > 1: # we're considering a length-l sequence ending with match m: # obtain the product term in the minimization function by # multiplying m's guesses by the product of the length-(l-1) # sequence ending just before m, at m.i - 1. pi = pi * Decimal(optimal['pi'][m['i'] - 1][l - 1]) # calculate the minimization func g = factorial(l) * pi if not _exclude_additive: g += MIN_GUESSES_BEFORE_GROWING_SEQUENCE ** (l - 1) # update state if new best. # first see if any competing sequences covering this prefix, with l or # fewer matches, fare better than this sequence. if so, skip it and # return. for competing_l, competing_g in optimal['g'][k].items(): if competing_l > l: continue if competing_g <= g: return # this sequence might be part of the final optimal sequence. optimal['g'][k][l] = g optimal['m'][k][l] = m optimal['pi'][k][l] = pi # helper: evaluate bruteforce matches ending at k. def bruteforce_update(k): # see if a single bruteforce match spanning the k-prefix is optimal. m = make_bruteforce_match(0, k) update(m, 1) for i in range(1, k + 1): # generate k bruteforce matches, spanning from (i=1, j=k) up to # (i=k, j=k). see if adding these new matches to any of the # sequences in optimal[i-1] leads to new bests. m = make_bruteforce_match(i, k) for l, last_m in optimal['m'][i - 1].items(): l = int(l) # corner: an optimal sequence will never have two adjacent # bruteforce matches. it is strictly better to have a single # bruteforce match spanning the same region: same contribution # to the guess product with a lower length. # --> safe to skip those cases. if last_m.get('pattern', False) == 'bruteforce': continue # try adding m to this length-l sequence. update(m, l + 1) # helper: make bruteforce match objects spanning i to j, inclusive. def make_bruteforce_match(i, j): return { 'pattern': 'bruteforce', 'token': password[i:j + 1], 'i': i, 'j': j, } # helper: step backwards through optimal.m starting at the end, # constructing the final optimal match sequence. 
def unwind(n): optimal_match_sequence = [] k = n - 1 # find the final best sequence length and score l = None g = float('inf') for candidate_l, candidate_g in optimal['g'][k].items(): if candidate_g < g: l = candidate_l g = candidate_g while k >= 0: m = optimal['m'][k][l] optimal_match_sequence.insert(0, m) k = m['i'] - 1 l -= 1 return optimal_match_sequence for k in range(n): for m in matches_by_j[k]: if m['i'] > 0: for l in optimal['m'][m['i'] - 1]: l = int(l) update(m, l + 1) else: update(m, 1) bruteforce_update(k) optimal_match_sequence = unwind(n) optimal_l = len(optimal_match_sequence) # corner: empty password if len(password) == 0: guesses = 1 else: guesses = optimal['g'][n - 1][optimal_l] probabilistic_guesses = probabilistic_model_guesses(password) if probabilistic_guesses <= guesses: guesses = probabilistic_guesses optimal_match_sequence = {'pattern':'probabilistic model'} # final result object return { 'password': password, 'guesses': guesses, 'guesses_log10': log(guesses, 10), 'sequence': optimal_match_sequence, } def estimate_guesses(match, password): if match.get('guesses', False): return Decimal(match['guesses']) min_guesses = 1 if len(match['token']) < len(password): if len(match['token']) == 1: min_guesses = MIN_SUBMATCH_GUESSES_SINGLE_CHAR else: min_guesses = MIN_SUBMATCH_GUESSES_MULTI_CHAR estimation_functions = { 'bruteforce': bruteforce_guesses, 'dictionary': dictionary_guesses, 'spatial': spatial_guesses, 'repeat': repeat_guesses, 'sequence': sequence_guesses, 'alternate_sequence': alternate_sequence_guesses, 'regex': regex_guesses, 'date': date_guesses, 'probabilistic_model': probabilistic_model_guesses, } guesses = estimation_functions[match['pattern']](match) match['guesses'] = max(guesses, min_guesses) match['guesses_log10'] = log(match['guesses'], 10) return Decimal(match['guesses']) def bruteforce_guesses(match): guesses = BRUTEFORCE_CARDINALITY ** len(match['token']) # small detail: make bruteforce matches at minimum one guess bigger than # smallest allowed submatch guesses, such that non-bruteforce submatches # over the same [i..j] take precedence. if len(match['token']) == 1: min_guesses = MIN_SUBMATCH_GUESSES_SINGLE_CHAR + 1 else: min_guesses = MIN_SUBMATCH_GUESSES_MULTI_CHAR + 1 return max(guesses, min_guesses) def dictionary_guesses(match): # keep these as properties for display purposes match['base_guesses'] = match['rank'] match['uppercase_variations'] = uppercase_variations(match) match['l33t_variations'] = l33t_variations(match) reversed_variations = match.get('reversed', False) and 2 or 1 return match['base_guesses'] * match['uppercase_variations'] * \ match['l33t_variations'] * reversed_variations def repeat_guesses(match): return match['base_guesses'] * Decimal(match['repeat_count']) def sequence_guesses(match): first_chr = match['token'][:1] # lower guesses for obvious starting points if first_chr in ['a', 'A', 'z', 'Z', '0', '1', '9']: base_guesses = 4 else: if re.compile(r'\d').match(first_chr): base_guesses = 10 # digits else: # could give a higher base for uppercase, # assigning 26 to both upper and lower sequences is more # conservative. 
base_guesses = 26 if not match['ascending']: base_guesses *= 2 return base_guesses * len(match['token']) def alternate_sequence_guesses(match): m1 = { 'token': match['sequence_1'], 'ascending': match['ascending_1'] } m2 = { 'token': match['sequence_2'], 'ascending': match['ascending_2'] } return sequence_guesses(m1) + sequence_guesses(m2) def regex_guesses(match): char_class_bases = { 'alpha_lower': 26, 'alpha_upper': 26, 'alpha': 52, 'alphanumeric': 62, 'digits': 10, 'symbols': 33, } if match['regex_name'] in char_class_bases: return char_class_bases[match['regex_name']] ** len(match['token']) elif match['regex_name'] == 'recent_year': # conservative estimate of year space: num years from REFERENCE_YEAR. # if year is close to REFERENCE_YEAR, estimate a year space of # MIN_YEAR_SPACE. year_space = abs(int(match['regex_match'].group(0)) - REFERENCE_YEAR) year_space = max(year_space, MIN_YEAR_SPACE) return year_space def date_guesses(match): year_space = max(abs(match['year'] - REFERENCE_YEAR), MIN_YEAR_SPACE) guesses = year_space * 365 if match.get('separator', False): guesses *= 4 return guesses KEYBOARD_AVERAGE_DEGREE = calc_average_degree(ADJACENCY_GRAPHS['qwerty']) # slightly different for keypad/mac keypad, but close enough KEYPAD_AVERAGE_DEGREE = calc_average_degree(ADJACENCY_GRAPHS['keypad']) KEYBOARD_STARTING_POSITIONS = len(ADJACENCY_GRAPHS['qwerty'].keys()) KEYPAD_STARTING_POSITIONS = len(ADJACENCY_GRAPHS['keypad'].keys()) def spatial_guesses(match): if match['graph'] in ['qwerty', 'dvorak']: s = KEYBOARD_STARTING_POSITIONS d = KEYBOARD_AVERAGE_DEGREE else: s = KEYPAD_STARTING_POSITIONS d = KEYPAD_AVERAGE_DEGREE guesses = 0 L = len(match['token']) t = match['turns'] # estimate the number of possible patterns w/ length L or less with t turns # or less. for i in range(2, L + 1): possible_turns = min(t, i - 1) + 1 for j in range(1, possible_turns): guesses += nCk(i - 1, j - 1) * s * pow(d, j) # add extra guesses for shifted keys. (% instead of 5, A instead of a.) # math is similar to extra guesses of l33t substitutions in dictionary # matches. if match['shifted_count']: S = match['shifted_count'] U = len(match['token']) - match['shifted_count'] # unshifted count if S == 0 or U == 0: guesses *= 2 else: shifted_variations = 0 for i in range(1, min(S, U) + 1): shifted_variations += nCk(S + U, i) guesses *= shifted_variations return guesses START_UPPER = re.compile(r'^[A-Z][^A-Z]+$') END_UPPER = re.compile(r'^[^A-Z]+[A-Z]$') ALL_UPPER = re.compile(r'^[^a-z]+$') ALL_LOWER = re.compile(r'^[^A-Z]+$') def uppercase_variations(match): word = match['token'] if ALL_LOWER.match(word) or word.lower() == word: return 1 for regex in [START_UPPER, END_UPPER, ALL_UPPER]: if regex.match(word): return 2 U = sum(1 for c in word if c.isupper()) L = sum(1 for c in word if c.islower()) variations = 0 for i in range(1, min(U, L) + 1): variations += nCk(U + L, i) return variations def l33t_variations(match): if not match.get('l33t', False): return 1 variations = 1 for subbed, unsubbed in match['sub'].items(): # lower-case match.token before calculating: capitalization shouldn't # affect l33t calc. chrs = list(match['token'].lower()) S = sum(1 for chr in chrs if chr == subbed) U = sum(1 for chr in chrs if chr == unsubbed) if S == 0 or U == 0: # for this sub, password is either fully subbed (444) or fully # unsubbed (aaa) treat that as doubling the space (attacker needs # to try fully subbed chars in addition to unsubbed.) 
variations *= 2 else: # this case is similar to capitalization: # with aa44a, U = 3, S = 2, attacker needs to try unsubbed + one # sub + two subs p = min(U, S) possibilities = 0 for i in range(1, p + 1): possibilities += nCk(U + S, i) variations *= possibilities return variations
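As a sanity check on the minimization function documented above, a small hypothetical worked example; the two match guess counts are invented:

# Hypothetical worked example of g = l! * Prod(m.guesses) + D^(l-1)
# for a length-2 sequence (say, a dictionary match followed by a date match).
from math import factorial

D = 10000                     # MIN_GUESSES_BEFORE_GROWING_SEQUENCE
match_guesses = [50, 7300]    # invented guess counts for the two matches
l = len(match_guesses)

product = 1
for g_m in match_guesses:
    product *= g_m            # the product term: other same-pattern combinations

g = factorial(l) * product + D ** (l - 1)
print(g)                      # 2 * 365000 + 10000 = 740000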
import cv2
import numpy as np


def makeTissueMask(img):
    # Threshold in HSV space to isolate tissue-colored pixels.
    hsv = cv2.cvtColor(img, cv2.COLOR_RGB2HSV)
    lower_red = np.array([120, 20, 180])
    upper_red = np.array([190, 220, 255])
    tissue_mask = cv2.inRange(hsv, lower_red, upper_red)
    # Clean up the binary mask: dilate, close small holes, then smooth.
    tissue_mask = cv2.dilate(tissue_mask, None, iterations=2)
    tissue_mask = cv2.morphologyEx(tissue_mask, cv2.MORPH_CLOSE, None)
    tissue_mask = cv2.medianBlur(tissue_mask, 21)
    return tissue_mask
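A short hypothetical usage sketch; the image file name is invented, and note the function expects an RGB image while OpenCV loads BGR:

# Hypothetical usage: apply the tissue mask to an RGB image.
img_bgr = cv2.imread("slide.png")                   # invented file name
img_rgb = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB)  # makeTissueMask expects RGB input
mask = makeTissueMask(img_rgb)
tissue_only = cv2.bitwise_and(img_rgb, img_rgb, mask=mask)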
import sys import math def main(args): food_choice = [0, 1, 2, 0, 2, 1, 0] a, b, c = map(int, input().split()) res = 0 for d in range(7): cnt = 0 ar = [a, b, c] for i in range(d, 7): v = food_choice[i] if not ar[v]: break cnt += 1 ar[v] -= 1 f = min(ar[0] // 3, min(ar[1], ar[2]) // 2) ar[0], ar[1], ar[2] = ar[0] - 3 * f, ar[1] - 2 * f, ar[2] - 2 * f cnt += (f * 7) for v in food_choice: if not ar[v]: break cnt += 1 ar[v] -= 1 res = max(res, cnt) print(res) if __name__ == '__main__': sys.exit(main(sys.argv))
/**
 * This is the fragment for the Home page
 */
public class FragmentHome extends Fragment {
    Context context;
    private View root;
    MaterialCardView myStatusCard;
    ConstraintLayout heading;
    TextView scanningTv;
    TextView headingTv;
    RadioGroup radioGroup;
    RadioButton radioPositiveBtn;
    RadioButton radioNegativeBtn;
    Button reportButton;
    Button refresh_btn;
    public static TextView numberOfHitsTv;
    public static TextView riskLevelTv;
    Handler handler;
    Map<Integer, String> riskLevelMap;
    SharedPreferences sharedPreferences;

    private static final String TAG = "fragment home";
    private static final String SETTINGS = "settings";
    private static final String IS_APP_DISABLED = "is_app_disabled";
    public static final int UPLOAD_INTERVAL_IN_DAYS = 7;
    public static final boolean FLEXIBLE_MY_STATUS_ENABLED = false;

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        this.context = context;
        this.handler = new DownloadHandler(context, TAG);
    }

    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        this.context = getActivity();
        root = inflater.inflate(R.layout.fragment_home, container, false);
        initRiskLevelMap();
        initView(root);
        return root;
    }

    @Override
    public void onResume() {
        super.onResume();
        refreshDashboard();
        refreshIsScanning();
    }

    void initView(View root){
        myStatusCard = root.findViewById(R.id.card1);
        heading = root.findViewById(R.id.Heading);
        headingTv = root.findViewById(R.id.heading_tv);
        refresh_btn = (Button) root.findViewById(R.id.refreshButton);
        radioGroup = (RadioGroup) root.findViewById(R.id.radioGroup);
        radioPositiveBtn = (RadioButton) root.findViewById(R.id.radioPositive);
        radioNegativeBtn = root.findViewById(R.id.radioNegative);
        reportButton = (Button) root.findViewById(R.id.reportResults);
        numberOfHitsTv = root.findViewById(R.id.number_of_hits_tv);
        riskLevelTv = root.findViewById(R.id.risk_level_tv);
        scanningTv = root.findViewById(R.id.scanning_tv);

        // restore the saved "my status" choice
        sharedPreferences = context.getSharedPreferences("my status choice", MODE_PRIVATE);
        boolean choice = sharedPreferences.getBoolean("choice", false);
        radioNegativeBtn.setChecked(!choice);
        radioPositiveBtn.setChecked(choice);
        if(choice)
            reportButton.setVisibility(View.VISIBLE);
        else
            reportButton.setVisibility(View.INVISIBLE);

        // Check if a manual upload is needed
        if(FLEXIBLE_MY_STATUS_ENABLED && !checkIfAllowsManualUpload()){
            // disable the two radio buttons
            radioNegativeBtn.setEnabled(false);
            radioPositiveBtn.setEnabled(false);
            // set positive button checked
            radioPositiveBtn.setChecked(true);
            radioNegativeBtn.setChecked(false);
            // gray background for my status panel and change heading text
            myStatusCard.setBackgroundColor(getResources().getColor(R.color.disable_gray));
            headingTv.setText("Result Reported");
            reportButton.setVisibility(View.GONE);
            // alert the user when they tap the my status panel
            myStatusCard.setOnClickListener((View v) ->
                    Toast.makeText(context, "Your result has been reported", Toast.LENGTH_SHORT).show());
        }

        //EventListener for radio button
        radioGroup.setOnCheckedChangeListener((RadioGroup group, int checkedId) -> {
            sharedPreferences = context.getSharedPreferences("my status choice", MODE_PRIVATE);
            SharedPreferences.Editor editor = sharedPreferences.edit();
            if(checkedId == radioPositiveBtn.getId()){
                reportButton.setVisibility(View.VISIBLE);
                editor.putBoolean("choice", true);
            }
            else{
                reportButton.setVisibility(View.GONE);
                editor.putBoolean("choice", false);
            }
            editor.commit();
        });

        //EventListener for report your results button
        reportButton.setOnClickListener((View v) -> {
            Task<Boolean> isRunningTask = ContactShield.getContactShieldEngine(context).isContactShieldRunning();
            isRunningTask.addOnSuccessListener(aBoolean -> {
                if(aBoolean){
                    Intent intent = new Intent(context, ReportTestResultPreActivity.class);
                    startActivity(intent);
                }else{
                    Toast.makeText(context, "Please enable the app before reporting", Toast.LENGTH_SHORT).show();
                }
            });
            isRunningTask.addOnFailureListener(e -> {
                Toast.makeText(context, "Please enable the app before reporting", Toast.LENGTH_SHORT).show();
            });
        });

        //EventListener for refresh button
        refresh_btn.setOnClickListener((View v) -> new GeneratePKZip(context, handler).start());
    }

    void initRiskLevelMap(){
        riskLevelMap = new HashMap<>();
        riskLevelMap.put(0, "NO RISK");
        riskLevelMap.put(1, "LOWEST");
        riskLevelMap.put(2, "LOW");
        riskLevelMap.put(3, "MEDIUM LOW");
        riskLevelMap.put(4, "MEDIUM");
        riskLevelMap.put(5, "MEDIUM HIGH");
        riskLevelMap.put(6, "HIGH");
        riskLevelMap.put(7, "EXTRA HIGH");
        riskLevelMap.put(8, "HIGHEST");
    }

    boolean checkIfAllowsManualUpload(){
        sharedPreferences = context.getSharedPreferences("upload_pk_history", MODE_PRIVATE);
        String registrationKey = sharedPreferences.getString("registration_key", "");
        long latest_uploading_time = sharedPreferences.getLong("timestamp", 0);
        sharedPreferences = getContext().getSharedPreferences(SETTINGS, MODE_PRIVATE);
        boolean isPKUploadDisabled = sharedPreferences.getBoolean("is_PK_upload_disabled", false);

        // If registration_key is missing or corrupted, or it has been more than one interval
        // (7 days) since the last manual upload, the user needs to upload manually again.
        if(registrationKey.length() != 32 || latest_uploading_time < (System.currentTimeMillis() - UPLOAD_INTERVAL_IN_DAYS * 24 * 6 * 600000)){
            return true;
        }
        // If registration_key exists but nothing has been uploaded in 24 hours, one auto upload is needed.
        else if(registrationKey.length() == 32 && latest_uploading_time < (System.currentTimeMillis() - 24 * 6 * 600000)){
            // check if uploading PK has been disabled by the user
            if(!isPKUploadDisabled)
                uploadPKAutomatically(registrationKey);
            return false;
        }else{
            // If registration_key exists and an auto upload happened within 24 hours, no further action is needed.
            return false;
        }
    }

    void uploadPKAutomatically(String registrationKey){
        new GetTan(context, handler, registrationKey).start();
    }

    void refreshDashboard(){
        // refresh the dashboard values
        sharedPreferences = context.getSharedPreferences("dashboard_info", MODE_PRIVATE);
        numberOfHitsTv.setText("" + sharedPreferences.getInt("number_of_hits", 0));
        riskLevelTv.setText(riskLevelMap.get(sharedPreferences.getInt("risk_level", 0)));
    }

    void refreshIsScanning(){
        sharedPreferences = getContext().getSharedPreferences(SETTINGS, MODE_PRIVATE);
        boolean isAppDisabled = sharedPreferences.getBoolean(IS_APP_DISABLED, false);
        if(!isAppDisabled){
            engineStartPreCheck();
            Log.e(TAG, "contact shielding should be running");
        }else{
            Task<Void> stopContactShield = ContactShield.getContactShieldEngine(context).stopContactShield();
            stopContactShield.addOnSuccessListener( (Void v) ->
                    Log.e(TAG, "stop contact shield >> Succeeded"));
            stopContactShield.addOnFailureListener( (Exception e) ->
                    Log.e(TAG, "stop contact shield " + e.getMessage()));
            Log.e(TAG, "contact shielding should not be running");
        }
    }

    void engineStartPreCheck(){
        Log.d(TAG, "engine_start_pre_check");
        Task<Boolean> isRunningTask = ContactShield.getContactShieldEngine(context).isContactShieldRunning();
        isRunningTask.addOnSuccessListener(aBoolean -> {
            if(!aBoolean){
                engineStart();
                Log.e(TAG,
"isContactShieldRunning >> NO"); }else{ scanningTv.setVisibility(View.VISIBLE); Log.e(TAG, "isContactShieldRunning >> YES"); } }); } void engineStart(){ Log.d(TAG, "engine_start"); PendingIntent pendingIntent = PendingIntent.getService(getActivity(), 0, new Intent(getActivity(), BackgroundContactCheckingIntentService.class), PendingIntent.FLAG_UPDATE_CURRENT); ContactShield.getContactShieldEngine(context).startContactShield(pendingIntent, ContactShieldSetting.DEFAULT) .addOnSuccessListener(aVoid -> { Log.d(TAG, "startContactShield >> Success"); scanningTv.setVisibility(View.VISIBLE); }) .addOnFailureListener(e -> { Log.e(TAG, "startContactShield >> Failure"); scanningTv.setVisibility(View.INVISIBLE); }); } }
/* ** Check whether a given upvalue from a given closure exists and ** returns its index */ static void *checkupval (lua_State *L, int argf, int argnup, int *pnup) { void *id; int nup = (int)luaL_checkinteger(L, argnup); luaL_checktype(L, argf, LUA_TFUNCTION); id = lua_upvalueid(L, argf, nup); if (pnup) { luaL_argcheck(L, id != NULL, argnup, "invalid upvalue index"); *pnup = nup; } return id; }
package io.square1.tools;

import android.os.Parcel;
import android.test.suitebuilder.annotation.SmallTest;
import android.support.test.runner.AndroidJUnit4;

import junit.framework.Assert;

import org.json.JSONObject;
import org.junit.Before;
import org.junit.runner.RunWith;
import org.junit.Test;

import io.square1.tools.json.DefaultJsonMapper;
import io.square1.tools.json.Pagination;

/**
 * Created by roberto on 14/07/2016.
 */
@RunWith(AndroidJUnit4.class)
@SmallTest
public class JsonTests {

    int page = 3;
    int pages = 200;
    int total = 234;
    int mJsonId = 123;

    private DefaultJsonMapper mDefaultJsonParser;
    private JSONObject mPaginationJson;
    private JSONObject mMockJson;

    @Before
    public void createLogHistory() {
        mDefaultJsonParser = new DefaultJsonMapper();
        mPaginationJson = new JSONObject();
        mMockJson = new JSONObject();

        try {
            mPaginationJson.put(mDefaultJsonParser.getJsonFieldForPaginationCurrentPage(), page);
            mPaginationJson.put(mDefaultJsonParser.getJsonFieldForPaginationPages(), pages);
            mPaginationJson.put(mDefaultJsonParser.getJsonFieldForPaginationTotalResults(), total);

            mMockJson.put(mDefaultJsonParser.getJsonFieldForId(), mJsonId);
            mMockJson.put(mDefaultJsonParser.getJsonFieldForCreatedAt(), "1999-11-22 18:01:22");
            mMockJson.put(mDefaultJsonParser.getJsonFieldForUpdatedAt(), "2000-01-23 18:01:22");
            mMockJson.put("mDate", "2000-01-23 18:01:22");
        } catch (Exception e) {
        }
    }

    @Test
    public void paginationFromJson_test() throws Exception {
        Pagination pagination = new Pagination(mPaginationJson, mDefaultJsonParser);
        Assert.assertEquals(pagination.getCurrentPage(), page);
        Assert.assertEquals(pagination.getTotalPages(), pages);
        Assert.assertEquals(pagination.getTotalResults(), total);
    }

    @Test
    public void paginationFromParcel_test() throws Exception {
        Pagination pagination = new Pagination(mPaginationJson, mDefaultJsonParser);
        // Write the data.
        Parcel parcel = Parcel.obtain();
        pagination.writeToParcel(parcel, pagination.describeContents());
        parcel.setDataPosition(0);
        Pagination pagination2 = Pagination.CREATOR.createFromParcel(parcel);
        Assert.assertEquals(pagination2.getCurrentPage(), page);
        Assert.assertEquals(pagination2.getTotalPages(), pages);
        Assert.assertEquals(pagination2.getTotalResults(), total);
    }

    @Test
    public void mockFromParcel_test() throws Exception {
        MockObject mockObject = new MockObject(mMockJson);
        // Write the data.
        Parcel parcel = Parcel.obtain();
        mockObject.writeToParcel(parcel, mockObject.describeContents());
        parcel.setDataPosition(0);
        MockObject mockObject2 = MockObject.CREATOR.createFromParcel(parcel);
        Assert.assertEquals(mockObject, mockObject2);
        Assert.assertEquals(mockObject2.getId(), mJsonId);
    }
}
/** @file
  This is part of the implementation of an Intel Graphics drivers OpRegion /
  Software SCI interface between system BIOS, ASL code, and Graphics drivers.
  The code in this file will load the driver and initialize the interface.

  Copyright (c) 2019 - 2020 Intel Corporation. All rights reserved. <BR>

  SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#include "IgdOpRegionInit.h"
#include <Library/ConfigBlockLib.h>

GLOBAL_REMOVE_IF_UNREFERENCED IGD_OPREGION_PROTOCOL mIgdOpRegion;

/**
  Get VBT data using the SA platform policy.

  @param[out] VbtFileBuffer    Pointer to VBT data buffer.

  @retval    EFI_SUCCESS      VBT data was returned.
  @retval    EFI_NOT_FOUND    VBT data not found.
  @exception EFI_UNSUPPORTED  Invalid signature in VBT data.
**/
EFI_STATUS
GetIntegratedIntelVbtPtr (
  OUT VBIOS_VBT_STRUCTURE **VbtFileBuffer
  )
{
  EFI_STATUS            Status;
  EFI_PHYSICAL_ADDRESS  VbtAddress;
  UINT32                Size;
  GRAPHICS_DXE_CONFIG   *GraphicsDxeConfig;

  ///
  /// Get the SA policy.
  ///
  Status = gBS->LocateProtocol (
                  &gSaPolicyProtocolGuid,
                  NULL,
                  (VOID **) &mSaPolicy
                  );
  if (EFI_ERROR (Status)) {
    return Status;
  }

  Status = GetConfigBlock ((VOID *) mSaPolicy, &gGraphicsDxeConfigGuid, (VOID *)&GraphicsDxeConfig);
  ASSERT_EFI_ERROR (Status);

  VbtAddress = GraphicsDxeConfig->VbtAddress;
  Size = GraphicsDxeConfig->Size;

  if (VbtAddress == 0x00000000) {
    return EFI_NOT_FOUND;
  } else {
    ///
    /// Check VBT signature
    ///
    *VbtFileBuffer = NULL;
    *VbtFileBuffer = (VBIOS_VBT_STRUCTURE *) (UINTN) VbtAddress;
    if ((*((UINT32 *) ((*VbtFileBuffer)->HeaderSignature))) != VBT_SIGNATURE) {
      FreePool (*VbtFileBuffer);
      *VbtFileBuffer = NULL;
      return EFI_UNSUPPORTED;
    }
  }

  if (Size == 0) {
    return EFI_NOT_FOUND;
  } else {
    ///
    /// Check VBT size
    ///
    if ((*VbtFileBuffer)->HeaderVbtSize > Size) {
      (*VbtFileBuffer)->HeaderVbtSize = (UINT16) Size;
    }
  }

  return EFI_SUCCESS;
}

/**
  Get a pointer to an uncompressed image of the Intel video BIOS.

  @Note: This function would only be called if the video BIOS at 0xC0000 is
         missing or not an Intel video BIOS. It may not be an Intel video BIOS
         if the Intel graphics controller is considered a secondary adapter.

  @param[out] VBiosImage  - Pointer to an uncompressed Intel video BIOS. This pointer must
                            be set to NULL if an uncompressed image of the Intel Video BIOS
                            is not obtainable.

  @retval EFI_SUCCESS        - VBiosPtr is updated.
  @exception EFI_UNSUPPORTED - No Intel video BIOS found.
**/
EFI_STATUS
GetIntegratedIntelVBiosPtr (
  OUT INTEL_VBIOS_OPTION_ROM_HEADER **VBiosImage
  )
{
  EFI_HANDLE                    *HandleBuffer;
  UINTN                         HandleCount;
  UINTN                         Index;
  INTEL_VBIOS_PCIR_STRUCTURE    *PcirBlockPtr;
  EFI_STATUS                    Status;
  EFI_PCI_IO_PROTOCOL           *PciIo;
  INTEL_VBIOS_OPTION_ROM_HEADER *VBiosRomImage;

  ///
  /// Set as if an uncompressed Intel video BIOS image was not obtainable.
  ///
  VBiosRomImage = NULL;

  ///
  /// Get all PCI IO protocols
  ///
  Status = gBS->LocateHandleBuffer (
                  ByProtocol,
                  &gEfiPciIoProtocolGuid,
                  NULL,
                  &HandleCount,
                  &HandleBuffer
                  );
  ASSERT_EFI_ERROR (Status);

  ///
  /// Find the video BIOS by checking each PCI IO handle for an Intel video
  /// BIOS OPROM.
  ///
  for (Index = 0; Index < HandleCount; Index++) {
    Status = gBS->HandleProtocol (
                    HandleBuffer[Index],
                    &gEfiPciIoProtocolGuid,
                    (VOID **) &PciIo
                    );
    ASSERT_EFI_ERROR (Status);

    VBiosRomImage = PciIo->RomImage;

    ///
    /// If this PCI device doesn't have a ROM image, skip to the next device.
    ///
    if (!VBiosRomImage) {
      continue;
    }

    ///
    /// Get pointer to PCIR structure
    ///
    PcirBlockPtr = (INTEL_VBIOS_PCIR_STRUCTURE *) ((UINT8 *) VBiosRomImage + VBiosRomImage->PcirOffset);

    ///
    /// Check if we have an Intel video BIOS OPROM.
/// if ((VBiosRomImage->Signature == OPTION_ROM_SIGNATURE) && (PcirBlockPtr->VendorId == V_SA_MC_VID) && (PcirBlockPtr->ClassCode[0] == 0x00) && (PcirBlockPtr->ClassCode[1] == 0x00) && (PcirBlockPtr->ClassCode[2] == 0x03) ) { /// /// Found Intel video BIOS. /// *VBiosImage = VBiosRomImage; return EFI_SUCCESS; } } /// /// No Intel video BIOS found. /// /// /// Free any allocated buffers /// FreePool (HandleBuffer); return EFI_UNSUPPORTED; } /** Get Intel video BIOS VBT information (i.e. Pointer to VBT and VBT size). The VBT (Video BIOS Table) is a block of customizable data that is built within the video BIOS and edited by customers. @retval EFI_SUCCESS - Video BIOS VBT information returned. @exception EFI_UNSUPPORTED - Could not find VBT information (*VBiosVbtPtr = NULL). **/ EFI_STATUS GetVBiosVbtEndOfDxe ( VOID ) { INTEL_VBIOS_PCIR_STRUCTURE *PcirBlockPtr; UINT32 PcirBlockAddress; UINT16 PciVenderId; INTEL_VBIOS_OPTION_ROM_HEADER *VBiosPtr; VBIOS_VBT_STRUCTURE *VBiosVbtPtr; EFI_LEGACY_BIOS_PROTOCOL *LegacyBios; EFI_STATUS Status; VBIOS_VBT_STRUCTURE *VbtFileBuffer; UINTN Index; UINT8 LegacyVbtFound; GRAPHICS_DXE_CONFIG *GraphicsDxeConfig; EFI_PEI_HOB_POINTERS HobPtr; SI_CONFIG_HOB_DATA *SiConfigHobData; VbtFileBuffer = NULL; LegacyVbtFound = 1; /// /// Get the SA policy. /// Status = gBS->LocateProtocol ( &gSaPolicyProtocolGuid, NULL, (VOID **) &mSaPolicy ); if (EFI_ERROR (Status)) { return Status; } Status = GetConfigBlock ((VOID *) mSaPolicy, &gGraphicsDxeConfigGuid, (VOID *)&GraphicsDxeConfig); ASSERT_EFI_ERROR (Status); LegacyBios = NULL; VBiosPtr = NULL; // // Get Silicon Config data HOB // HobPtr.Guid = GetFirstGuidHob (&gSiConfigHobGuid); SiConfigHobData = (SI_CONFIG_HOB_DATA *)GET_GUID_HOB_DATA (HobPtr.Guid); if (SiConfigHobData->CsmFlag == 1) { Status = gBS->LocateProtocol (&gEfiLegacyBiosProtocolGuid, NULL, (VOID **) &LegacyBios); if (LegacyBios) { VBiosPtr = (INTEL_VBIOS_OPTION_ROM_HEADER *) (UINTN) (VBIOS_LOCATION_PRIMARY); PcirBlockAddress = VBIOS_LOCATION_PRIMARY + VBiosPtr->PcirOffset; PcirBlockPtr = (INTEL_VBIOS_PCIR_STRUCTURE *) (UINTN) (PcirBlockAddress); PciVenderId = PcirBlockPtr->VendorId; /// /// If the video BIOS is not at 0xC0000 or it is not an Intel video BIOS get /// the integrated Intel video BIOS (must be uncompressed). /// if ((VBiosPtr->Signature != OPTION_ROM_SIGNATURE) || (PciVenderId != V_SA_MC_VID)) { GetIntegratedIntelVBiosPtr (&VBiosPtr); if (VBiosPtr != NULL) { /// /// Video BIOS found. /// PcirBlockPtr = (INTEL_VBIOS_PCIR_STRUCTURE *) ((UINT8 *) VBiosPtr + VBiosPtr->PcirOffset); PciVenderId = PcirBlockPtr->VendorId; if ((VBiosPtr->Signature != OPTION_ROM_SIGNATURE) || (PciVenderId != V_SA_MC_VID)) { /// /// Intel video BIOS not found. /// VBiosVbtPtr = NULL; LegacyVbtFound = 0; } } } } } if ((LegacyBios == NULL) || (LegacyVbtFound == 0)) { /// /// No Video BIOS found, try to get VBT from FV. 
/// GetIntegratedIntelVbtPtr (&VbtFileBuffer); if (VbtFileBuffer != NULL) { /// /// Video BIOS not found, use VBT from SaPolicy /// DEBUG ((DEBUG_INFO, "VBT data found\n")); for (Index = 0; (GraphicsDxeConfig->GopVersion[Index] != '\0'); Index++) { } Index = (Index+1)*2; CopyMem (mIgdOpRegion.OpRegion->Header.DVER, GraphicsDxeConfig->GopVersion, Index); CopyMem (mIgdOpRegion.OpRegion->MBox4.RVBT, VbtFileBuffer, VbtFileBuffer->HeaderVbtSize); return EFI_SUCCESS; } } if (VBiosPtr == NULL) { return EFI_UNSUPPORTED; } DEBUG ((DEBUG_INFO, "VBIOS found at 0x%X\n", VBiosPtr)); VBiosVbtPtr = (VBIOS_VBT_STRUCTURE *) ((UINT8 *) VBiosPtr + VBiosPtr->VbtOffset); if ((*((UINT32 *) (VBiosVbtPtr->HeaderSignature))) != VBT_SIGNATURE) { return EFI_UNSUPPORTED; } /// /// Initialize Video BIOS version with its build number. /// mIgdOpRegion.OpRegion->Header.VVER[0] = VBiosVbtPtr->CoreBlockBiosBuild[0]; mIgdOpRegion.OpRegion->Header.VVER[1] = VBiosVbtPtr->CoreBlockBiosBuild[1]; mIgdOpRegion.OpRegion->Header.VVER[2] = VBiosVbtPtr->CoreBlockBiosBuild[2]; mIgdOpRegion.OpRegion->Header.VVER[3] = VBiosVbtPtr->CoreBlockBiosBuild[3]; CopyMem (mIgdOpRegion.OpRegion->MBox4.RVBT, VBiosVbtPtr, VBiosVbtPtr->HeaderVbtSize); /// /// Return final status /// return EFI_SUCCESS; } /** Graphics OpRegion / Software SCI driver installation function. @param[in] void - None @retval EFI_SUCCESS - The driver installed without error. @retval EFI_ABORTED - The driver encountered an error and could not complete installation of the ACPI tables. **/ EFI_STATUS IgdOpRegionInit ( VOID ) { EFI_HANDLE Handle; EFI_STATUS Status; UINT32 DwordData; UINT64 IgdBaseAddress; SA_POLICY_PROTOCOL *SaPolicy; GRAPHICS_DXE_CONFIG *GraphicsDxeConfig; UINT8 Index; SYSTEM_AGENT_NVS_AREA_PROTOCOL *SaNvsAreaProtocol; /// /// Get the SA policy. /// Status = gBS->LocateProtocol (&gSaPolicyProtocolGuid, NULL, (VOID **)&SaPolicy); if (EFI_ERROR (Status)) { return Status; } Status = GetConfigBlock ((VOID *) SaPolicy, &gGraphicsDxeConfigGuid, (VOID *)&GraphicsDxeConfig); ASSERT_EFI_ERROR (Status); /// /// Locate the SA Global NVS Protocol. /// Status = gBS->LocateProtocol ( &gSaNvsAreaProtocolGuid, NULL, (VOID **) &SaNvsAreaProtocol ); ASSERT_EFI_ERROR (Status); /// /// Allocate an ACPI NVS memory buffer as the IGD OpRegion, zero initialize /// the first 1K, and set the IGD OpRegion pointer in the Global NVS /// area structure. /// Status = (gBS->AllocatePool) (EfiACPIMemoryNVS, sizeof (IGD_OPREGION_STRUCTURE), (VOID **) &mIgdOpRegion.OpRegion); ASSERT_EFI_ERROR (Status); SetMem (mIgdOpRegion.OpRegion, sizeof (IGD_OPREGION_STRUCTURE), 0); SaNvsAreaProtocol->Area->IgdOpRegionAddress = (UINT32) (UINTN) (mIgdOpRegion.OpRegion); /// /// If IGD is disabled return /// IgdBaseAddress = PCI_SEGMENT_LIB_ADDRESS (SA_SEG_NUM, SA_IGD_BUS, SA_IGD_DEV, SA_IGD_FUN_0, 0); if (PciSegmentRead32 (IgdBaseAddress + 0) == 0xFFFFFFFF) { return EFI_SUCCESS; } /// /// Initialize OpRegion Header /// CopyMem (mIgdOpRegion.OpRegion->Header.SIGN, HEADER_SIGNATURE, sizeof (HEADER_SIGNATURE)); /// /// Set OpRegion Size in KBs /// mIgdOpRegion.OpRegion->Header.SIZE = HEADER_SIZE / 1024; mIgdOpRegion.OpRegion->Header.OVER = (UINT32) (LShiftU64 (HEADER_OPREGION_VER, 16) + LShiftU64 (HEADER_OPREGION_REV, 8)); /// /// All Mailboxes are supported. /// mIgdOpRegion.OpRegion->Header.MBOX = HEADER_MBOX_SUPPORT; /// /// Initialize OpRegion Mailbox 1 (Public ACPI Methods). /// /// Note - The initial setting of mailbox 1 fields is implementation specific. 
  /// Adjust them as needed; many may even come from user settings in setup.
  ///

  ///
  /// Initialize OpRegion Mailbox 3 (ASLE Interrupt and Power Conservation).
  ///
  /// Note - The initial setting of mailbox 3 fields is implementation specific.
  /// Adjust them as needed; many may even come from user settings in setup.
  ///

  ///
  /// Do not initialize TCHE. This field is written by the graphics driver only.
  ///

  ///
  /// The ALSI field is generally initialized by ASL code by reading the embedded controller.
  ///
  mIgdOpRegion.OpRegion->Header.PCON = GraphicsDxeConfig->PlatformConfig;
  mIgdOpRegion.OpRegion->Header.PCON = mIgdOpRegion.OpRegion->Header.PCON | 0x2;

  mIgdOpRegion.OpRegion->MBox3.BCLP = BACKLIGHT_BRIGHTNESS;

  mIgdOpRegion.OpRegion->MBox3.PFIT = (FIELD_VALID_BIT | PFIT_STRETCH);

  ///
  /// Report to the driver for VR IMON calibration. Bits [5:1]; supported values are 14A to 31A.
  ///
  mIgdOpRegion.OpRegion->MBox3.PCFT = (SaNvsAreaProtocol->Area->GfxTurboIMON << 1) & 0x003E;

  ///
  /// Set initial current brightness
  ///
  mIgdOpRegion.OpRegion->MBox3.CBLV = (INIT_BRIGHT_LEVEL | FIELD_VALID_BIT);

  ///
  /// Static Backlight Brightness Level Duty cycle Mapping Table
  ///
  for (Index = 0; Index < MAX_BCLM_ENTRIES; Index++) {
    mIgdOpRegion.OpRegion->MBox3.BCLM[Index] = GraphicsDxeConfig->BCLM[Index];
  }

  mIgdOpRegion.OpRegion->MBox3.IUER = 0x00;
  if (!EFI_ERROR (Status)) {
    mIgdOpRegion.OpRegion->MBox3.IUER = GraphicsDxeConfig->IuerStatusVal;
  }

  ///
  /// Initialize hardware state:
  ///   Set ASLS Register to the OpRegion physical memory address.
  ///   Set SWSCI register bit 15 to a "1" to activate SCI interrupts.
  ///
  PciSegmentWrite32 (IgdBaseAddress + R_SA_IGD_ASLS_OFFSET, (UINT32) (UINTN) (mIgdOpRegion.OpRegion));
  PciSegmentAndThenOr16 (IgdBaseAddress + R_SA_IGD_SWSCI_OFFSET, (UINT16) ~(BIT0), BIT15);

  DwordData = PciSegmentRead32 (IgdBaseAddress + R_SA_IGD_ASLS_OFFSET);
  S3BootScriptSaveMemWrite (
    S3BootScriptWidthUint32,
    (UINTN) PcdGet64 (PcdPciExpressBaseAddress) + (IgdBaseAddress + R_SA_IGD_ASLS_OFFSET),
    1,
    &DwordData
    );

  DwordData = PciSegmentRead32 (IgdBaseAddress + R_SA_IGD_SWSCI_OFFSET);
  S3BootScriptSaveMemWrite (
    S3BootScriptWidthUint32,
    (UINTN) PcdGet64 (PcdPciExpressBaseAddress) + (IgdBaseAddress + R_SA_IGD_SWSCI_OFFSET),
    1,
    &DwordData
    );

  ///
  /// Install OpRegion / Software SCI protocol
  ///
  Handle = NULL;
  Status = gBS->InstallMultipleProtocolInterfaces (
                  &Handle,
                  &gIgdOpRegionProtocolGuid,
                  &mIgdOpRegion,
                  NULL
                  );
  ASSERT_EFI_ERROR (Status);

  ///
  /// Return final status
  ///
  return EFI_SUCCESS;
}

/**
  Update Graphics OpRegion after PCI enumeration.

  @param[in] void     - None

  @retval EFI_SUCCESS - The function completed successfully.
**/
EFI_STATUS
UpdateIgdOpRegionEndOfDxe (
  VOID
  )
{
  EFI_STATUS           Status;
  UINTN                HandleCount;
  EFI_HANDLE           *HandleBuffer;
  UINTN                Index;
  EFI_PCI_IO_PROTOCOL  *PciIo;
  PCI_TYPE00           Pci;
  UINTN                Segment;
  UINTN                Bus;
  UINTN                Device;
  UINTN                Function;

  Bus      = 0;
  Device   = 0;
  Function = 0;

  DEBUG ((DEBUG_INFO, "UpdateIgdOpRegionEndOfDxe\n"));

  mIgdOpRegion.OpRegion->Header.PCON |= BIT8;             // Mark the External Gfx Adapter field as valid
  mIgdOpRegion.OpRegion->Header.PCON &= (UINT32) (~BIT7); // Assume no External Gfx Adapter

  ///
  /// Get all PCI IO protocol handles
  ///
  Status = gBS->LocateHandleBuffer (
                  ByProtocol,
                  &gEfiPciIoProtocolGuid,
                  NULL,
                  &HandleCount,
                  &HandleBuffer
                  );
  if (!EFI_ERROR (Status)) {
    for (Index = 0; Index < HandleCount; Index++) {
      ///
      /// Get the PCI IO Protocol Interface corresponding to each handle
      ///
      Status = gBS->HandleProtocol (
                      HandleBuffer[Index],
                      &gEfiPciIoProtocolGuid,
                      (VOID **) &PciIo
                      );
      if (!EFI_ERROR (Status)) {
        ///
        /// Read the PCI configuration space
        ///
        Status = PciIo->Pci.Read (
                              PciIo,
                              EfiPciIoWidthUint32,
                              0,
                              sizeof (Pci) / sizeof (UINT32),
                              &Pci
                              );
        ///
        /// Find the display controller devices
        ///
        if (!EFI_ERROR (Status) && IS_PCI_DISPLAY (&Pci)) {
          Status = PciIo->GetLocation (
                            PciIo,
                            &Segment,
                            &Bus,
                            &Device,
                            &Function
                            );
          //
          // Assumption: onboard devices sit on bus 0, while external devices sit on a bus number > 0
          //
          if (!EFI_ERROR (Status) && (Bus > 0)) {
            // External Gfx Adapter detected and available
            DEBUG ((DEBUG_INFO, "PCON - External Gfx Adapter Detected and Available\n"));
            mIgdOpRegion.OpRegion->Header.PCON |= BIT7;
            break;
          }
        }
      }
    }
  }

  ///
  /// Free any allocated buffers
  ///
  if (HandleBuffer != NULL) {
    FreePool (HandleBuffer);
  }

  ///
  /// Return final status
  ///
  return Status;
}
/**
 * This source file is adapted from the class <a href="https://github.com/tyrantgit/HeartLayout/blob/master/heartlayout/src/main/java/tyrantgit/widget/PathAnimator.java">PathAnimator</a> in the HeartLayout project by tyrantgit on GitHub
 */
public class PathAnimation extends Animation {

    private float mLength;
    private float mRotation;
    private PathMeasure mPathMeasure;

    public PathAnimation(Path path, float rotation) {
        mPathMeasure = new PathMeasure(path, false);
        mLength = mPathMeasure.getLength();
        mRotation = rotation;
    }

    @Override
    protected void applyTransformation(float interpolatedTime, Transformation t) {
        final Matrix matrix = t.getMatrix();
        // The timing assumes a 3000 ms animation: pop the view from 0.2x to
        // 1.1x during the first 200 ms, then settle back to 1.0x between
        // 200 ms and 300 ms.
        float scale = 1F;
        if (3000.0F * interpolatedTime < 200.0F) {
            scale = scale(interpolatedTime, 0.0D, 0.06666667014360428D, 0.20000000298023224D, 1.100000023841858D);
        } else if (3000.0F * interpolatedTime < 300.0F) {
            scale = scale(interpolatedTime, 0.06666667014360428D, 0.10000000149011612D, 1.100000023841858D, 1.0D);
        }
        // Position the view at the path point for the elapsed fraction,
        // then apply the scale, rotation, and fade-out.
        mPathMeasure.getMatrix(mLength * interpolatedTime, matrix, PathMeasure.POSITION_MATRIX_FLAG);
        matrix.preScale(scale, scale);
        matrix.postRotate(mRotation * interpolatedTime);
        t.setAlpha(1.0F - interpolatedTime);
    }

    // Linearly maps a from the range [b, c] to the range [d, e].
    private float scale(double a, double b, double c, double d, double e) {
        return (float) ((a - b) / (c - b) * (e - d) + d);
    }
}
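A hypothetical usage sketch: float a view up along a curved path while it rotates once and fades out. The path control points and the enclosing method are invented; only the PathAnimation constructor above and standard Android Animation APIs are used.

// Hypothetical usage of PathAnimation; the control points are invented.
void startFloatAnimation(View view) {
    Path path = new Path();
    path.moveTo(0f, 0f);
    path.cubicTo(120f, -300f, -120f, -600f, 0f, -900f);
    PathAnimation animation = new PathAnimation(path, 360f);
    animation.setDuration(3000); // matches the 3000 ms timing assumed in applyTransformation()
    view.startAnimation(animation);
}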
/** * equals(Object) must return <code>false</code> for null */ @TestTargetNew( level = TestLevel.PARTIAL_COMPLETE, notes = "Null parameter checked", method = "equals", args = {java.lang.Object.class} ) public void testEqualsObject_00() { CodeSource thiz = new CodeSource(urlSite, (Certificate[]) null); assertFalse(thiz.equals(null)); }
package jacl import ( "fmt" "reflect" ) // ------------------------------------------------------------ // NIL-CMP // nilCmp compares a single item to another. type nilCmp struct { } func (c nilCmp) Cmp(b interface{}) error { if !isNilInterface(b) { return newComparisonError(fmt.Sprintf(haveWantFmt, toJson(b), `nil`)) } return nil } func (c nilCmp) SerializeKey() string { return nilCmpFactoryKey } func isNilInterface(i interface{}) bool { if i == nil { return true } switch reflect.TypeOf(i).Kind() { case reflect.Ptr, reflect.Map, reflect.Array, reflect.Chan, reflect.Slice: return reflect.ValueOf(i).IsNil() } return false }
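The reflect-based check in isNilInterface matters because a typed nil stored in an interface is not `== nil`. A small illustrative sketch (the example function is invented):

package jacl

import "fmt"

// Hypothetical illustration of why isNilInterface uses reflection:
// a nil *int stored in an interface{} still carries a type, so a plain
// comparison against nil reports false.
func ExampleTypedNil() {
	var p *int = nil
	var i interface{} = p
	fmt.Println(i == nil)          // false: the interface holds the type *int
	fmt.Println(isNilInterface(i)) // true: reflect inspects the underlying nil pointer
}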
import React from "react"
import styles from "./ShapePanel.module.css"
import {Card, CardHeader} from "@material-ui/core"

export interface IShapeItemProps {
    model: any;
    name: string;
}

export interface IShapeItemState {
}

export class ShapeItem extends React.Component<IShapeItemProps, IShapeItemState> {
    constructor(props: IShapeItemProps) {
        super(props)
        this.state = {}
    }

    render() {
        return (
            <Card draggable={true}
                  onDragStart={(event) => {
                      event.dataTransfer.setData("storm-diagram-node", JSON.stringify(this.props.model))
                  }}
                  className={styles.trayItem}>
                <CardHeader titleTypographyProps={{variant: "subtitle1"}}
                            style={{paddingLeft: 8, paddingRight: 8, paddingTop: 0, paddingBottom: 0}}
                            title={this.props.name}/>
            </Card>
        )
    }
}
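A hypothetical usage sketch of this draggable tray item; the palette component, shape names, and model payloads below are all invented:

// Hypothetical palette built from ShapeItem entries; the model shapes are invented.
export const ShapePalette: React.FunctionComponent = () => (
    <div>
        <ShapeItem name="If Condition" model={{type: "if"}}/>
        <ShapeItem name="While Loop" model={{type: "while"}}/>
    </div>
)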
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include <memory>
#include <string>
#include <vector>

#include "tensorflow/compiler/plugin/poplar/driver/tools/custom_ops/sequence_slice.h"
#include "tensorflow/compiler/plugin/poplar/driver/tools/hlo_poplar_buffer_util.h"
#include "tensorflow/compiler/plugin/poplar/driver/tools/matcher_predicates.h"
#include "tensorflow/compiler/plugin/poplar/kernels/custom_kernels_util.h"
#include "tensorflow/compiler/plugin/poplar/kernels/ops.pb.h"
#include "tensorflow/compiler/xla/shape_util.h"

namespace xla {
namespace poplarplugin {

/*
 * HloSequenceSliceInstruction
 */
HloSequenceSliceInstruction::HloSequenceSliceInstruction(
    const Shape& shape, HloInstruction* const dst, HloInstruction* const src,
    HloInstruction* const num_elems, HloInstruction* const src_offsets,
    HloInstruction* const dst_offsets, bool zero_unused)
    : HloPoplarInstruction(shape,
                           {dst, src, num_elems, src_offsets, dst_offsets},
                           PoplarOp::SequenceSlice),
      zero_unused(zero_unused) {}

HloSequenceSliceInstruction::HloSequenceSliceInstruction(
    const Shape& shape, HloInstruction* const src,
    HloInstruction* const num_elems, HloInstruction* const src_offsets,
    HloInstruction* const dst_offsets, bool zero_unused, PoplarOp op)
    : HloPoplarInstruction(shape, {src, num_elems, src_offsets, dst_offsets},
                           op),
      zero_unused(zero_unused) {}

absl::flat_hash_set<int64> HloSequenceSliceInstruction::AllocatingIndices()
    const {
  return {};
}

absl::flat_hash_map<int64, int64>
HloSequenceSliceInstruction::LayoutDependencies() const {
  return {};
}

HloPoplarUseDescriptions HloSequenceSliceInstruction::GetUseDescriptions()
    const {
  return UseDescriptionsSimpleNoTuple0thOperandAliasing(this);
}

HloPoplarBufferDescriptions
HloSequenceSliceInstruction::GetBufferDescriptions() const {
  return BufferDescriptionsNoAllocations();
}

bool HloSequenceSliceInstruction::AllocatingOutput() const { return false; }

const FindConsumersExtensionResults HloSequenceSliceInstruction::FindConsumers(
    FindConsumersExtensionParams params) const {
  auto op_index = params.op_index;
  if ((op_index == 0) || (IsAnyScaledInplace(this) && op_index < 2)) {
    FindConsumersExtensionResults result{true, this, params.index,
                                         params.permutation};
    return result;
  }
  return FindConsumersExtensionResults::DoNotFindConsumers();
}

bool HloSequenceSliceInstruction::AllowNonInplaceLowering() const {
  return false;
}

bool HloSequenceSliceInstruction::IsPopOpsElementwise() const { return false; }

bool HloSequenceSliceInstruction::ZeroUnused() const { return zero_unused; }

std::unique_ptr<HloInstruction>
HloSequenceSliceInstruction::CloneWithNewOperandsImpl(
    const Shape& shape, absl::Span<HloInstruction* const> new_operands,
    HloCloneContext*) const {
  return absl::make_unique<HloSequenceSliceInstruction>(
      shape, new_operands[0], new_operands[1], new_operands[2],
      new_operands[3], new_operands[4], ZeroUnused());
}

std::vector<std::string>
HloSequenceSliceInstruction::ExtraPoplarAttributesToStringImpl(
    const HloPrintOptions& options) const {
  // ZeroUnused() is a bool, so convert it to text before concatenating.
  return {"zero_unused=" + std::to_string(ZeroUnused())};
}

namespace {

StatusOr<std::unique_ptr<HloSequenceSliceInstruction>>
MakeSequenceSliceInstruction(HloCustomCallInstruction* call,
                             bool zero_unused) {
  return absl::make_unique<HloSequenceSliceInstruction>(
      call->shape(), call->mutable_operand(0), call->mutable_operand(1),
      call->mutable_operand(2), call->mutable_operand(3),
      call->mutable_operand(4), zero_unused);
}

StatusOr<std::unique_ptr<HloSequenceSliceInstruction>>
HloSequenceSliceInstructionFactoryFunc(HloCustomCallInstruction* call) {
  auto attribute_map = IPUCustomKernelsUtil::AttributeMap(call);
  TF_ASSIGN_OR_RETURN(bool zero_unused,
                      attribute_map.GetAttributeAsBool("zero_unused"));

  return MakeSequenceSliceInstruction(call, zero_unused);
}

static HloPoplarInstructionFactory sequence_slice_factory(
    PoplarOp::SequenceSlice, HloSequenceSliceInstructionFactoryFunc);

}  // namespace

/*
 * HloSequenceSliceUnpackInstruction
 */
HloSequenceSliceUnpackInstruction::HloSequenceSliceUnpackInstruction(
    const Shape& shape, HloInstruction* const src,
    HloInstruction* const num_elems, HloInstruction* const src_offsets,
    HloInstruction* const dst_offsets, bool zero_unused, int64 total_elements)
    : HloSequenceSliceInstruction(shape, src, num_elems, src_offsets,
                                  dst_offsets, zero_unused,
                                  PoplarOp::SequenceSliceUnpack),
      total_elements(total_elements) {}

HloPoplarUseDescriptions
HloSequenceSliceUnpackInstruction::GetUseDescriptions() const {
  return UseDescriptionsNoInputOutputAlias();
}

HloPoplarBufferDescriptions
HloSequenceSliceUnpackInstruction::GetBufferDescriptions() const {
  return BufferDescriptionsAllocatesAllOutputs(this);
}

bool HloSequenceSliceUnpackInstruction::AllocatingOutput() const {
  return true;
}

int64 HloSequenceSliceUnpackInstruction::TotalElements() const {
  return total_elements;
}

std::unique_ptr<HloInstruction>
HloSequenceSliceUnpackInstruction::CloneWithNewOperandsImpl(
    const Shape& shape, absl::Span<HloInstruction* const> new_operands,
    HloCloneContext*) const {
  return absl::make_unique<HloSequenceSliceUnpackInstruction>(
      shape, new_operands[0], new_operands[1], new_operands[2],
      new_operands[3], ZeroUnused(), TotalElements());
}

namespace {

StatusOr<std::unique_ptr<HloSequenceSliceUnpackInstruction>>
MakeSequenceSliceUnpackInstruction(HloCustomCallInstruction* call,
                                   bool zero_unused, int64 total_elements) {
  return absl::make_unique<HloSequenceSliceUnpackInstruction>(
      call->shape(), call->mutable_operand(0), call->mutable_operand(1),
      call->mutable_operand(2), call->mutable_operand(3), zero_unused,
      total_elements);
}

StatusOr<std::unique_ptr<HloSequenceSliceUnpackInstruction>>
HloSequenceSliceUnpackInstructionFactoryFunc(HloCustomCallInstruction* call) {
  auto attribute_map = IPUCustomKernelsUtil::AttributeMap(call);
  TF_ASSIGN_OR_RETURN(bool zero_unused,
                      attribute_map.GetAttributeAsBool("zero_unused"));
  TF_ASSIGN_OR_RETURN(int64 total_elements,
                      attribute_map.GetAttributeAsBool("total_elements"));

  return MakeSequenceSliceUnpackInstruction(call, zero_unused, total_elements);
}

static HloPoplarInstructionFactory sequence_slice_unpack_factory(
    PoplarOp::SequenceSliceUnpack,
    HloSequenceSliceUnpackInstructionFactoryFunc);

}  // namespace

}  // namespace poplarplugin
}  // namespace xla
/**
 * Don't allow clients to submit resources with binary storage attachments declared unless the ID was already in the
 * resource. In other words, only HAPI itself may add a binary storage ID extension to a resource unless that
 * extension was already present.
 */
private void blockIllegalExternalBinaryIds(IBaseResource thePreviousResource, IBaseResource theResource) {
	Set<String> existingBinaryIds = new HashSet<>();

	if (thePreviousResource != null) {
		List<? extends IPrimitiveType<byte[]>> base64fields =
			myCtx.newTerser().getAllPopulatedChildElementsOfType(thePreviousResource, myBinaryType);
		for (IPrimitiveType<byte[]> nextBase64 : base64fields) {
			if (nextBase64 instanceof IBaseHasExtensions) {
				((IBaseHasExtensions) nextBase64)
					.getExtension()
					.stream()
					.filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null)
					.filter(t -> EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl()))
					.map(t -> (IPrimitiveType<?>) t.getValue())
					.map(t -> t.getValueAsString())
					.filter(t -> isNotBlank(t))
					.forEach(t -> existingBinaryIds.add(t));
			}
		}
	}

	List<? extends IPrimitiveType<byte[]>> base64fields =
		myCtx.newTerser().getAllPopulatedChildElementsOfType(theResource, myBinaryType);
	for (IPrimitiveType<byte[]> nextBase64 : base64fields) {
		if (nextBase64 instanceof IBaseHasExtensions) {
			Optional<String> hasExternalizedBinaryReference = ((IBaseHasExtensions) nextBase64)
				.getExtension()
				.stream()
				.filter(t -> t.getUserData(JpaConstants.EXTENSION_EXT_SYSTEMDEFINED) == null)
				.filter(t -> EXT_EXTERNALIZED_BINARY_ID.equals(t.getUrl()))
				.map(t -> (IPrimitiveType<?>) t.getValue())
				.map(t -> t.getValueAsString())
				.filter(t -> isNotBlank(t))
				.filter(t -> !existingBinaryIds.contains(t))
				.findFirst();
			if (hasExternalizedBinaryReference.isPresent()) {
				String msg = myCtx.getLocalizer().getMessage(BinaryStorageInterceptor.class,
					"externalizedBinaryStorageExtensionFoundInRequestBody",
					EXT_EXTERNALIZED_BINARY_ID, hasExternalizedBinaryReference.get());
				throw new InvalidRequestException(Msg.code(1329) + msg);
			}
		}
	}
}
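The rule described in the comment above boils down to an allow-list check: gather the externalized-binary IDs already present on the stored version of the resource, then reject any ID on the incoming resource that is not in that set. A minimal sketch of the same shape in Python; the dictionary layout and the 'binary_extensions' key are illustrative placeholders, not the HAPI data model:

def block_illegal_external_binary_ids(previous_resource, incoming_resource):
    """Reject externalized-binary IDs that were not already on the stored resource."""
    # IDs already present on the stored version are grandfathered in.
    existing_ids = set()
    if previous_resource is not None:
        existing_ids = {ext.get('id') for ext in previous_resource.get('binary_extensions', [])
                        if ext.get('id')}

    # Any new ID on the incoming resource must have been added by the client: reject it.
    for ext in incoming_resource.get('binary_extensions', []):
        ext_id = ext.get('id')
        if ext_id and ext_id not in existing_ids:
            raise ValueError('externalized binary storage extension found in request body: ' + ext_id)

# Example: the stored resource already carries id 'abc', so resubmitting it is fine,
# but a client-invented 'xyz' would be rejected.
stored = {'binary_extensions': [{'id': 'abc'}]}
ok = {'binary_extensions': [{'id': 'abc'}]}
bad = {'binary_extensions': [{'id': 'abc'}, {'id': 'xyz'}]}
block_illegal_external_binary_ids(stored, ok)      # passes silently
# block_illegal_external_binary_ids(stored, bad)   # raises ValueError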
#include "blis.h" #include "complex_math.hpp" #include <vector> #include <array> #include <cassert> inline void increment(inc_t, gint_t) {} template <typename T, typename... Args> void increment(inc_t n, gint_t i, T& off, const inc_t* s, Args&... args) { off += s[i]*n; increment(n, i, args...); } template <typename Body, typename... Args> void for_each_impl(gint_t ndim, const dim_t* n, dim_t off, dim_t len, Body& body, Args&... args) { std::array<dim_t,8> i = {}; assert( ndim <= i.size() ); if ( off ) { for ( gint_t k = 0; k < ndim; k++ ) { i[k] = off % n[k]; off /= n[k]; increment(i[k], k, args...); } } for ( dim_t pos = 0; pos < len; pos++ ) { body(); for ( gint_t k = 0; k < ndim; k++ ) { if ( i[k] == n[k]-1 ) { increment(-i[k], k, args...); i[k] = 0; } else { increment(1, k, args...); i[k]++; break; } } } } template <typename T, typename Body> void for_each(gint_t ndim, const dim_t* n, dim_t off, dim_t len, T& a, const inc_t* s_a, Body&& body) { for_each_impl( ndim, n, off, len, body, a, s_a ); } template <typename T, typename Body> void for_each(gint_t ndim, const dim_t* n, dim_t off, dim_t len, T& a, const inc_t* s_a, T& b, const inc_t* s_b, Body&& body) { for_each_impl( ndim, n, off, len, body, a, s_a, b, s_b ); } template <typename T, typename Body> void for_each(gint_t ndim, const dim_t* n, T& a, const inc_t* s_a, Body&& body) { dim_t len = 1; for ( gint_t i = 0;i < ndim;i++ ) len *= n[i]; for_each_impl( ndim, n, 0, len, body, a, s_a ); } template <typename T, typename Body> void for_each(gint_t ndim, const dim_t* n, T& a, const inc_t* s_a, T& b, const inc_t* s_b, Body&& body) { dim_t len = 1; for ( gint_t i = 0;i < ndim;i++ ) len *= n[i]; for_each_impl( ndim, n, 0, len, body, a, s_a, b, s_b ); } void tcontract_ref( num_t dt, const std::vector<dim_t>& m, const std::vector<dim_t>& n, const std::vector<dim_t>& k, const void* alpha, const void* a, const std::vector<inc_t>& rs_a, const std::vector<inc_t>& cs_a, const void* b, const std::vector<inc_t>& rs_b, const std::vector<inc_t>& cs_b, const void* beta, void* c, const std::vector<inc_t>& rs_c, const std::vector<inc_t>& cs_c );
When it comes to battling prostitution and child exploitation, the sheriff for the nation's second-largest county is walking tall and carrying a big stick. That's why Illinois' Cook County Sheriff Thomas Dart and the operators of online classified portal Backpage.com are in a legal duel of sorts—and the First Amendment and federal law protecting website operators are squarely in the crosshairs.

The latest public battle between them commenced July 1, when Dart announced that Visa and MasterCard, at his urging, agreed to stop processing ad payments for what the sheriff described as "sex trafficking industry profiteer Backpage.com." According to the sheriff:

Such ads – millions of them posted a year – make up the foundation of a booming modern sex trafficking industry. It is a violent business that preys on the young and vulnerable, yet one that hides that reality behind a sense of normalcy created by sites like Backpage.com. Removing Visa and MasterCard from the business will raise the bar for pimps and traffickers who seek to place ads, help eliminate a financial incentive to host such ads, and remove the mask of normalcy that has fueled trafficking's dangerous growth.

American Express was previously an option for trafficking ad buyers, but that company removed its card as a payment option earlier in 2015.

Backpage isn't gonna take it any longer. In a Tuesday federal lawsuit, Backpage says Dart's actions and the "credit card companies' acquiescence to his pressure have cut off nearly all revenue."

We're not going to take it...

The suit maintains that Dart's actions violate the Communications Decency Act because "ads are protected speech and that websites are immune from state-law civil or criminal liability" for what their users post. What's more, the site claims in its suit that Dart's actions amount to "an informal extralegal prior restraint of speech." According to the suit: (PDF)

For over six years, Sheriff Dart has pursued a campaign against online classified advertising websites—first Craigslist and then Backpage.com—demanding they shut down portions of their sites for adult-oriented ads posted by users. At every turn, Sheriff Dart has been stymied by the Constitution, federal law, and court decisions holding that such ads are protected speech and that websites are immune from state-law civil or criminal liability.

Craigslist, however, caved and removed its "adult services" section in 2010.

Backpage says it uses automated and human filtering mechanisms that flag ads connected to illegal activity. The site said it "blocks or removes over a million ads per month" and reports to the National Center for Missing and Exploited Children "any ad that may concern child exploitation."

The suit claims that during backroom meetings between the sheriff and Backpage, Dart had requested that the company require credit cards as the method of payment for adult ads—so the ad poster could be traced. That was something Backpage was already doing.

When the sheriff talks, financial institutions listen

It took just days for Visa and MasterCard to comply with Dart's letters to them requesting they cease doing business with Backpage. The letters, (PDF) on official sheriff's letterhead, "request that your institution immediately cease and desist from allowing your credit cards to be used to place ads on websites like Backpage.com, which we have objectively found to promote prostitution and facilitate online sex trafficking."
The letter challenged the credit card companies to find an ad in the "Adult Escort" section "that is not selling sex." Dart wrote that the financial institutions "have the moral, social and legal right" to combat prostitution. Last year, Dart supported proposed legislation that would allow federal prosecutors to shut down sites like Backpage.
<gh_stars>1-10 /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.doplgangr.secrecy.Views; import android.content.Context; import android.graphics.Bitmap; import android.os.AsyncTask; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.TextView; import android.widget.ViewAnimator; import com.doplgangr.secrecy.CustomApp; import com.doplgangr.secrecy.FileSystem.File; import com.doplgangr.secrecy.Jobs.ThumbnailLoadJob; import com.doplgangr.secrecy.R; import com.ipaulpro.afilechooser.utils.FileUtils; import java.util.ArrayList; import java.util.Comparator; import de.greenrobot.event.EventBus; class FilesListAdapter extends ArrayAdapter<File> { // store the context (as an inflated layout) private final LayoutInflater inflater; // store the resource (typically file_item.xml) private final int resource; private final ArrayList<ViewNIndex> checked = new ArrayList<ViewNIndex>(); private boolean isGallery; // store (a reference to) the data private ArrayList<File> data = new ArrayList<File>(); public FilesListAdapter(Context context, int layout) { super(context, layout, new ArrayList<File>()); this.inflater = (LayoutInflater) context.getSystemService(Context.LAYOUT_INFLATER_SERVICE); this.resource = layout; this.isGallery = (layout == R.layout.gallery_item); if (!EventBus.getDefault().isRegistered(this)) EventBus.getDefault().register(this); } /** * Add data to data set. */ public void add(File file) { if (file == null) return; if (file.getName() == null) return; if (isGallery) { String mimeType = FileUtils.getMimeType(file.getFile()); if (mimeType != null) if (!mimeType.contains("image")) return; //abort if not images. } if (!data.contains(file)) data.add(file); notifyDataSetChanged(); } /** * Add data to data set. */ public void remove(int position) { data.remove(position); notifyDataSetChanged(); } /** * Return the size of the data set. */ public int getCount() { return this.data.size(); } /** * Return if index is in data set. */ public boolean hasIndex(int position) { return this.data.size() > position && position > -1; } /** * Return an object in the data set. */ public File getItem(int position) { return this.data.get(position); } /** * Return the position provided. */ public int getItemId(File file) { return data.indexOf(file); } /** * Return a generated view for a position. 
 */
    public View getView(File file, View convertView, ViewGroup parent) {
        int position = data.indexOf(file);
        return getView(position, convertView, parent);
    }

    public View getView(int position, View convertView, ViewGroup parent) {
        // reuse a given view, or inflate a new one from the xml
        View view;
        if (convertView == null) {
            view = this.inflater.inflate(resource, parent, false);
            ViewHolder viewHolder = new ViewHolder();
            viewHolder.name = (TextView) view.findViewById(R.id.name);
            viewHolder.type = (TextView) view.findViewById(R.id.type);
            viewHolder.size = (TextView) view.findViewById(R.id.size);
            viewHolder.date = (TextView) view.findViewById(R.id.date);
            viewHolder.thumbnail = (ImageView) view.findViewById(R.id.thumbNail);
            viewHolder.frame = (FrameLayout) view.findViewById(R.id.frame);
            viewHolder.animator = (ViewAnimator) view.findViewById(R.id.viewAnimator);
            viewHolder.selected = false;
            viewHolder.page = 0;
            view.setTag(viewHolder);
        } else {
            view = convertView;
            ViewHolder viewHolder = (ViewHolder) view.getTag();
            viewHolder.selected = false;
            for (ViewNIndex obj : checked)
                if (obj.index == position)
                    viewHolder.selected = true;
        }
        // bind the data to the view object
        return this.bindData(view, position);
    }

    /**
     * Replace the data set and reset the selection.
     */
    public void update(ArrayList<File> data) {
        this.data = data;
        checked.clear();
    }

    /**
     * Bind the provided data to the view.
     * This is the only method not required by the base adapter.
     */
    View bindData(final View view, int position) {
        final ViewHolder viewHolder = (ViewHolder) view.getTag();

        // make sure it's worth drawing the view
        if (position >= this.data.size()) // To prevent out of bound exception
            return view;
        if (this.data.get(position) == null)
            return view;

        // pull out the object
        final File file = this.data.get(position);

        if (viewHolder.name != null)
            viewHolder.name.setText(file.getName());
        if (viewHolder.type != null)
            viewHolder.type.setText(file.getType());
        if (viewHolder.size != null)
            viewHolder.size.setText(file.getSize());
        if (viewHolder.date != null)
            viewHolder.date.setText(file.getTimestamp());
        if (viewHolder.thumbnail != null) {
            viewHolder.thumbnail.setVisibility(View.GONE);
            viewHolder.thumbnail.setTag(file.getName());
        }
        if (viewHolder.frame != null)
            viewHolder.frame.setForeground(
                    viewHolder.selected
                            ? getContext().getResources().getDrawable(R.drawable.file_selector)
                            : null);
        if (viewHolder.animator != null)
            viewHolder.animator.setDisplayedChild(viewHolder.page);

        final int avatar_size = (int) CustomApp.context.getResources()
                .getDimension(R.dimen.list_item_avatar_size);

        // This class binds a decrypted thumbnail to the UI off the main thread
        class BindImageTask extends AsyncTask<File, Void, Bitmap> {
            protected Bitmap doInBackground(File...
files) { if (isGallery) return files[0].getThumb(100); return files[0].getThumb(avatar_size); // async decrypt thumbnail } protected void onPostExecute(Bitmap thumbnail) { String name = (String) viewHolder.thumbnail.getTag(); if (name.equals(file.getName()) && (thumbnail != null) && (viewHolder.thumbnail != null)) { viewHolder.thumbnail.setImageBitmap(thumbnail); // bind thumbnail in UI thread viewHolder.thumbnail.setVisibility(View.VISIBLE); } } } new BindImageTask().execute(file); // return the final view object return view; } public void onEventMainThread(ThumbnailLoadJob.ThumbLoadDoneEvent event) { try { String name = (String) event.imageView.getTag(); if (name.equals(event.file.getName()) && (event.bitmap != null) && (event.imageView != null)) { event.imageView.setImageBitmap(event.bitmap); // bind thumbnail in UI thread event.imageView.setVisibility(View.VISIBLE); } } catch (OutOfMemoryError ignored) { } } public Boolean select(int position, View view) { ViewNIndex object = new ViewNIndex(position, view); for (ViewNIndex obj : checked) if (position == obj.index) { checked.remove(checked.indexOf(obj)); return false; } checked.add(object); return true; } public ArrayList<ViewNIndex> getSelected() { return checked; } public void clearSelected() { checked.clear(); } public void clear() { data.clear(); } public void sort() { this.sort(new Comparator<File>() { @Override public int compare(com.doplgangr.secrecy.FileSystem.File file, com.doplgangr.secrecy.FileSystem.File file2) { return file.getName().compareTo(file2.getName()); } }); notifyDataSetChanged(); } static class ViewHolder { public TextView name; public TextView type; public TextView size; public TextView date; public ImageView thumbnail; public FrameLayout frame; public Boolean selected; public ViewAnimator animator; public int page; } static class ViewNIndex { public Integer index; public View view; public ViewNIndex(Integer index, View view) { this.index = index; this.view = view; } } }
import time


class ds18b20:
    """Reads ds18b20 1-wire bus sensor data (temperature)."""

    def __init__(self, config):
        self.config = config
        self.s_path = '/sys/bus/w1/devices/%s/w1_slave' % (self.config['path'])

    def read(self, data):
        return [[data['ts'], self.config['id'], self.config['name'], self.read_temp()]]

    def read_raw(self):
        """Reads the raw content of the sensor "file"."""
        s_file = open(self.s_path, 'r')
        lines = s_file.readlines()
        s_file.close()
        return lines

    def read_temp(self):
        """Converts the raw content of the sensor "file" to a temperature as float.

        Returns 'NO-DATA' if the sensor cannot be read or parsed.
        """
        try:
            lines = self.read_raw()
            # The first line ends in 'YES' once the CRC check has passed.
            while lines[0].strip()[-3:] != 'YES':
                time.sleep(0.2)
                lines = self.read_raw()
            # The second line carries the reading in millidegrees, e.g. 't=21312'.
            temp_output = lines[1].find('t=')
            if temp_output != -1:
                temp_string = lines[1].strip()[temp_output + 2:]
                temp_c = float(temp_string) / 1000.0
                return temp_c
            return 'NO-DATA'
        except IOError:
            return 'NO-DATA'
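A minimal usage sketch for the class above, assuming a sensor exposed under the standard w1 sysfs path; the device ID '28-000005e2fdc3' and the config labels are placeholders, not taken from the source:

import time

# Hypothetical config: 'path' is the w1 device directory name,
# 'id' and 'name' are free-form labels echoed back by read().
config = {'path': '28-000005e2fdc3', 'id': 'sensor-1', 'name': 'living-room'}
sensor = ds18b20(config)

# read() wraps the temperature with a timestamp and the sensor labels;
# on a machine without the sensor this prints 'NO-DATA' instead of a float.
print(sensor.read({'ts': time.time()}))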
import { Component, OnInit, Output, EventEmitter } from '@angular/core';
import { MedsServices } from '~/meds-service';
import { NotificationService } from '~/notification-service';
import * as moment from 'moment';

@Component({
    selector: 'ns-add-item',
    templateUrl: './add-item.component.html',
    styleUrls: ['./add-item.component.css'],
    moduleId: module.id,
})
export class AddItemComponent implements OnInit {
    today: Date = new Date();
    notificationRange: any = [{}];
    date: any;
    time: any;
    isCreating: boolean = true;
    title: string = '';
    description: string = '';
    minDates: string = "";
    @Output() refresh: EventEmitter<object> = new EventEmitter<object>();

    constructor(
        private medsService: MedsServices,
        private notificationService: NotificationService
    ) {}

    ngOnInit() {
        // initialize today's date, formatted as a readable string
        this.minDates = this.formatMinDate(this.today)
    }

    onSubmit(title: string, description: string, date: any, time: any) {
        // extract hours and minutes from the time input
        let hour = time.getHours();
        let minutes = time.getMinutes();

        // format the notification time as a readable string
        let humanHour = hour + ':' + (minutes < 10 ? '0' : '') + minutes

        // prepare an array to collect every related notification id
        let i
        let notificationIds = []

        // number of daily repetitions, from the difference between today and the input date
        let num = Math.floor((Date.UTC(date.getFullYear(), date.getMonth(), date.getDate())
            - Date.UTC(this.today.getFullYear(), this.today.getMonth(), this.today.getDate()))
            / (1000 * 60 * 60 * 24));

        // create one notification object per day
        for (i = 0; i <= num; i++) {
            let notification = {
                title: title,
                description: description,
                time: time,
                date: moment().add(i, "d").toDate(),
                id: Math.floor(Math.random() * 5000),
            }
            // remember the notification id
            notificationIds.push(notification.id)
            // hand the notification to the notification service
            this.notificationService.schedule(notification)
        }

        // format the input date as a readable string
        date = this.formatDate(date)

        // create the db object
        let medication = {
            title,
            description,
            date: date,
            today: this.formatDate(new Date),
            time: humanHour,
            created: +new Date,
            ids: notificationIds,
            repetition: num,
        }

        // add the medication to the database
        this.medsService.addNewMeds(medication)

        // refresh the layout
        this.refresh.emit(medication);

        // alert the user that the notifications were scheduled
        alert("New Medication added")

        // reset the input fields
        this.title = ""
        this.description = ""
        this.time = undefined
        this.date = undefined
    }

    formatDate(date) {
        var d = new Date(date),
            month = '' + (d.getMonth() + 1),
            day = '' + d.getDate(),
            year = d.getFullYear();
        if (month.length < 2) month = '0' + month;
        if (day.length < 2) day = '0' + day;
        return [day, month, year].join('-');
    }

    formatMinDate(date) {
        var d = new Date(date),
            month = '' + (d.getMonth() + 1),
            day = '' + d.getDate(),
            year = d.getFullYear();
        if (month.length < 2) month = '0' + month;
        if (day.length < 2) day = '0' + day;
        return [year, month, day].join('/');
    }

    // cancel all notifications - Testing
    cancel() {
        this.notificationService.cancelAll()
    }

    // get all notifications - Testing
    get() {
        this.notificationService.getAll()
    }
}
def _init_subtokens_from_list(self, subtoken_strings, reserved_tokens=None):
    """Initialize the subtoken vocabulary from a list of subtoken strings.

    Reserved tokens, if given, are placed at the front of the vocabulary, so
    the ids of regular subtokens are offset by len(reserved_tokens).
    """
    if reserved_tokens is None:
        reserved_tokens = []

    if reserved_tokens:
        self._all_subtoken_strings = reserved_tokens + subtoken_strings
    else:
        self._all_subtoken_strings = subtoken_strings

    # Remember the longest subtoken so lookups never need to scan
    # arbitrarily long prefixes; empty strings are excluded from the map.
    self._max_subtoken_len = max([len(s) for s in subtoken_strings])
    self._subtoken_string_to_id = {
        s: i + len(reserved_tokens)
        for i, s in enumerate(subtoken_strings) if s
    }
    # Initialize the token cache to empty.
    self._cache_size = 2 ** 20
    self._cache = [(None, None)] * self._cache_size
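A small worked example of the state this builds, assuming the function above is available at module level; the Vocab shell below is illustrative only, not the class it was excerpted from:

class Vocab:
    # attach the module-level function as a method for demonstration
    _init_subtokens_from_list = _init_subtokens_from_list

v = Vocab()
v._init_subtokens_from_list(['a', 'bc', 'def'], reserved_tokens=['<pad>', '<EOS>'])

print(v._all_subtoken_strings)   # ['<pad>', '<EOS>', 'a', 'bc', 'def']
print(v._max_subtoken_len)       # 3, the longest non-reserved subtoken ('def')
print(v._subtoken_string_to_id)  # {'a': 2, 'bc': 3, 'def': 4}, ids offset by len(reserved_tokens)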
import { ICreature } from "creature/ICreature"; import { IDoodad } from "doodad/IDoodad"; import { IPoint } from "Enums"; import { IContainer, IItem } from "item/IItem"; import BasePacket from "multiplayer/packets/BasePacket"; export default abstract class IndexedPacket extends BasePacket { private _index; private _nextIndex; protected resetIndexes(): void; protected readIndexedInt8(): number | undefined; protected writeIndexedInt8(value?: number): void; protected readIndexedUint8(): number | undefined; protected writeIndexedUint8(value?: number): void; protected readIndexedUint16(): number | undefined; protected writeIndexedUint16(value?: number): void; protected readIndexedUint32(): number | undefined; protected writeIndexedUint32(value?: number): void; protected readIndexedFloat64(): number | undefined; protected writeIndexedFloat64(value?: number): void; protected readIndexedBool(): boolean | undefined; protected writeIndexedBool(value?: boolean): void; protected readIndexedUint8Array(): Uint8Array | undefined; protected writeIndexedUint8Array(value?: Uint8Array): void; protected readIndexedUint32NumberArray(): number[] | undefined; protected writeIndexedUint32NumberArray(value?: number[]): void; protected readIndexedString(): string | undefined; protected writeIndexedString(value?: string): void; protected readIndexedStringArray(): string[] | undefined; protected writeIndexedStringArray(value?: string[]): void; protected readIndexedPoint(): IPoint | undefined; protected writeIndexedPoint(value?: IPoint): void; protected readIndexedContainer(): IContainer | undefined; protected writeIndexedContainer(value?: IContainer): void; protected readIndexedCreature(): ICreature | undefined; protected writeIndexedCreature(value?: ICreature): void; protected readIndexedDoodad(): IDoodad | undefined; protected writeIndexedDoodad(value?: IDoodad): void; protected readIndexedItem(): IItem | undefined; protected writeIndexedItem(value?: IItem): void; protected readIndexedItemOrDoodad(): IItem | IDoodad | undefined; protected writeIndexedItemOrDoodad(value?: IItem | IDoodad): void; protected readIndexedItems(): IItem[] | undefined; protected writeIndexedItems(value?: IItem[]): void; protected readIndexedObject(): any; protected writeIndexedObject(value?: any): void; private writeIndex(value); private readIndex(); }
# -*- coding: utf-8 -*- __author__ = "Haribo (<EMAIL>)" __license__ = "Apache 2.0" # standard library import asyncio from http import HTTPStatus from typing import TypeVar # scip plugin from eureka.client.discovery.discovery_client import EurekaTransport from eureka.client.discovery.eureka_client_config import EurekaClientConfig from eureka.client.discovery.shared.transport import EurekaTransportConfig from eureka.client.discovery.shared.transport.transport_client_factory import TransportClientFactory from eureka.utils.asyncio_utils import CoroutineScheduler from spring_cloud.utils.logging import getLogger DiscoveryClient = TypeVar("DiscoveryClient") class AsyncIOEurekaTransport(EurekaTransport): def __init__( self, discovery_client: DiscoveryClient, transport_client_factory: TransportClientFactory, eureka_transport_config: EurekaTransportConfig, ): super().__init__(discovery_client, transport_client_factory, eureka_transport_config) self._logger = getLogger("eureka.client.discovery.asyncio_eureka_transport") async def register(self): await asyncio.create_task(self._registration_task(), name="registration_task") async def _registration_task(self) -> bool: instance = self._discovery_client.application_info_manager.instance_info try: eureka_http_response = await self.registration_client.register(instance) if eureka_http_response and eureka_http_response.status_code == HTTPStatus.NO_CONTENT: return True else: self._logger.error(f"Instance {instance.instance_id}'s registration task failed") return False except asyncio.TimeoutError: self._logger.error(f"Timeout reached while registering {instance.instance_id}") async def refresh_local_registry(self): eureka_client_config = self._discovery_client.eureka_client_config coroutine_scheduler = CoroutineScheduler( float(eureka_client_config.registry_fetch_interval_in_secs), eureka_client_config.registry_cache_refresh_executor_exponential_back_off_bound, float(eureka_client_config.registry_fetch_interval_in_secs), eureka_client_config.registry_cache_refresh_executor_thread_pool_size, self._supervised_refresh_local_registry_task, eureka_client_config, self._discovery_client, ) await asyncio.create_task(coroutine_scheduler.start()) async def _supervised_refresh_local_registry_task( self, eureka_client_config: EurekaClientConfig, discovery_client: DiscoveryClient ): # Shuffle on temporary registry instead of client's cached registry to avoid # inconsistency in registry when the timeout error or cancelled error happened during shuffle. 
        registry_received_from_eureka_server = None
        try:
            eureka_http_response = await asyncio.create_task(
                self.query_client.get_applications(), name="refresh_local_registry_task"
            )

            if eureka_http_response and eureka_http_response.status_code == HTTPStatus.OK:
                if eureka_client_config.should_disable_delta:
                    registry_received_from_eureka_server = eureka_http_response.entity
                    registry_received_from_eureka_server.shuffle_instances(
                        eureka_client_config.should_filter_only_up_instance
                    )
        except (asyncio.TimeoutError, asyncio.CancelledError):
            self._logger.error("Timeout reached while refreshing local registry")
        else:
            # Only replace the cached registry if a new one was actually received.
            if registry_received_from_eureka_server is not None:
                discovery_client.applications = registry_received_from_eureka_server

    async def send_heart_beat(self):
        # TODO heart-beat task
        pass

    async def unregister(self):
        instance = self._discovery_client.application_info_manager.instance_info
        try:
            eureka_http_response = await self.registration_client.cancel(instance)

            if not eureka_http_response or eureka_http_response.status_code != HTTPStatus.OK:
                self._logger.error(f"Instance {instance.instance_id}'s cancellation task failed")
        except asyncio.TimeoutError:
            self._logger.error(f"Timeout reached while cancelling instance {instance.instance_id}")

    async def shutdown(self):
        await asyncio.create_task(self.registration_client.shutdown())
        await asyncio.create_task(self.query_client.shutdown())
        # See https://docs.aiohttp.org/en/stable/client_advanced.html#graceful-shutdown
        await asyncio.sleep(0.250)
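The comment above the refresh task describes a copy-then-swap discipline: the fetched registry is staged in a local variable and only published to the client after the awaited call has finished cleanly, so a timeout or cancellation mid-shuffle can never leave the cached registry half-updated. A stripped-down sketch of the same pattern, independent of the Eureka classes (all names below are illustrative):

import asyncio

async def refresh_registry(client, fetch_applications):
    """Fetch a fresh registry and swap it in only on clean success."""
    staged = None
    try:
        staged = await fetch_applications()  # may raise TimeoutError / CancelledError
        if staged is not None:
            staged.sort()  # any shuffling or sorting happens on the staged copy
    except (asyncio.TimeoutError, asyncio.CancelledError):
        return  # the client's cached registry stays untouched and consistent
    if staged is not None:
        client.registry = staged  # publish the fully-built registry in one step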
Photoredox-Catalyzed Carbonyl Alkylative Amination with Diazo Compounds: A Three-Component Reaction for the Construction of γ-Amino Acid Derivatives. A photoredox-catalyzed reaction of secondary amines, aldehydes, diazo compounds, and Hantzsch ester is reported, affording biologically active γ-amino acid derivatives in high yields. This one-pot process tolerates a broad range of functional groups and various drug molecules and biologically active compounds. Remarkably, a gram-scale reaction and diverse transformations of γ-amino acid derivatives were successfully performed, and the utility of the products is demonstrated in the synthesis of therapeutic agent pregabalin.
/**
 * Iteration over the three vectors, with a pause whenever we find a match
 *
 * On each stop, we store the iteration state in the in/out i,j,k
 * parameters, and return the currently matching passive refspec as
 * well as the head which we matched.
 */
static int next_head(const git_remote *remote, git_vector *refs,
		     git_refspec **out_spec, git_remote_head **out_head,
		     size_t *out_i, size_t *out_j, size_t *out_k)
{
	const git_vector *active, *passive;
	git_remote_head *head;
	git_refspec *spec, *passive_spec;
	size_t i, j, k;
	int valid;

	active = &remote->active_refspecs;
	passive = &remote->passive_refspecs;

	i = *out_i;
	j = *out_j;
	k = *out_k;

	for (; i < refs->length; i++) {
		head = git_vector_get(refs, i);

		if (git_reference_name_is_valid(&valid, head->name) < 0)
			return -1;

		if (!valid)
			continue;

		for (; j < active->length; j++) {
			spec = git_vector_get(active, j);

			if (!git_refspec_src_matches(spec, head->name))
				continue;

			for (; k < passive->length; k++) {
				passive_spec = git_vector_get(passive, k);

				if (!git_refspec_src_matches(passive_spec, head->name))
					continue;

				*out_spec = passive_spec;
				*out_head = head;
				*out_i = i;
				*out_j = j;
				*out_k = k + 1;
				return 0;
			}
			k = 0;
		}
		j = 0;
	}

	return GIT_ITEROVER;
}
import {ActionTypes, BaseModuleHandlers, action, mutation, LoadingState} from '@elux/vue-vuex-web';
import {CustomError} from '@/utils/errors';
import {CurUser, guest, api} from './entity';

export interface ModuleState {
  loading?: {global: LoadingState};
  curUser: CurUser;
}

export class ModuleHandlers extends BaseModuleHandlers<ModuleState, {}> {
  constructor(moduleName: string, context: any) {
    // pass moduleName and initState to super
    super(moduleName, context, {curUser: guest});
  }

  @mutation
  public putCurUser(curUser: CurUser): void {
    this.state.curUser = curUser;
  }

  @action(null)
  protected async [ActionTypes.Error](error: CustomError): Promise<void> {
    if (!error.quiet) {
      // eslint-disable-next-line no-alert
      window.alert(error.message);
    }
    throw error;
  }

  @action(null)
  protected async ['this.Init'](): Promise<void> {
    const curUser = await api.getCurUser();
    this.dispatch(this.actions.putCurUser(curUser));
  }
}
<filename>NetworkServiceProxy.framework/NPKeyBag.h /* Generated by RuntimeBrowser Image: /System/Library/PrivateFrameworks/NetworkServiceProxy.framework/NetworkServiceProxy */ @interface NPKeyBag : NSObject { unsigned int _error; unsigned int _generation; unsigned int _index; NSArray * _keys; double _lastUsedTimestamp; unsigned int _timestamp; NSData * _updateHash; } @property unsigned int error; @property unsigned int generation; @property unsigned int index; @property (retain) NSArray *keys; @property double lastUsedTimestamp; @property unsigned int timestamp; @property (retain) NSData *updateHash; - (void).cxx_destruct; - (id)data; - (unsigned int)error; - (unsigned int)generation; - (unsigned int)index; - (id)initWithData:(id)arg1; - (id)keys; - (double)lastUsedTimestamp; - (void)setError:(unsigned int)arg1; - (void)setGeneration:(unsigned int)arg1; - (void)setIndex:(unsigned int)arg1; - (void)setKeys:(id)arg1; - (void)setLastUsedTimestamp:(double)arg1; - (void)setTimestamp:(unsigned int)arg1; - (void)setUpdateHash:(id)arg1; - (unsigned int)timestamp; - (id)updateHash; @end
<reponame>amitkrout/gitops-operator /* Copyright 2021. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package controllers import ( "context" "fmt" argoapp "github.com/argoproj-labs/argocd-operator/pkg/apis/argoproj/v1alpha1" monitoringv1 "github.com/coreos/prometheus-operator/pkg/apis/monitoring/v1" "github.com/go-logr/logr" corev1 "k8s.io/api/core/v1" rbacv1 "k8s.io/api/rbac/v1" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/intstr" ctrl "sigs.k8s.io/controller-runtime" "sigs.k8s.io/controller-runtime/pkg/client" "sigs.k8s.io/controller-runtime/pkg/controller/controllerutil" logf "sigs.k8s.io/controller-runtime/pkg/log" "sigs.k8s.io/controller-runtime/pkg/reconcile" ) const ( readRoleNameFormat = "%s-read" readRoleBindingNameFormat = "%s-prometheus-k8s-read-binding" alertRuleName = "gitops-operator-argocd-alerts" ) type ArgoCDMetricsReconciler struct { // This client, initialized using mgr.Client() above, is a split client // that reads objects from the cache and writes to the apiserver Client client.Client Scheme *runtime.Scheme } // blank assignment to verify that ReconcileArgoCDRoute implements reconcile.Reconciler var _ reconcile.Reconciler = &ArgoCDMetricsReconciler{} // SetupWithManager sets up the controller with the Manager. func (r *ArgoCDMetricsReconciler) SetupWithManager(mgr ctrl.Manager) error { return ctrl.NewControllerManagedBy(mgr). For(&argoapp.ArgoCD{}). Complete(r) } //+kubebuilder:rbac:groups=monitoring.coreos.com,resources=prometheuses;prometheusrules;servicemonitors,verbs=* //+kubebuilder:rbac:groups=monitoring.coreos.com,resources=servicemonitors,verbs=create;get func (r *ArgoCDMetricsReconciler) Reconcile(request reconcile.Request) (reconcile.Result, error) { var logs = logf.Log.WithName("controller_argocd_metrics") reqLogger := logs.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name) reqLogger.Info("Reconciling ArgoCD Metrics") namespace := corev1.Namespace{} err := r.Client.Get(context.TODO(), types.NamespacedName{Name: request.Namespace}, &namespace) if err != nil { if errors.IsNotFound(err) { // Namespace not found, could have been deleted after reconcile request. // Owned objects are automatically garbage collected. For additional cleanup logic use finalizers. // Return and don't requeue return reconcile.Result{}, nil } reqLogger.Error(err, "Error getting namespace", "Namespace", request.Namespace) return reconcile.Result{}, err } argocd := &argoapp.ArgoCD{} err = r.Client.Get(context.TODO(), types.NamespacedName{Name: request.Name, Namespace: request.Namespace}, argocd) if err != nil { if errors.IsNotFound(err) { // ArgoCD not found, could have been deleted after reconcile request. // Owned objects are automatically garbage collected. For additional cleanup logic use finalizers. 
			// Return and don't requeue
			return reconcile.Result{}, nil
		}
		reqLogger.Error(err, "Error getting ArgoCD instance")
		return reconcile.Result{}, err
	}

	const clusterMonitoringLabel = "openshift.io/cluster-monitoring"
	_, exists := namespace.Labels[clusterMonitoringLabel]
	if !exists {
		if namespace.Labels == nil {
			namespace.Labels = make(map[string]string)
		}
		namespace.Labels[clusterMonitoringLabel] = "true"
		err = r.Client.Update(context.TODO(), &namespace)
		if err != nil {
			reqLogger.Error(err, "Error updating namespace", "Namespace", namespace.Name)
			return reconcile.Result{}, err
		}
	} else {
		reqLogger.Info("Namespace already has cluster-monitoring label", "Namespace", namespace.Name)
	}

	// Create role to grant read permission to the openshift metrics stack
	err = r.createReadRoleIfAbsent(request.Namespace, argocd, reqLogger)
	if err != nil {
		return reconcile.Result{}, err
	}

	// Create role binding to grant read permission to the openshift metrics stack
	err = r.createReadRoleBindingIfAbsent(request.Namespace, argocd, reqLogger)
	if err != nil {
		return reconcile.Result{}, err
	}

	// Create ServiceMonitor for ArgoCD application metrics
	serviceMonitorLabel := fmt.Sprintf("%s-metrics", request.Name)
	serviceMonitorName := request.Name
	err = r.createServiceMonitorIfAbsent(request.Namespace, argocd, serviceMonitorName, serviceMonitorLabel, reqLogger)
	if err != nil {
		return reconcile.Result{}, err
	}

	// Create ServiceMonitor for ArgoCD API server metrics
	serviceMonitorLabel = fmt.Sprintf("%s-server-metrics", request.Name)
	serviceMonitorName = fmt.Sprintf("%s-server", request.Name)
	err = r.createServiceMonitorIfAbsent(request.Namespace, argocd, serviceMonitorName, serviceMonitorLabel, reqLogger)
	if err != nil {
		return reconcile.Result{}, err
	}

	// Create ServiceMonitor for ArgoCD repo server metrics
	serviceMonitorLabel = fmt.Sprintf("%s-repo-server", request.Name)
	serviceMonitorName = fmt.Sprintf("%s-repo-server", request.Name)
	err = r.createServiceMonitorIfAbsent(request.Namespace, argocd, serviceMonitorName, serviceMonitorLabel, reqLogger)
	if err != nil {
		return reconcile.Result{}, err
	}

	// Create alert rule
	err = r.createPrometheusRuleIfAbsent(request.Namespace, argocd, reqLogger)
	if err != nil {
		return reconcile.Result{}, err
	}
	return reconcile.Result{}, nil
}

func (r *ArgoCDMetricsReconciler) createReadRoleIfAbsent(namespace string, argocd *argoapp.ArgoCD, reqLogger logr.Logger) error {
	readRole := newReadRole(namespace)
	existingReadRole := &rbacv1.Role{}
	err := r.Client.Get(context.TODO(), types.NamespacedName{Name: readRole.Name, Namespace: readRole.Namespace}, existingReadRole)
	if err == nil {
		reqLogger.Info("Read role already exists", "Namespace", readRole.Namespace, "Name", readRole.Name)
		return nil
	}
	if errors.IsNotFound(err) {
		reqLogger.Info("Creating new read role", "Namespace", readRole.Namespace, "Name", readRole.Name)
		// Set the ArgoCD instance as the owner and controller
		if err := controllerutil.SetControllerReference(argocd, readRole, r.Scheme); err != nil {
			reqLogger.Error(err, "Error setting read role owner ref",
				"Namespace", readRole.Namespace, "Name", readRole.Name, "ArgoCD Name", argocd.Name)
			return err
		}
		err = r.Client.Create(context.TODO(), readRole)
		if err != nil {
			reqLogger.Error(err, "Error creating a new read role",
				"Namespace", readRole.Namespace, "Name", readRole.Name)
			return err
		}
		return nil
	}
	reqLogger.Error(err, "Error querying for read role", "Name", readRole.Name, "Namespace", readRole.Namespace)
	return err
}

func (r *ArgoCDMetricsReconciler) createReadRoleBindingIfAbsent(namespace
string, argocd *argoapp.ArgoCD, reqLogger logr.Logger) error {
	readRoleBinding := newReadRoleBinding(namespace)
	existingReadRoleBinding := &rbacv1.RoleBinding{}
	err := r.Client.Get(context.TODO(), types.NamespacedName{Name: readRoleBinding.Name, Namespace: readRoleBinding.Namespace}, existingReadRoleBinding)
	if err == nil {
		reqLogger.Info("Read role binding already exists", "Namespace", readRoleBinding.Namespace, "Name", readRoleBinding.Name)
		return nil
	}
	if errors.IsNotFound(err) {
		reqLogger.Info("Creating new read role binding", "Namespace", readRoleBinding.Namespace, "Name", readRoleBinding.Name)
		// Set the ArgoCD instance as the owner and controller
		if err := controllerutil.SetControllerReference(argocd, readRoleBinding, r.Scheme); err != nil {
			reqLogger.Error(err, "Error setting read role binding owner ref",
				"Namespace", readRoleBinding.Namespace, "Name", readRoleBinding.Name, "ArgoCD Name", argocd.Name)
			return err
		}
		err = r.Client.Create(context.TODO(), readRoleBinding)
		if err != nil {
			reqLogger.Error(err, "Error creating a new read role binding",
				"Namespace", readRoleBinding.Namespace, "Name", readRoleBinding.Name)
			return err
		}
		return nil
	}
	reqLogger.Error(err, "Error querying for read role binding", "Name", readRoleBinding.Name, "Namespace", readRoleBinding.Namespace)
	return err
}

func (r *ArgoCDMetricsReconciler) createServiceMonitorIfAbsent(namespace string, argocd *argoapp.ArgoCD, name, serviceMonitorLabel string, reqLogger logr.Logger) error {
	existingServiceMonitor := &monitoringv1.ServiceMonitor{}
	err := r.Client.Get(context.TODO(), types.NamespacedName{Name: name, Namespace: namespace}, existingServiceMonitor)
	if err == nil {
		reqLogger.Info("A ServiceMonitor instance already exists", "Namespace", existingServiceMonitor.Namespace, "Name", existingServiceMonitor.Name)
		return nil
	}
	if errors.IsNotFound(err) {
		serviceMonitor := newServiceMonitor(namespace, name, serviceMonitorLabel)
		reqLogger.Info("Creating a new ServiceMonitor instance", "Namespace", serviceMonitor.Namespace, "Name", serviceMonitor.Name)
		// Set the ArgoCD instance as the owner and controller
		if err := controllerutil.SetControllerReference(argocd, serviceMonitor, r.Scheme); err != nil {
			reqLogger.Error(err, "Error setting ServiceMonitor owner ref",
				"Namespace", serviceMonitor.Namespace, "Name", serviceMonitor.Name, "ArgoCD Name", argocd.Name)
			return err
		}
		err = r.Client.Create(context.TODO(), serviceMonitor)
		if err != nil {
			reqLogger.Error(err, "Error creating a new ServiceMonitor instance",
				"Namespace", serviceMonitor.Namespace, "Name", serviceMonitor.Name)
			return err
		}
		return nil
	}
	reqLogger.Error(err, "Error querying for ServiceMonitor", "Namespace", namespace, "Name", name)
	return err
}

func (r *ArgoCDMetricsReconciler) createPrometheusRuleIfAbsent(namespace string, argocd *argoapp.ArgoCD, reqLogger logr.Logger) error {
	alertRule := newPrometheusRule(namespace)
	existingAlertRule := &monitoringv1.PrometheusRule{}
	err := r.Client.Get(context.TODO(), types.NamespacedName{Name: alertRule.Name, Namespace: alertRule.Namespace}, existingAlertRule)
	if err == nil {
		reqLogger.Info("An alert rule instance already exists", "Namespace", existingAlertRule.Namespace, "Name", existingAlertRule.Name)
		return nil
	}
	if errors.IsNotFound(err) {
		reqLogger.Info("Creating new alert rule", "Namespace", alertRule.Namespace, "Name", alertRule.Name)
		// Set the ArgoCD instance as the owner and controller
		if err := controllerutil.SetControllerReference(argocd, alertRule, r.Scheme); err != nil {
			reqLogger.Error(err, "Error setting alert rule owner
ref", "Namespace", alertRule.Namespace, "Name", alertRule.Name, "ArgoCD Name", argocd.Name) return err } err := r.Client.Create(context.TODO(), alertRule) if err != nil { reqLogger.Error(err, "Error creating a new alert rule", "Namespace", alertRule.Namespace, "Name", alertRule.Name) return err } return nil } reqLogger.Error(err, "Error querying for existing alert rule", "Namespace", namespace, "Name", alertRuleName) return err } func newReadRole(namespace string) *rbacv1.Role { objectMeta := metav1.ObjectMeta{ Name: fmt.Sprintf(readRoleNameFormat, namespace), Namespace: namespace, } rules := []rbacv1.PolicyRule{ { APIGroups: []string{""}, Resources: []string{"endpoints", "services", "pods"}, Verbs: []string{"get", "list", "watch"}, }, } return &rbacv1.Role{ ObjectMeta: objectMeta, Rules: rules, } } func newReadRoleBinding(namespace string) *rbacv1.RoleBinding { objectMeta := metav1.ObjectMeta{ Name: fmt.Sprintf(readRoleBindingNameFormat, namespace), Namespace: namespace, } roleRef := rbacv1.RoleRef{ APIGroup: "rbac.authorization.k8s.io", Kind: "Role", Name: fmt.Sprintf(readRoleNameFormat, namespace), } subjects := []rbacv1.Subject{ { Kind: "ServiceAccount", Name: "prometheus-k8s", Namespace: "openshift-monitoring", }, } return &rbacv1.RoleBinding{ ObjectMeta: objectMeta, RoleRef: roleRef, Subjects: subjects, } } func newServiceMonitor(namespace, name, matchLabel string) *monitoringv1.ServiceMonitor { objectMeta := metav1.ObjectMeta{ Name: name, Namespace: namespace, Labels: map[string]string{ "release": "prometheus-operator", }, } spec := monitoringv1.ServiceMonitorSpec{ Selector: metav1.LabelSelector{ MatchLabels: map[string]string{ "app.kubernetes.io/name": matchLabel, }, }, Endpoints: []monitoringv1.Endpoint{ { Port: "metrics", }, }, } return &monitoringv1.ServiceMonitor{ ObjectMeta: objectMeta, Spec: spec, } } func newPrometheusRule(namespace string) *monitoringv1.PrometheusRule { // The namespace used in the alert rule is not the namespace of the // running application, it is the namespace that the corresponding // ArgoCD application metadata was created in. This is needed to // scope this alert rule to only fire for applications managed // by the ArgoCD instance installed in this namespace. expr := fmt.Sprintf("argocd_app_info{namespace=\"%s\",sync_status=\"OutOfSync\"} > 0", namespace) objectMeta := metav1.ObjectMeta{ Name: alertRuleName, Namespace: namespace, } spec := monitoringv1.PrometheusRuleSpec{ Groups: []monitoringv1.RuleGroup{ { Name: "GitOpsOperatorArgoCD", Rules: []monitoringv1.Rule{ { Alert: "ArgoCDSyncAlert", Annotations: map[string]string{ "message": "ArgoCD application {{ $labels.name }} is out of sync", }, Expr: intstr.IntOrString{ Type: intstr.String, StrVal: expr, }, Labels: map[string]string{ "severity": "warning", }, }, }, }, }, } return &monitoringv1.PrometheusRule{ ObjectMeta: objectMeta, Spec: spec, } }
<filename>primer/chapter09/3.cpp
/*!
  What restrictions apply to the two iterators that form an iterator range?
  1. They must refer to elements of the same container, or to the position
     just past the last element of that container.
  2. end must be reachable from begin by repeatedly incrementing begin; in
     other words, end must not precede begin.
*/
Orca: A Language For Parallel Programming of Distributed Systems

A detailed description is given of the Orca language design and the design choices are discussed. Orca is intended for applications programmers rather than systems programmers. This is reflected in its design goals to provide a simple, easy-to-use language that is type-secure and provides clean semantics. Three example parallel applications in Orca, one of which is described in detail, are discussed. One of the existing implementations, which is based on reliable broadcasting, is described. Performance measurements of this system are given for three parallel applications. The measurements show that significant speedups can be obtained for all three applications. The authors compare Orca with several related languages and systems.
<reponame>SpeciesFileGroup/distinguish export const RANK_TYPES: Array<string> = [ 'otu', 'subspecies', 'species', 'subgenus', 'genus', 'subtribe', 'tribe', 'subfamily', 'family' ]
/** * Always returns true for "GET_ACCOUNTS", because we actually don't need this permission * to read accounts of this application */ public static boolean checkPermission(@NonNull Context context, PermissionType permissionType) { return allGranted || permissionType == PermissionType.GET_ACCOUNTS || ContextCompat.checkSelfPermission(context, permissionType.manifestPermission ) == PackageManager.PERMISSION_GRANTED; }
<gh_stars>0
//
//  BaseViewController.h
//  HappyShopping
//
//  Created by Alien on 2018/10/23.
//  Copyright © 2018 alien. All rights reserved.
//

#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

@interface BaseViewController : UIViewController

@property(nonatomic,strong)UIView *navBarView;

/** Title text of the right bar button */
@property(nonatomic,strong)NSString *rightBarTitle;

/**
 Left bar button

 @param buttonBlock a block that receives the item button
 */
-(void)customNavViewLeftBarButtonItem:(void(^)(UIButton *itemButton))buttonBlock;

/** Center button */
-(void)customNavViewCenterButtonItem:(void(^)(UIButton *itemButton))buttonBlock;

/** Center title view */
-(void)customNavViewCenterTitleView:(void(^)(UIView *titleView))titleViewBlock;

/** Right bar button */
-(void)customNavViewRightBarButtonItem:(void(^)(UIButton *itemButton))buttonBlock;

/** Show the bottom separator line */
-(void)showNavViewButtomLien;

/** Navigation bar background color */
@property(nonatomic,strong)UIColor *navigationBGColor;

@end

NS_ASSUME_NONNULL_END
#Pye
from sys import stdin, stdout
from os import path

if path.exists('inp.txt'):
    stdin = open("inp.txt", "r")

q = int(stdin.readline())
for _ in range(q):
    x, y, a, b = map(int, stdin.readline().split())
    # integer division avoids float precision loss on large inputs
    print((y - x) // (a + b) if not (y - x) % (a + b) else -1)
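The answer printed above is simply the quotient (y - x) / (a + b) whenever (a + b) divides (y - x) exactly, and -1 otherwise. A quick sanity check of the formula with made-up values:

# Worked example of the divisibility check (values are illustrative):
x, y, a, b = 2, 12, 3, 2   # y - x = 10, a + b = 5
assert (y - x) % (a + b) == 0
print((y - x) // (a + b))  # -> 2, since two full increments of 5 cover the gap

x, y, a, b = 2, 13, 3, 2   # y - x = 11 is not a multiple of 5
print(-1 if (y - x) % (a + b) else (y - x) // (a + b))  # -> -1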
#import <UIKit/UIKit.h> #import <GoogleAnalytics/GAI.h> #import <GoogleAnalytics/GAIDictionaryBuilder.h> #import <GoogleAnalytics/GAIEcommerceFields.h> #import <GoogleAnalytics/GAIEcommerceProduct.h> #import <GoogleAnalytics/GAIEcommerceProductAction.h> #import <GoogleAnalytics/GAIEcommercePromotion.h> #import <GoogleAnalytics/GAIFields.h> #import <GoogleAnalytics/GAILogger.h> #import <GoogleAnalytics/GAITrackedViewController.h> #import <GoogleAnalytics/GAITracker.h> FOUNDATION_EXPORT double MPGoogleAnalyticsVersionNumber; FOUNDATION_EXPORT const unsigned char MPGoogleAnalyticsVersionString[];
import { both } from 'ramda';
import { selectorUnitCombinator } from '../style/define';
import { matchSelector } from '../style/match-selector';
import { isTag } from './is-tag';
import { traversalNode } from './traversal-node';

// querySelectorAll-like matching: find every element that satisfies the selector chain
export const getBySelector = (dom: INode, selectors: ISelector[]): ITagNode[] => {
	const len = selectors.length;
	const result: ITagNode[] = [];
	traversalNode<ITagNode>(both(isTag, matchSelector(selectors[len - 1])), node => {
		let i = len - 2;
		let currentNode: INode = node;
		while (i >= 0) {
			const matchI = matchSelector(selectors[i]);
			switch (selectors[i].combinator) {
				// child combinator
				case selectorUnitCombinator['>']:
					if (currentNode.parentNode) {
						if (!matchI(currentNode.parentNode)) {
							return;
						}
						currentNode = currentNode.parentNode;
						break;
					}
					return;
				// adjacent sibling combinator
				case selectorUnitCombinator['+']:
					if (currentNode.parentNode) {
						const brothers = (currentNode.parentNode as ITagNode).childNodes;
						const index = brothers.indexOf(currentNode);
						if (index <= 0 || !matchI(brothers[index - 1])) {
							return;
						}
						currentNode = brothers[index - 1];
						break;
					}
					return;
				// general sibling combinator
				case selectorUnitCombinator['~']:
					if (currentNode.parentNode) {
						const _brothers = (currentNode.parentNode as ITagNode).childNodes;
						const _index = _brothers.indexOf(currentNode);
						if (_index <= 0) {
							return;
						}
						let _brother: INode | undefined;
						for (let bi = _index; bi--;) {
							_brother = _brothers[bi];
							if (matchI(_brother)) {
								currentNode = _brother;
								break;
							}
						}
						if (currentNode !== _brother) {
							return;
						}
						break;
					}
					return;
				// descendant combinator (the default)
				default:
					let parent = currentNode.parentNode;
					while (parent) {
						if (matchI(parent)) {
							currentNode = parent;
							break;
						}
						parent = parent.parentNode;
					}
					if (currentNode !== parent) {
						return;
					}
					break;
			}
			i--;
		}
		result.push(node);
	}, dom);
	return result;
};