content
stringlengths
10
4.9M
/**
 * Watcher for the modules deployed to this container under the {@link Paths#JOB_DEPLOYMENTS} location. If the node
 * is deleted, this container will undeploy the module.
 */
class JobModuleWatcher implements CuratorWatcher {

    /**
     * {@inheritDoc}
     */
    @Override
    public void process(WatchedEvent event) throws Exception {
        CuratorFramework client = zkConnection.getClient();

        if (event.getType() == Watcher.Event.EventType.NodeDeleted) {
            // Deletion of the job-deployments node is the undeploy signal:
            // stop the module locally, then clean up this container's
            // module-deployment node so the admin view stays consistent.
            JobDeploymentsPath jobDeploymentsPath = new JobDeploymentsPath(event.getPath());
            String jobName = jobDeploymentsPath.getJobName();
            String moduleLabel = jobDeploymentsPath.getModuleLabel();
            String moduleSequence = jobDeploymentsPath.getModuleSequenceAsString();
            undeployModule(jobName, ModuleType.job.toString(), moduleLabel);

            // Path of this container's own deployment record for the module.
            String deploymentPath = new ModuleDeploymentsPath()
                    .setContainer(containerAttributes.getId())
                    .setDeploymentUnitName(jobName)
                    .setModuleType(ModuleType.job.toString())
                    .setModuleLabel(moduleLabel)
                    .setModuleSequence(moduleSequence).build();

            try {
                if (client.checkExists().forPath(deploymentPath) != null) {
                    logger.trace("Deleting path: {}", deploymentPath);
                    client.delete().deletingChildrenIfNeeded().forPath(deploymentPath);
                }
            }
            catch (Exception e) {
                // it is common for a process shutdown to trigger this
                // event; therefore any exception thrown while attempting
                // to delete a deployment path will only be rethrown
                // if the client is in a connected/started state
                if (client.getState() == CuratorFrameworkState.STARTED) {
                    throw ZooKeeperUtils.wrapThrowable(e);
                }
            }
        }
        else {
            logger.debug("Unexpected event {}, ZooKeeper state: {}", event.getType(), event.getState());
            // Only re-register while the session is usable; for other states
            // (e.g. Expired/Disconnected) re-registration would fail anyway.
            if (EnumSet.of(Watcher.Event.KeeperState.SyncConnected,
                    Watcher.Event.KeeperState.SaslAuthenticated,
                    Watcher.Event.KeeperState.ConnectedReadOnly).contains(event.getState())) {
                // this watcher is only interested in deletes for the purposes of undeploying modules;
                // if any other change occurs the watch needs to be reestablished
                // (ZooKeeper watches are one-shot, so firing consumes the watch)
                try {
                    client.getData().usingWatcher(this).forPath(event.getPath());
                }
                catch (Exception e) {
                    logger.error("Exception setting up watch for path '{}': {}; ZooKeeper state: {}",
                            event.getPath(), e,
                            zkConnection.getClient().getZookeeperClient().getZooKeeper().getState());
                    if (logger.isDebugEnabled()) {
                        logger.debug("Full stack trace", e);
                    }
                    // Same policy as above: only rethrow while the client is live.
                    if (client.getState() == CuratorFrameworkState.STARTED) {
                        throw ZooKeeperUtils.wrapThrowable(e);
                    }
                }
            }
        }
    }
}
/**
 * 05/29/2013
 * PrimeIndex.java
 * 10001st prime
 *
 * Prints the N-th prime (1-based) for a positive integer N supplied as the
 * single command-line argument.
 *
 * @author Scott Wiedemann
 */
public class PrimeIndex {
    public static void main(String[] args) {
        if (args.length != 1) {
            System.err.println("Incorrect number of arguments.");
            System.err.println("Usage: ./primeindex.jar N");
            System.exit(1);
        }
        // Validate the argument instead of letting a raw
        // NumberFormatException stack trace escape to the user.
        Long N;
        try {
            N = Long.parseLong(args[0]);
        } catch (NumberFormatException e) {
            System.err.println("N must be an integer, got: " + args[0]);
            System.exit(1);
            return; // unreachable; satisfies definite assignment of N
        }
        // N is used as a 1-based list index via intValue(), so it must be
        // positive and must fit in an int.
        if (N < 1 || N > Integer.MAX_VALUE) {
            System.err.println("N must be between 1 and " + Integer.MAX_VALUE + ".");
            System.exit(1);
        }
        System.out.println(PrimeGenerator.generateNPrimes(N).get(N.intValue() - 1));
    }
}
#pragma once

// Application entry-point wrapper object.
// Only the declaration lives here; constructor/destructor bodies are
// defined in the corresponding implementation file.
class EntryPoint {
public:
    EntryPoint();   // defined out of line
    ~EntryPoint();  // defined out of line
};
def _get_num_batch_iter(self, model_inputs: Dict[str, Any], batch_size: int) -> int: num_of_sequences = model_inputs["input_ids"].shape[0] num_batch_iter = int(np.ceil(num_of_sequences / batch_size)) return num_batch_iter
The Buildup of Global Debt and the Emergence of a New Global Financial Crisis Many economic researchers and economic policy-makers are discussing an upcoming global financial crisis that will result in a long-term economic recession due to the accumulation of global debt that has reached record levels. However, the truth is that the crisis is still far from our economic reality because the debt crisis has been addressed in a number of countries since the crisis of the 1980s by the rescheduling and write-offs of debts, international cooperation, and other measures. Also, the United States is still able to manage its debts as long as the dollar is the global reserve currency, and it cannot easily be abandoned as such. In addition, the International Monetary Fund is prepared to manage debts and financial crises while at the same time monitoring economic indicators. It provides the necessary international liquidity to achieve global monetary and economic stability.
//--- // Use the internal j2k tile size if it's not the same as the image(one BIG tile). //--- ossim_uint32 ossimOpjJp2Reader::getImageTileWidth() const { ossim_uint32 result = 0; if ( (m_sizRecord.m_XTsiz - m_sizRecord.m_XTOsiz) < getNumberOfSamples(0) ) { result = m_sizRecord.m_XTsiz - m_sizRecord.m_XTOsiz; } return result; }
<commit_msg>Fix for pypy compatibility: must call super's __init__ <commit_before> class BadRequestError(Exception): pass class ConflictError(Exception): pass class NotFoundError(Exception): def __init__(self, status_code=None): self.status_code = status_code <commit_after> class BadRequestError(Exception): pass class ConflictError(Exception): pass class NotFoundError(Exception): def __init__(self, status_code=None): super(NotFoundError, self).__init__() self.status_code = status_code
Social Impacts of Tourism on Central Florida. Ady Milman and Abraham Pizam. Annals of Tourism Research, vol. 15, no. 2, 1988, pp. 191-204. Pergamon Press, Inc., Fairview Park, Elmsford, New York 10523. DM255.00 annual subscription Day Trips and Their Impacts. AIEST, Case postale 2728, CH-3001 Berne, Switzerland. 1988. 180p. Because the significance of day trips from the economic, social and ecologic points of view is generally underestimated and not recognized in its total effects, AIEST adopted this important touristic development as its general theme for the 38th Congress. In addition to an introduction to the Congress theme by Dr. Claude Kaspar, 12 basic and special reports on day trips are included in this volume.
/**
 * Interpolates the underlying cubic spline row by row.
 *
 * @param xValues the x values defining the spline
 * @param yValues the y values defining the spline
 * @param xMatrix the matrix of x points to evaluate, row by row
 * @return Values of the underlying cubic spline function at the values of x
 */
public DoubleMatrix interpolate(final double[] xValues, final double[] yValues, final double[][] xMatrix) {
    ArgChecker.notNull(xMatrix, "xMatrix");
    // Snapshot the input so the per-row lambda reads a stable copy.
    final DoubleMatrix keys = DoubleMatrix.copyOf(xMatrix);
    final int rowCount = xMatrix.length;
    final int columnCount = xMatrix[0].length;
    return DoubleMatrix.ofArrayObjects(
        rowCount,
        columnCount,
        rowIndex -> interpolate(xValues, yValues, keys.rowArray(rowIndex)));
}
// Check for header file extension. // If the file extension is .h, .inc, or missing, it's // assumed to be a header. // \param FileName The file name. Must not be a directory. // \returns true if it has a header extension or no extension. bool ModularizeUtilities::isHeader(StringRef FileName) { StringRef Extension = llvm::sys::path::extension(FileName); if (Extension.size() == 0) return true; if (Extension.equals_insensitive(".h")) return true; if (Extension.equals_insensitive(".inc")) return true; return false; }
/** * Created by Cantekin on 8.1.2017. */ public class ListAdapter extends _baseListAdapter { private final String TAG = "ListAdapter"; private FragmentTransaction frgmTra; private baseProperties formProperties; public ListAdapter(Context context, IMyList view, FragmentTransaction frgmTra, int resource, List<Map<String, Object>> objects, baseProperties formProperties) { super(context, resource, objects); this.view = view; this.frgmTra = frgmTra; this.formProperties = formProperties; } @Override public View getView(final int position, final View convertView, ViewGroup parent) { View v = convertView; if (v == null) { LayoutInflater vi; vi = LayoutInflater.from(getContext()); v = vi.inflate(R.layout.row_data_list, null); } final Map<String, Object> o = getItem(position); if (o != null) { LinearLayout row = (LinearLayout) v.findViewById(R.id.listRow); TextView large = (TextView) v.findViewById(R.id.rowLargeText); TextView small1 = (TextView) v.findViewById(R.id.rowSmalTextLeft); TextView small2 = (TextView) v.findViewById(R.id.rowSmallTextRightFirst); TextView small3 = (TextView) v.findViewById(R.id.rowSmallTextSecond); TextView[] listText = new TextView[]{large, small1, small2, small3}; //TODO: list gösterim değişmeli List<Object> list = new ArrayList<>(o.values()); for (int i = 0; i < list.size(); i++) { prepairText(listText[i], list.get(i)); } final Object value = o.get("Id"); //satıra tıklama row.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { baseProperties newProp = MyPreference.getPreference(getContext()).getData(formProperties.getEditLink(), baseProperties.class); if (newProp != null) { newProp.setParentFieldId(formProperties.getParentFieldId()); newProp.setRecordId(value); CustomLogger.info(TAG, "ID==>" + String.valueOf(value)); ((MainActivity) getContext()) .showFragment(FragmentFactory.getFragment(newProp.getFormType()) .setProp(newProp)); } else { CustomLogger.alert(TAG, "Form Adı:" +formProperties.getEditLink()); 
((MainActivity) getContext()).showMessage("Form bulunamadı"); } } }); } return v; } }
<reponame>chendatony31/web3uikit<gh_stars>0
import React, { useState, useEffect } from 'react';
import color from '../../styles/colors';
import { Icon } from '../Icon';
import { iconTypes } from '../Icon';
import { Illustration } from '../Illustrations';
import SelectStyles from './Select.styles';
import type { SelectProps } from './types';
import {
    DivWrapperStyled,
    LabelStyled as LabelStyledTrad,
} from '../Input/Input.styles';

const {
    DivStyledWrapper,
    DropDownIcon,
    ErrorLabel,
    LabelStyled,
    NoDataTextStyled,
    Option,
    Options,
    PrefixIcon,
    PrefixSpan,
    SelectedItem,
    SelectStyled,
} = SelectStyles;

/**
 * Select dropdown with two render modes: a custom "fancy" styled dropdown
 * (default) and a traditional HTML5 <select> when `traditionalHTML5` is set.
 * The open/closed state and the selected option index are kept locally;
 * `value` (an option id) can drive the selection from outside.
 */
const Select: React.FC<SelectProps> = ({
    customNoDataText = 'No Data',
    defaultOptionIndex,
    disabled = false,
    errorMessage = '',
    id = String(Date.now()),
    label,
    onChange,
    onChangeTraditional,
    options = [],
    prefixText,
    state = disabled ? 'disabled' : undefined,
    style,
    traditionalHTML5 = false,
    validation,
    value,
    width = '200px',
}: SelectProps) => {
    const [isOpen, setIsOpen] = useState(false);
    const [selectedOptionIndex, setSelectedOptionIndex] =
        useState(defaultOptionIndex);

    // Toggle the dropdown; stopPropagation keeps the document-level
    // click-outside handler below from immediately closing it again.
    const toggling = (event: React.MouseEvent<HTMLDivElement>) => {
        if (disabled) return;
        setIsOpen(!isOpen);
        event.stopPropagation();
    };

    // Select an option, close the dropdown, and notify the consumer.
    const onOptionClicked = (selectedIndex: number) => () => {
        setSelectedOptionIndex(selectedIndex);
        setIsOpen(false);
        if (onChange) {
            onChange(options[selectedIndex]);
        }
    };

    // Close the dropdown on any click outside the component.
    useEffect(() => {
        const handleClickOutside = () => {
            setIsOpen(false);
        };
        document.addEventListener('click', handleClickOutside);
        return () => {
            document.removeEventListener('click', handleClickOutside);
        };
    }, []);

    // Sync the local selection with the externally supplied `value`
    // (matched against option ids; falls back to index 0 when not found).
    // NOTE(review): selectedOptionIndex in the dependency list re-runs this
    // effect after every local selection change — confirm this is intended.
    useEffect(() => {
        if (value) {
            const valueOptionItem = options.find(
                (optionItem) => optionItem.id == value,
            );
            setSelectedOptionIndex(
                valueOptionItem ? options.indexOf(valueOptionItem) : 0,
            );
        }
    }, [selectedOptionIndex, value]);

    // Custom styled dropdown (default mode).
    const renderFancySelectMode = () => (
        <DivStyledWrapper
            aria-label="select"
            data-testid="test-wrapper"
            id={id}
            state={state}
            style={{ ...style, width }}
        >
            <SelectedItem
                data-testid="test-selected"
                state={state}
                onClick={toggling}
                aria-label="option-selected"
            >
                {typeof selectedOptionIndex !== 'undefined' && (
                    <>
                        {prefixText && <PrefixSpan>{prefixText}</PrefixSpan>}
                        {options[selectedOptionIndex]?.prefix && (
                            <PrefixIcon>
                                {options[selectedOptionIndex]?.prefix}
                            </PrefixIcon>
                        )}
                        {options[selectedOptionIndex]?.label}
                    </>
                )}
                <DropDownIcon>
                    <Icon
                        svg={
                            isOpen
                                ? iconTypes.triangleUp
                                : iconTypes.triangleDown
                        }
                        fill={color.grey}
                    />
                </DropDownIcon>
            </SelectedItem>
            {label && (
                <LabelStyled
                    data-testid="test-label"
                    htmlFor={id}
                    hasSelectedIndex={
                        typeof selectedOptionIndex !== 'undefined'
                    }
                >
                    {label}
                </LabelStyled>
            )}
            {isOpen && (
                <Options aria-label="select-options" data-testid="test-options">
                    {options?.length ? (
                        options.map(
                            (option, index) =>
                                // The currently selected option is hidden
                                // from the open list.
                                index !== selectedOptionIndex && (
                                    <Option
                                        onClick={onOptionClicked(index)}
                                        key={option?.label}
                                        data-testid="test-option"
                                        aria-label="select-option"
                                    >
                                        <PrefixIcon>
                                            {option?.prefix}
                                        </PrefixIcon>
                                        {option?.label}
                                    </Option>
                                ),
                        )
                    ) : (
                        <>
                            <Illustration
                                logo="servers"
                                width="100%"
                                height="60px"
                            />
                            <NoDataTextStyled>
                                {customNoDataText}
                            </NoDataTextStyled>
                        </>
                    )}
                </Options>
            )}
            {errorMessage && <ErrorLabel>{errorMessage}</ErrorLabel>}
        </DivStyledWrapper>
    );

    // Native HTML5 <select> mode.
    const renderTraditionalSelect = () => (
        <DivWrapperStyled className="input_filled" style={{ ...style, width }}>
            <SelectStyled
                defaultValue="Please choose"
                id={id}
                onChange={(event: React.ChangeEvent<HTMLSelectElement>) =>
                    onChangeTraditional && onChangeTraditional(event)
                }
                required={validation?.required}
            >
                <option disabled>Please choose</option>
                {options.map(
                    (option, index) =>
                        index !== selectedOptionIndex && (
                            <option key={option?.id} id={String(option?.id)}>
                                {option?.label}
                            </option>
                        ),
                )}
            </SelectStyled>
            {label && (
                <LabelStyledTrad hasPrefix={false} htmlFor={id}>
                    {label}
                </LabelStyledTrad>
            )}
        </DivWrapperStyled>
    );

    return traditionalHTML5 ? renderTraditionalSelect() : renderFancySelectMode();
};

export default Select;
import java.util.Scanner;

/**
 * Reads a declared length and a string from stdin, then prints the number
 * of adjacent positions holding equal characters.
 */
public class one {
    public static void main(String[] ars) {
        Scanner scan = new Scanner(System.in);
        // The first token is the declared string length; it is consumed but
        // not otherwise used (the actual string length drives the loop).
        int declaredLength = scan.nextInt();
        String a = scan.next();
        // Idiomatic fix: compare characters directly via charAt instead of
        // materializing a String[] with split(""), and drop the unused
        // loop-external counters the original carried.
        int adjacentPairs = 0;
        for (int i = 1; i < a.length(); i++) {
            if (a.charAt(i) == a.charAt(i - 1)) {
                adjacentPairs++;
            }
        }
        System.out.println(adjacentPairs);
    }
}
BCPP: An Intelligent Prediction System of Breast Cancer Prognosis Using Microarray and Clinical Data Background: The diagnosis of cancer in most cases depends on a complex combination of clinical and histopathological data. Because of this complexity, there exists a significant amount of interest among clinical professionals and researchers regarding the efficient and accurate prediction of breast cancers. Results: In this paper, we develop a breast cancer prognosis predict system that can assist medical professionals in predicting breast cancer prognosis status based on the clinical data of patients. Our approaches include three steps. Firstly, we select genes based on statistics methodologies. Secondly, we develop three artificial neural network algorithms and four kernel functions of support vector machine for classifying breast cancers based on either clinical features or microarray gene expression data. The results are extremely good; both ANN and SVM have near perfect performance (99 – 100%) for either clinical or microarray data. Finally, we develop a user-friendly breast cancer prognosis predict (BCPP) system that generates prediction results using either support vector machine (SVM) or artificial neural network (ANN) techniques. Conclusions: Our approaches are effective in predicting the prognosis of a patient because of the very high accuracy of the results. The BCPP system developed in this study is a novel approach that can be used in the classification of breast cancer.
package com.example.myapplication1.slice;

import ohos.aafwk.ability.AbilitySlice;
import ohos.aafwk.content.Intent;
import ohos.agp.colors.RgbColor;
import ohos.agp.components.DependentLayout;
import ohos.agp.components.Text;
import ohos.agp.components.element.ShapeElement;
import ohos.agp.utils.Color;
import ohos.agp.components.DependentLayout.LayoutConfig;

/**
 * Second ability slice: builds a full-screen white layout in code and shows
 * a single centered text message.
 */
public class SecondAbilitySlice extends AbilitySlice {
    @Override
    public void onStart(Intent intent) {
        super.onStart(intent);
        // Declare the root layout.
        DependentLayout myLayout = new DependentLayout(this);
        // Make the layout fill the screen.
        myLayout.setWidth(LayoutConfig.MATCH_PARENT);
        myLayout.setHeight(LayoutConfig.MATCH_PARENT);
        // Set a white background for the layout.
        ShapeElement background = new ShapeElement();
        background.setRgbColor(new RgbColor(255,255,255));
        myLayout.setBackground(background);
        // Create a text component.
        Text text = new Text(this);
        text.setText("跳过来干嘛?");
        text.setWidth(LayoutConfig.MATCH_PARENT);
        text.setTextSize(100);
        text.setTextColor(Color.BLACK);
        // Configure the text layout: wrap content, centered in the parent.
        DependentLayout.LayoutConfig textConfig = new DependentLayout.LayoutConfig(LayoutConfig.MATCH_CONTENT,
                LayoutConfig.MATCH_CONTENT);
        textConfig.addRule(LayoutConfig.CENTER_IN_PARENT);
        text.setLayoutConfig(textConfig);
        myLayout.addComponent(text);
        super.setUIContent(myLayout);
    }
}
A Generalized Approach to Dantzig-Wolfe Decomposition for Concave Programs The Dantzig-Wolfe decomposition procedure for concave programs can be viewed as inner linearization followed by restriction. This paper presents an approach that allows selective inner linearization of functions and utilizes a generalized pricing problem. It allows considerable flexibility in the choice of functions to be approximated using inner linearization. Use of the generalized pricing problem guarantees that strict improvement in the value of the objective function is achieved at each iteration and provides a termination criterion that is both necessary and sufficient for optimality. A specialization of this procedure results in an algorithm that is a direct extension of the Dantzig-Wolfe decomposition algorithm.
// Run applies a specific target configuration.
// It optionally transforms the source value, then either applies the change
// through an SCM checkout (commit/push/pull-request) or directly against the
// local target, returning whether anything changed.
func (t *Target) Run(source string, o *Options) (changed bool, err error) {
	var pr scm.PullRequest

	// Apply configured transformers to the incoming source value first.
	if len(t.Transformers) > 0 {
		source, err = t.Transformers.Apply(source)
		if err != nil {
			logrus.Error(err)
			return false, err
		}
	}

	// Deprecation warnings for the legacy prefix/postfix keys.
	if len(t.Prefix) > 0 {
		logrus.Warnf("Key 'prefix' deprecated in favor of 'transformers', it will be delete in a future release")
	}
	if len(t.Postfix) > 0 {
		logrus.Warnf("Key 'postfix' deprecated in favor of 'transformers', it will be delete in a future release")
	}

	if o.DryRun {
		logrus.Infof("\n**Dry Run enabled**\n\n")
	}

	spec, err := Unmarshal(t)
	if err != nil {
		return false, err
	}
	// NOTE(review): this second err check is unreachable duplicate code —
	// err cannot have changed since the check above.
	if err != nil {
		return false, err
	}

	if len(t.Scm) > 0 {
		// SCM-backed path: check out the repository, apply the change to the
		// working tree, then commit/push/open a PR as requested by Options.
		var message string
		var files []string
		var s scm.Scm

		_, err := t.Check()
		if err != nil {
			return false, err
		}

		s, pr, err = scm.Unmarshal(t.Scm)
		if err != nil {
			return false, err
		}

		err = s.Init(source, t.PipelineID)
		if err != nil {
			return false, err
		}

		err = s.Checkout()
		if err != nil {
			return false, err
		}

		changed, files, message, err = spec.TargetFromSCM(t.Prefix+source+t.Postfix, s, o.DryRun)
		if err != nil {
			return changed, err
		}

		if changed && !o.DryRun {
			// A change without a commit message is treated as an error.
			if message == "" {
				return changed, fmt.Errorf("Target has no change message")
			}

			if len(t.Scm) > 0 {
				if len(files) == 0 {
					logrus.Info("no changed files to commit")
					return changed, nil
				}

				if o.Commit {
					err := s.Add(files)
					if err != nil {
						return changed, err
					}

					err = s.Commit(message)
					if err != nil {
						return changed, err
					}
				}
				if o.Push {
					err := s.Push()
					if err != nil {
						return changed, err
					}
				}
			}
		}

		// Pull-request handling: create a new PR when none exists and a
		// change was pushed, or update the existing one.
		if pr != nil && !o.DryRun && o.Push {
			ID, err := pr.IsPullRequest()
			logrus.Debugf("Pull Request ID: %v", ID)

			if err != nil {
				return changed, err
			} else if len(ID) == 0 && err == nil && changed {
				logrus.Infof("Creating Pull Request\n")
				err = pr.OpenPullRequest()
				if err != nil {
					return changed, err
				}
			} else if len(ID) != 0 && err == nil {
				logrus.Infof("Pull Request already exist, updating it\n")
				err = pr.UpdatePullRequest(ID)
				if err != nil {
					return changed, err
				}
			} else if len(ID) == 0 && err == nil && !changed {
				// No PR and nothing changed: intentionally nothing to do.
			} else {
				logrus.Errorf("Something unexpected happened while dealing with Pull Request")
			}
		}
	} else if len(t.Scm) == 0 {
		// Local path: apply the change directly to the target.
		changed, err = spec.Target(t.Prefix+source+t.Postfix, o.DryRun)
		if err != nil {
			return changed, err
		}
	}
	return changed, nil
}
def __connectable_facing(port1: "DevicePort", port2: "DevicePort", rad: float = 3):
    """Decide whether two facing ports can be joined by a simple S/C path.

    Returns ``(ok, segments)`` where ``segments`` is a list of path pieces:
    ``["S", length]`` for a straight run and ``["C", signed_offset, radius]``
    for a curved piece; ``rad`` is the bend radius used for the curves.

    NOTE(review): assumes each port exposes its position via ``x0``/``y0``
    and its facing direction via components ``dx()``/``dy()`` — confirm
    against the DevicePort definition.
    """
    # Offsets from port1 to port2.
    dx = port2.x0 - port1.x0
    dy = port2.y0 - port1.y0
    if (port1.dx() != 0):
        # port1 faces along the x axis.
        if (abs(dy) < 2 * rad):
            # Sign of the x offset; defaults to +1 when dx is exactly zero.
            dxsign = 1
            if (abs(dx) != 0):
                dxsign = dx / abs(dx)
            # Ports must face opposite x directions and port2 must lie in
            # front of port1 (offset sign matches port1's facing).
            if (port1.dx() + port2.dx() == 0 and dxsign == port1.dx()):
                if (abs(dy) < 1e-3):
                    # Collinear: a single straight segment suffices.
                    return True, [["S", abs(dx)]]
                else:
                    slen = (abs(dx) - 2 * rad) / 2
                    if (slen < 0):
                        # Too close for two rad-bends; use one curve whose
                        # radius is half the x gap.
                        return True, [["C", port1.dx() * dy, abs(dx) / 2]]
                    else:
                        # Straight - curve - straight (S-bend) route.
                        return True, [["S", slen], ["C", port1.dx() * dy, rad], ["S", slen]]
        return False, []
    else:
        # port1 faces along the y axis; mirror of the branch above with the
        # roles of x and y swapped.
        if (abs(dx) < 2 * rad):
            dysign = 1
            if (abs(dy) != 0):
                dysign = dy / abs(dy)
            if (port1.dy() + port2.dy() == 0 and dysign == port1.dy()):
                if (abs(dx) < 1e-3):
                    return True, [["S", abs(dy)]]
                else:
                    slen = (abs(dy) - 2 * rad) / 2
                    if (slen < 0):
                        return True, [["C", -port1.dy() * dx, abs(dy) / 2]]
                    else:
                        return True, [["S", slen], ["C", -port1.dy() * dx, rad], ["S", slen]]
        return False, []
def _assign_contract_solcs_range(self) -> Tuple[semver.VersionInfo, semver.VersionInfo]:
    """Derive the [min, max] solc version range satisfying every
    ``pragma solidity`` directive found in the contract file at ``self.path``,
    and store the bounds on ``self.solc_from`` / ``self.solc_to``.

    Raises NotImplementedError for ``!=`` directives and ValueError when the
    combined directives leave no satisfiable version.
    """
    with open(self.path, encoding='utf-8') as f:
        lines = f.readlines()
    # Start from the widest possible range and narrow it per comparator.
    min_version = semver.VersionInfo.parse('0.0.0')
    max_version = semver.VersionInfo.parse('99.99.99')
    for line in lines:
        # Optional comparator prefix followed by a semver triple,
        # e.g. ">=0.6.0", "^0.8.4", "0.5.16".
        version_regex = r'(?:<|>|<=|>=|==?|!=|\^)?\d\.\d+\.\d+\b'
        if re.search(r'pragma\s+solidity\s+' + version_regex, line):
            comparators = re.findall(version_regex, line)
            for comparator in comparators:
                version = semver.VersionInfo.parse(re.findall(r'\d\.\d+\.\d+\b', comparator)[0])
                # Order matters: '<=' / '>=' must be tested before the bare
                # '<' / '>' patterns, which would also match them.
                if re.search(r'<=', comparator):
                    max_version = min(version, max_version)
                elif re.search(r'>=', comparator):
                    min_version = max(version, min_version)
                elif re.search(r'<', comparator):
                    # Strict upper bound: step down to the previous version,
                    # using patch 99 as the per-minor patch ceiling.
                    # NOTE(review): minor - 1 underflows for 'x.0.0' bounds —
                    # confirm such pragmas cannot occur here.
                    if version.patch == 0:
                        max_version = min(version.replace(minor=version.minor - 1, patch=99), max_version)
                    else:
                        max_version = min(version.replace(patch=version.patch - 1), max_version)
                elif re.search(r'>', comparator):
                    # Strict lower bound: step up to the next version.
                    if version.patch == 99:
                        min_version = max(version.replace(minor=version.minor + 1, patch=0), min_version)
                    else:
                        min_version = max(version.replace(patch=version.patch + 1), min_version)
                elif re.search(r'!=', comparator):
                    raise NotImplementedError('pragma version directive "!=" is not supported!')
                elif re.search(r'\^', comparator):
                    # Caret: same minor line, any patch from the given one up.
                    min_version = max(version, min_version)
                    max_version = min(version.replace(patch=99), max_version)
                else:
                    # Exact pin ('=', '==' or bare version).
                    min_version = max(version, min_version)
                    max_version = min(version, max_version)
    if min_version > max_version:
        raise ValueError(
            f'{self}: Pragma directive is either invalid or there are several pragma directives but no solidity compiler version satisfying every directive.')
    self.solc_from = str(min_version)
    self.solc_to = str(max_version)
/**
 * Row shape for the `messages` table. All fields are optional because
 * partial selects/inserts may omit columns.
 */
export interface Messages {
    /**
     * Note: This is a Primary Key.<pk/>
     */
    id?: string;
    /** Message body. */
    content?: string;
    /**
     * Note: This is a Foreign Key to `conversation.id`.<fk table='conversation' column='id'/>
     */
    conversationId?: string;
    /** Sender identifier — presumably a user id; verify against the schema. */
    from?: string;
    /** Recipient identifier — presumably a user id; verify against the schema. */
    to?: string;
}
Pre-edited and more complete version of my summary of the Paris Agreement published in Nature’s World View (Dec. 2015): http://www.nature.com/polopoly_fs/1.19074!/menu/main/topColumns/topLeftColumn/pdf/528437a.pdf A two-sided print format of this post is available at: Paris Summary 2015 The Paris Agreement is a genuine triumph of international diplomacy and of how the French people brought an often-fractious world together to see beyond national self interest. Moreover, the agreement is testament to how assiduous and painstaking science ultimately defeated the unremitting programme of misinformation by powerful vested interests. It is the twenty-first century’s equivalent to the success of Heliocentrism over the malign and unscientific inquisition. The international community not only acknowledged the seriousness of climate change, but demonstrated sufficient unanimity to quantitatively define it: to hold “the increase in … temperature to well below 2°C … and to pursue efforts to limit the temperature increase to 1.5°C”. But, as the time-weary idiom suggests, “the devil is in the detail” – or perhaps more importantly, the lack of it. So how then can such an unprecedented and momentous Agreement have potentially sown the seeds of its own demise? Likewise, why did some amongst the senior echelons of the climate change community see fit to unleash their rottweilers on those scientists voicing legitimate concern as to the evolving detail of the Agreement? The deepest challenge to whether the Agreement succeeds or fails, will not come from the incessant sniping of sceptics and luke-warmers or those politicians favouring a literal reading of Genesis over Darwin. Instead, it was set in train many years ago by a cadre of well-meaning scientists, engineers and economists investigating a Plan B. What if the international community fails to recognise that temperatures relate to ongoing cumulative emissions of greenhouse gases, particularly carbon dioxide? 
What if world leaders remain doggedly committed to a scientifically illiterate focus on 2050 (“not in my term of office”)? By then, any ‘carbon budget’ for even an outside chance of 2°C will have been squandered – and our global experiment will be hurtling towards 4°C or more. Hence the need to develop a Plan B. Well the answer was simple. If we choose to continue our love affair with oil, coal and gas, loading the atmosphere with evermore carbon dioxide, then at some later date when sense prevails, we’ll be forced to attempt sucking our carbon back out of the atmosphere. Whilst a plethora of exotic Dr Strangelove options vie for supremacy to deliver on such a grand project, those with the ear of governments have plumped for BECCS (biomass energy carbon capture and storage) as the most promising “negative emission technology”. However these government advisors (Integrated Assessment Modellers – clever folk developing ‘cost-optimised’ solutions to 2°C by combining physics with economic and behavioural modelling) no longer see negative emission technologies as a last ditch Plan B – but rather now promote it as central pivot of the one and only Plan. So what exactly does BECCS entail? Apportioning huge swathes of the planet’s landmass to the growing of bio-energy crops (from trees to tall grasses) – which, as they grow, absorb carbon dioxide through photosynthesis. Periodically these crops are harvested; processed for worldwide travel; shipped all around the globe and finally combusted in thermal powerstations. The carbon dioxide is then stripped from the waste gases; compressed (almost to a liquid); pumped through large pipes over potentially very long distances; and finally stored deep underground in various geological formations (from exhausted oil and gas reservoirs through to saline aquifers) for a millennium or so. The unquestioned reliance on negative emission technologies to deliver on the Paris goals is the greatest threat to the Agreement. 
Yet BECCS, or even negative emission technologies, received no direct reference throughout the thirty-two-page Paris Agreement. Despite this, the framing of the 2°C and (even more) the 1.5°C, goals, is fundamentally premised on the massive uptake of BECCS sometime in the latter half of the century. Disturbingly, this reliance on BECCS is also the case for most of the temperature estimates (e.g. 2.7°C) ascribed to the national pledges (INDCs) prior to the Paris COP. The sheer scale of the BECCS assumption underpinning the Agreement is breath taking – decades of ongoing planting and harvesting of energy crops over an area the size of one to three times that of India. At the same time the aviation industry anticipates fuelling its planes with bio-fuel, the shipping industry is seriously considering biomass to power its ships and the chemical sector sees biomass as a potential feedstock. And then there are 9 billion or so human mouths to feed. Surely this critical assumption deserved serious attention within the Agreement? Relying on the promise of industrial scale negative emission technologies to balance our carbon budget was not the only option available to Paris – at least in relation to 2°C. With CO2 emissions in 2015 over 60% higher than at the time of the first IPCC report in 1990, the carbon budget for 1.5°C has been all but eliminated. However, reducing emissions in line with 2°C does remain a viable goal – just. But rather than rely on tenuous post-2050 BECCS, this alternative approach begs immediate and profound political, economic and social questions; questions that undermine a decade of mathematically nebulous green-growth and win-win rhetoric. Not surprisingly this alluring rhetoric has been embraced by many of those in positions of power; all the more so as it has been promulgated by two influential groups. 
First, those, typically but not exclusively economists, who work on the premise that physical reality and the laws of thermodynamics are subservient to the ephemeral rules of today’s economic paradigm. And second, those vested interests desperate to preserve the status quo, but prepared to accept an incremental tweak to ‘business as usual’ as a sop to meaningful action (e.g. the opportunist enthusiasm of ‘progressive’ oil companies for “oh-so-clean” gas over “dirty & nasty” coal). But move away from the cosy tenets of contemporary economics and a suite of alternative opportunities for delivering the deep and early reductions in emissions necessary to stay within 2°C budgets come into focus. Demand-side technologies, behaviours and habits all are amenable to significant and rapid change – and guided by stringent policies could drive emissions down in the near-term. Combine this with an understanding that just 10% of the global population are responsible for around 50% of total emissions and the rate and scope of what is possible if we genuinely thought climate change was an important issue becomes evident. Imagine the Paris 2°C goal was sacrosanct. A 30% reduction in global emissions could be delivered in under a year, simply by constraining the emissions of that 10% responsible for half of all global CO2 to the level of a typical European. Clearly such a level is far from impoverished, and certainly for 2°C reductions in energy demand would need to go much further and be complemented with a Marshall-style transition to zero-carbon energy supply. Nevertheless, such an early and sizeable reduction is in stark contrast to the Paris Agreement’s presumption that ‘ambitious mitigation’ out to 2030 can only deliver around 2% p.a. (with negative emissions technologies in 2050 compensating for the relative inaction today). So why was this real opportunity for deep and early mitigation muscled out by the economic bouncers in Paris? 
No doubt there are many elaborate and nuanced explanations – but the headline reason is simple. In true Orwellian style, the political and economic dogma that has come to pervade all facets of society must not be questioned. For many years, those having the audacity to suggest that the carbon budgets associated with 2°C cannot be reconciled with green-growth oratory have been quashed by those eloquent big guns of academia who spend more time in government ministers' offices than they do in the laboratory or lecture room. However, as the various drafts of the Paris Agreement were circulated during the negotiations, there was a real sense of unease amongst many scientists present that the almost euphoric atmosphere accompanying the drafts could not be reconciled with their content. Desperate to maintain order, the rottweilers and even their influential handlers threatened and briefed against those daring to make informed comment – just look at some of the Twitter discussions! Not surprisingly the vested interests won out – and whilst the headline goals of the Paris Agreement are to be welcomed, the five year review timeframe eliminates any serious chance of maintaining emissions within even carbon budgets for a slim chance of 2°C. Science and careful analysis could have offered so much more – but instead we are left having to pray that speculative negative emission technologies will compensate for our own hubris. Two further and key failures of the Paris Agreement. Aviation and Shipping: the final version of the Agreement fails to make any reference to the aviation and shipping sectors, effectively exempting them from having to align their emissions with the 2°C goal. Unfortunately, the emissions from these two privileged sectors are equivalent to those of the UK and Germany combined.
Moreover, both aviation and shipping anticipate huge increases in their absolute emissions as the sectors continue to grow – emissions that will only serve to further jeopardise any prospect for bequeathing future generations a stable climate. Reparation for the poor: finally, there’s the sum of $100 billion that the Paris Agreement proposes should be available as annual support (I prefer reparation) to poorer nations to assist both their development of low-carbon infrastructure and their adaption to an increasingly changing climate. Say it quickly and $100 billion has a resounding ring – but wait a few seconds and the echo diminishes to a cheap and tinny ‘ching’. The normally very conservative international monetary fund (IMF) estimates that the global subsidy (direct and indirect) to the fossil fuel industry in 2015 alone will be $5.3 trillion dollars; fifty three times more than the Paris monies allocated to poorer nations. The UK is a small island nation on the periphery of Europe and with a population of 65 million. Yet it has an economy twenty nine times larger than the monies offered to billions of poorer people to leapfrog our high carbon energy system and adapt to the changing climate we’ve chosen to impose on them. The clever deception of the wealthier and high emitting nations in Paris, was to focus arguments on the details of the $100 billion crumb, circumventing any meaningful discussion of the much larger level of reparations necessary for the poorer nations to actually transition towards a low carbon, climate resilient and prosperous future. Tentative reflections a fortnight on Here we are a fortnight or so on from Paris – and the dust has all but settled. Turn on the radio and the BBC is reporting on whether the UK should expand its London airport capacity at Gatwick or Heathrow. 
No reference to Paris, CO2 emissions or the plight of millions who will suffer the consequences of such decisions, but will only ever see aircraft streaking across the sky 35000 feet above. Next up, the BBC reports on how the UK’s Department of Energy and Climate Change, its Chief Scientific Advisor and the UK’s Environment Agency all enthusiastically support the development of indigenous shale gas – and yet all forget to mention that the UK Government has just reneged on its support for carbon capture and storage. Another high-carbon energy source at odds with Paris and 2°C carbon budgets is simply added to UK’s portfolio of North Sea oil and gas without even a squirm of unease from those authorities who should know better. So where are we now? Future techno-utopias, pennies for the poor, more fossil fuels, co-opted NGOs and an expert community all too often silenced by fear of reprisals and reduced funding. It doesn’t need to be like this. Forget the vacuous content, it’s the wonderful spirit of the Paris Agreement and the French people on which we need to build – and fast! The pursuit of a low-carbon future could do much worse than be guided by the open concepts of liberté, égalité et fraternité.
<reponame>efeslab/bugbase<gh_stars>1-10 #!/usr/bin/env python3 # coding=utf-8 """ Integration tests for the complete framework This file automatically discovers all Compiler subclasses in 'plugins/${package}/tests/*' and runs all programs against them and their declared plugins concurrently. """ from contextlib import suppress import importlib import logging import multiprocessing import os import shutil import time import unittest from install import main from lib import get_subclasses from lib.configuration import dependencies from lib.constants import PLUGINS_PATH, ROOT_PATH from lib.exceptions import ProgramNotInstalledException from lib.logger import setup_logging from lib.parsers.configuration import get_global_conf, get_trigger_conf from lib.plugins import BasePlugin import run from tests import TEST_DIRECTORY, SAVE_DIRECTORY from tests.lib.structures import Compiler, Program, Plugin __author__ = "<NAME>, <EMAIL>" # prepare a manager for synchronization RESOURCES_MANAGER = multiprocessing.Manager() # pylint: disable=no-member def bound_value(value: int, minimum: int=0, maximum: int=9) -> int: """ Bounds a value between an upper and lower bound :param value: the value to bound :param minimum: the minimal allowed value :param maximum: the maximal allowed value :return: the bounded value """ return minimum if value < minimum else maximum if value > maximum else value class TestRunner(unittest.TestCase): """ The Test runner, containing all integration tests, numbered """ log_directory = os.path.join(get_global_conf().getdir("trigger", "default_directory"), "tests-results", "logs") _multiprocess_can_split_ = True @classmethod def setUpClass(cls) -> None: """ The class setup, ensures the log directory is ready """ # pylint: disable=no-member get_global_conf().set("install", "make_args", "-j,-l{}".format(multiprocessing.cpu_count())) setup_logging() class EnvManager: # pylint: disable=too-few-public-methods """ An environment manager for the runs. 
Saves automatically logs of failing runs """ def __init__(self, _compiler_: Compiler, file_suffix: str): self.compiler = _compiler_ self.filename = "{}-{}-{}-{}.txt".format( _compiler_.package, _compiler_.name, "{}wllvm".format("" if _compiler_.bitcode else "no-"), file_suffix ) def __enter__(self): logging.getLogger().setLevel(0) wllvm = "wllvm" if self.compiler.bitcode else "no-wllvm" get_global_conf().set("install", "compiler", "{}.{}".format(self.compiler.package, self.compiler.name)) get_global_conf().set("install", "llvm_bitcode", str(self.compiler.bitcode)) get_global_conf()["DEFAULT"]["default_directory"] = \ os.path.join(TEST_DIRECTORY, self.compiler.package, self.compiler.name, wllvm) get_global_conf().set("install", "source_directory", os.path.join(ROOT_PATH, "src")) handlers = logging.getLogger().handlers while len(handlers) > 0: handlers[0].close() logging.getLogger().removeHandler(handlers[0]) logging.getLogger().addHandler(logging.FileHandler(os.path.join(TestRunner.log_directory, self.filename))) get_global_conf().set( "plugins", "enabled_plugins", ",".join(["{}.{}".format(plugin.package, plugin.name) for plugin in self.compiler.plugins]) ) for plugin in self.compiler.plugins: importlib.import_module("plugins.{}.{}".format(plugin.package, plugin.name)) # noinspection PyUnusedLocal def __exit__(self, exc_type, exc_val, exc_tb): if exc_type is not None: # pragma nocover as this is only on fail and our tests should not fail shutil.move( os.path.join(TestRunner.log_directory, self.filename), os.path.join(SAVE_DIRECTORY, self.filename) ) # pragma nocover idem def configure(self, compiler: Compiler) -> None: """ Configures the environment to run with the given compiler :param compiler: the compiler to use """ try: with TestRunner.EnvManager(compiler, "configure"): self.assertFalse( dependencies.install(False), "Could not install dependencies for {} with{} bitcode".format( compiler.name, "out" if compiler.bitcode else "" ) ) for plugin_info in 
compiler.plugins: log_file = "{}-{}-configuration".format(plugin_info.package, plugin_info.name) with TestRunner.EnvManager(compiler, log_file): plugin = [ subclass for subclass in get_subclasses(BasePlugin) if subclass.__name__.lower() == plugin_info.name ][0] self.assertFalse(plugin().configure(force=False)) finally: compiler.is_configured.set() def compile(self, _compiler_: Compiler, _program_: Program) -> None: """ Compiles and installs the given program with the given compiler :param _compiler_: the compiler to use :param _program_: the program to compile """ _compiler_.is_configured.wait() try: with TestRunner.EnvManager(_compiler_, _program_.name): error = main([_program_.name], True, 1) self.assertFalse(error, "The program {} failed to compile with {}".format( _program_.name, get_global_conf().get("install", "compiler"))) # Checks that bitcode was indeed created at the correct place if _compiler_.bitcode: conf = get_trigger_conf(_program_.name) self.assertTrue(os.path.exists(conf.get_executable() + ".bc")) finally: _program_.is_installed.set() def run_main_plugin(self, _compiler_: Compiler, _plugin_: Plugin, _program_: Program): """ Runs the given plugin against the given program :param _compiler_: the compiler used to compile the program :param _plugin_: the plugin to test :param _program_: the program to test """ _program_.is_installed.wait() with _program_.lock, TestRunner.EnvManager(_compiler_, "{}-{}".format(_plugin_.name, _program_.name)): plugin = [ subclass for subclass in get_subclasses(BasePlugin) if subclass.__name__.lower() == _plugin_.name ][0] if hasattr(_plugin_, "main_plugin"): analysis_plugins = [plugin] plugin = _plugin_.main_plugin else: analysis_plugins = [] _plugin_.pre_run() # noinspection PyBroadException try: self.assertFalse(run.trigger_bug(_program_.name, plugin(), analysis_plugins=analysis_plugins)) except ProgramNotInstalledException: raise unittest.SkipTest("{} is not installed".format(_program_.name)) except Exception: # with 
concurrency, tests might fail. Let's retry once time.sleep(2) # let's sleep a bit before, timing might be bad self.assertFalse(run.trigger_bug(_program_.name, plugin(), analysis_plugins=analysis_plugins)) time.sleep(2) def load_compilers() -> None: """ Imports all tests in the plugins/packages directory, to allow for Compiler instance discoveries """ for package in os.listdir(PLUGINS_PATH): if os.path.isdir(os.path.join(PLUGINS_PATH, package)) and package != "__pycache__": for test_file in os.listdir(os.path.join(PLUGINS_PATH, package, "tests")): importlib.import_module("plugins.{}.tests.{}".format(package, os.path.splitext(test_file)[0])) def add_plugin_run(_compiler_: Compiler, _program_: Program, plugin: Plugin) -> None: """ Adds a plugin to run against the given program :param _compiler_: :param _program_: :param plugin: :return: """ function_name = "test_9{}_{}_{}".format(bound_value(plugin.priority), plugin.name, _program_.name) setattr( TestRunner, function_name, lambda x, comp=_compiler_, prog=_program_, plug=plugin: TestRunner.run_main_plugin(x, comp, plug, prog) ) setattr(getattr(TestRunner, function_name), "__name__", function_name) def add_programs_compile(compiler: Compiler) -> None: """ For all programs, add them to the compiler run list and register a plugin call for them :param compiler: the compiler to use """ for program_name in get_global_conf().getlist("install", "programs"): program = Program(program_name, RESOURCES_MANAGER.Lock(), RESOURCES_MANAGER.Event()) function_name = "test_5{}_{}_{}_{}wllvm_{}".format( bound_value(compiler.priority), compiler.package, compiler.name, "no-" if not compiler.bitcode else "", program.name ) setattr( TestRunner, function_name, lambda x, comp=compiler, prog=program: TestRunner.compile(x, comp, prog) ) setattr(getattr(TestRunner, function_name), "__name__", function_name) for plugin in compiler.plugins: add_plugin_run(compiler, program, plugin) def add_compilers(_compilers_: list) -> None: """ For all compiler 
in _compilers_, add a configure script for them and register all programs for them :param _compilers_: the list of compilers to use """ for compiler_class in _compilers_: compiler = compiler_class(RESOURCES_MANAGER.Event()) function_name = \ "test_1{}_{}_{}_{}wllvm".format( bound_value(compiler.priority), compiler.package, compiler.name, "no-" if not compiler.bitcode else "" ) setattr(TestRunner, function_name, lambda x, comp=compiler: TestRunner.configure(x, comp)) setattr(getattr(TestRunner, function_name), "__name__", function_name) add_programs_compile(compiler) def clean_working_directory() -> None: """ Removes old logs before running """ with suppress(FileNotFoundError): shutil.rmtree(TestRunner.log_directory) with suppress(FileNotFoundError): shutil.rmtree(SAVE_DIRECTORY) os.makedirs(TestRunner.log_directory) os.makedirs(SAVE_DIRECTORY) # Add all functions to TestRunner on initialization clean_working_directory() load_compilers() COMPILERS = get_subclasses(Compiler) add_compilers(COMPILERS)
def add_item(self, name, url): self.insert('', 'end', values=(name, url, "")) s.updateItem({'item': name, 'url': url, 'status': '', 'pstatus': ''}) self.selection_clear()
class A{ A(){ System.out.println("Constructor of A"); } } class B extends A{ B(){ System.out.println("Constructor of B"); } } class C extends B{ C(){ System.out.println("Constructor of C"); } } class ConstructorCalling{ public static void main(String [] args) { //Constructor are Called super to sub class C obj = new C(); } }
#ifndef WIBBLE_SYS_BUFFER_H #define WIBBLE_SYS_BUFFER_H /* * Variable-size, reference-counted memory buffer * * Copyright (C) 2003--2006 <NAME> <<EMAIL>> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include <stddef.h> // for size_t namespace wibble { namespace sys { /** * Variable-size, reference-counted memory buffer. 
*/ class Buffer { public: class Data { protected: mutable int _ref; size_t _size; void* _data; public: Data() throw () : _ref(0), _size(0), _data(0) {} Data(size_t size); // if own == true, take possession of the memory buffer, else copy it Data(void* buf, size_t size, bool own = true); Data(const void* buf, size_t size); ~Data(); /// Increment the reference count for this object void ref() const throw () { ++_ref; } /// Decrement the reference count for this object, returning true when it /// reaches 0 bool unref() const throw () { return --_ref == 0; } /// Resize (enlarging or shrinking it) the buffer to `size' bytes void resize(size_t size); /// Compare the contents of two buffers bool operator==(const Data& d) const throw(); /// Compare the contents of two buffers bool operator<(const Data& d) const throw(); friend class Buffer; }; Data* item; public: /// Create a 0-lenght buffer Buffer() throw () : item(0) {} /// Create a buffer with the specified size Buffer(size_t size) : item(0) { if (size) { item = new Data(size); item->ref(); } } /** * Create a buffer from existing data * * @param buf * The data to put in this buffer * @param size * The dimension of buf * @param own * If true, take ownership of buf, else make a copy of it. */ Buffer(void* buf, size_t size, bool own = true) : item(0) { if (size) { item = new Data(buf, size, own); item->ref(); } } /** * Create a buffer with a copy of the given data * * It will always make a copy of the contents of buf. 
*/ Buffer(const void* buf, size_t size) : item(0) { if (size) { item = new Data(buf, size); item->ref(); } } Buffer(const Buffer& buf) throw () { if (buf.item) buf.item->ref(); item = buf.item; } ~Buffer() { if (item && item->unref()) delete item; } Buffer& operator=(const Buffer& buf) { if (buf.item) buf.item->ref(); // Do it early to correctly handle the case of x = x; if (item && item->unref()) delete item; item = buf.item; return *this; } /// Return a pointer to the buffer void* data() throw () { return item ? item->_data : 0; } /// Return a pointer to the buffer const void* data() const throw () { return item ? item->_data : 0; } /// Return the buffer size size_t size() const throw () { return item ? item->_size : 0; } /// Resize the buffer to hold exactly the specified amount of bytes void resize(size_t newSize) { if (size() == newSize) return; if (!newSize) { if (item && item->unref()) delete item; item = 0; } else if (item) { item->resize(newSize); } else { item = new Data(newSize); item->ref(); } } /// Compare the contents of two buffers bool operator==(const Buffer& buf) const throw () { if (item == 0 && buf.item == 0) return true; if (item == 0 || buf.item == 0) return false; return *item == *buf.item; } bool operator!=(const Buffer& buf) const throw () { return !operator==(buf); } /// Compare the contents of two buffers bool operator<(const Buffer& buf) const throw () { if (item == 0 && buf.item == 0) return false; if (item == 0) return true; if (buf.item == 0) return false; return *item < *buf.item; } }; } } // vim:set ts=4 sw=4: #endif
// Set of configuration blocks describing the custom action definitions that are available for use in the firewall policy's `statelessDefaultActions`. See Stateless Custom Action below for details. func (o FirewallPolicyFirewallPolicyPtrOutput) StatelessCustomActions() FirewallPolicyFirewallPolicyStatelessCustomActionArrayOutput { return o.ApplyT(func(v *FirewallPolicyFirewallPolicy) []FirewallPolicyFirewallPolicyStatelessCustomAction { if v == nil { return nil } return v.StatelessCustomActions }).(FirewallPolicyFirewallPolicyStatelessCustomActionArrayOutput) }
package lx.af.widget; import android.animation.LayoutTransition; import android.content.Context; import android.content.DialogInterface; import android.content.res.TypedArray; import android.support.annotation.NonNull; import android.support.v7.app.AlertDialog; import android.util.AttributeSet; import android.view.View; import android.widget.ImageView; import com.nostra13.universalimageloader.core.ImageLoader; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import lx.af.R; import lx.af.activity.ImageEditor.ImageEditorActivity; import lx.af.base.AbsBaseActivity; import lx.af.utils.ActivityLauncher.ActivityResultCallback; import lx.af.utils.ActivityLauncher.ImageBrowserLauncher; import lx.af.utils.ActivityLauncher.ImageSelectorLauncher; import lx.af.utils.ActivityLauncher.SimpleStringLauncher; import lx.af.utils.ScreenUtils; import lx.af.widget.FlowLayout.FlowLayout; /** * author: lx * date: 16-3-19 */ public class SelectImageWidget extends FlowLayout { private static final int DEFAULT_IMAGE_MARGIN = ScreenUtils.dip2px(3); private static final int DEFAULT_MAX_COUNT = 9; private View mAddView; private int mMaxCount; private int mImageSize; private int mImageMargin; private ArrayList<String> mPathList = new ArrayList<>(); private HashMap<String, ImageView> mImageViewMap = new HashMap<>(); private LinkedList<ImageView> mImageViewRecycler = new LinkedList<>(); private ImageClickListener mImageClickListener = new ImageClickListener(); private ImageListChangeListener mChangeListener; public SelectImageWidget(Context context, AttributeSet attrs) { super(context, attrs); initView(context, attrs); } public SelectImageWidget(Context context) { super(context); initView(context, null); } public void setMaxImageCount(int count) { mMaxCount = count; } public int getMaxImageCount() { return mMaxCount; } public void setAddView(View addView) { if (mAddView != null) { 
addView.setVisibility(mAddView.getVisibility()); } if (mAddView != null && mAddView.getParent() == this) { addView.setLayoutParams(createLayoutParams()); removeView(mAddView); addView(addView); } else if (mImageSize != 0) { addView.setLayoutParams(createLayoutParams()); addView(addView); } mAddView = addView; } public void setImageListChangeListener(ImageListChangeListener l) { mChangeListener = l; } public void setItemClickListener(ItemClickListener l) { mItemClickListener = l; } @NonNull public ArrayList<String> getImagePathList() { return mPathList; } public void resetImagePathList(List<String> pathList) { boolean changed = false; // remove views representing path that has been removed Iterator<String> it = mPathList.iterator(); while (it.hasNext()) { String path = it.next(); if (!pathList.contains(path)) { // the path has been removed, remove the view ImageView img = mImageViewMap.get(path); if (img == null) { continue; } removeView(img); it.remove(); mImageViewMap.remove(path); mImageViewRecycler.add(img); changed = true; } } // add views representing path that has currently been added it = pathList.iterator(); while (it.hasNext()) { String path = it.next(); if (!mPathList.contains(path)) { // the path has been currently added, add view for it ImageView img = getImageView(); img.setTag(path); addView(img, mPathList.size()); ImageLoader.getInstance().displayImage("file://" + path, img); mPathList.add(path); mImageViewMap.put(path, img); changed = true; } } mAddView.setVisibility(mPathList.size() < mMaxCount ? 
View.VISIBLE : View.GONE); if (changed) { notifyImageListChanged(); } } public void addImagePathList(List<String> pathList) { if (pathList == null || pathList.size() == 0) { return; } boolean changed = false; for (int i = 0; i < pathList.size(); i ++) { if (mPathList.size() >= mMaxCount) { break; } String path = pathList.get(i); ImageView img = getImageView(); img.setTag(path); addView(img, mPathList.size()); ImageLoader.getInstance().displayImage("file://" + path, img); mPathList.add(path); mImageViewMap.put(path, img); changed = true; } mAddView.setVisibility(mPathList.size() < mMaxCount ? View.VISIBLE : View.GONE); if (changed) { notifyImageListChanged(); } } public void addImagePath(String path) { if (mPathList.contains(path)) { return; } ImageView img = getImageView(); img.setTag(path); addView(img, mPathList.size()); ImageLoader.getInstance().displayImage("file://" + path, img); mPathList.add(path); mImageViewMap.put(path, img); mAddView.setVisibility(mPathList.size() < mMaxCount ? View.VISIBLE : View.GONE); notifyImageListChanged(); } public void replaceImagePath(String originPath, String newPath) { ImageView img = mImageViewMap.get(originPath); if (img == null) { return; } int idx = mPathList.indexOf(originPath); mPathList.add(idx, newPath); mPathList.remove(originPath); mImageViewMap.remove(originPath); mImageViewMap.put(newPath, img); img.setTag(newPath); ImageLoader.getInstance().displayImage("file://" + newPath, img); notifyImageListChanged(); } public void removeImagePath(String path) { ImageView img = mImageViewMap.get(path); if (img == null) { return; } removeView(img); mPathList.remove(path); mImageViewMap.remove(path); mImageViewRecycler.add(img); mAddView.setVisibility(mPathList.size() < mMaxCount ? 
View.VISIBLE : View.GONE); notifyImageListChanged(); } public void startSelectImage() { startImageSelector(); } // ============================================== @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); if (w != 0 && mImageSize == 0) { int screenWidth = ScreenUtils.getScreenWidth(); int paddingHorizontal = getPaddingLeft() + getPaddingRight(); if (w > (screenWidth * 4 / 5)) { int count = mMaxCount < 5 ? 4 : 5; mImageSize = (w - mImageMargin * count * 2 - paddingHorizontal) / count; } else { mImageSize = (screenWidth - mImageMargin * 10 - paddingHorizontal) / 5; } if (mAddView == null) { mAddView = createDefaultAddView(); addView(mAddView); } else if (mAddView.getParent() != this) { addView(mAddView); } } } private void initView(Context context, AttributeSet attrs) { setLayoutTransition(new LayoutTransition()); if (attrs != null) { TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.SelectImageWidget); mImageSize = a.getDimensionPixelSize(R.styleable.SelectImageWidget_selectImageItemSize, 0); mImageMargin = a.getDimensionPixelSize(R.styleable.SelectImageWidget_selectImageItemMargin, DEFAULT_IMAGE_MARGIN); mMaxCount = a.getInteger(R.styleable.SelectImageWidget_selectImageMaxCount, DEFAULT_MAX_COUNT); a.recycle(); } else { mImageMargin = DEFAULT_IMAGE_MARGIN; mMaxCount = DEFAULT_MAX_COUNT; } } private ImageView getImageView() { if (mImageViewRecycler.size() != 0) { return mImageViewRecycler.pop(); } if (mImageMargin == 0) { mImageMargin = ScreenUtils.dip2px(4); } if (mImageSize == 0) { mImageSize = (ScreenUtils.getScreenWidth() - 6 * mImageMargin) / 5; } ImageView img = new ImageView(getContext()); img.setLayoutParams(createLayoutParams()); img.setScaleType(ImageView.ScaleType.CENTER_CROP); img.setOnClickListener(mImageClickListener); img.setOnLongClickListener(mImageClickListener); return img; } private View createDefaultAddView() { if (mImageMargin == 0) { mImageMargin = 
ScreenUtils.dip2px(4); } if (mImageSize == 0) { mImageSize = (ScreenUtils.getScreenWidth() - 6 * mImageMargin) / 5; } ImageView img = new ImageView(getContext()); img.setLayoutParams(createLayoutParams()); img.setImageResource(R.drawable.ic_image_add); img.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { mItemClickListener.onImageAddClicked(SelectImageWidget.this, mAddView); } }); return img; } private MarginLayoutParams createLayoutParams() { MarginLayoutParams params = new MarginLayoutParams(mImageSize, mImageSize); params.setMargins(mImageMargin, mImageMargin, mImageMargin, mImageMargin); return params; } private void notifyImageListChanged() { if (mChangeListener != null) { ArrayList<String> list = new ArrayList<>(mPathList.size()); list.addAll(mPathList); mChangeListener.onImageListChanged(list); } } private void showMenuDialog(final String imagePath) { String[] menu = new String[] { getResources().getString(R.string.add_image_widget_menu_doodle), getResources().getString(R.string.add_image_widget_menu_delete), }; new AlertDialog.Builder(getContext()) .setItems(menu, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { if (which == 0) { startImageEditor(imagePath); } else if (which == 1) { removeImagePath(imagePath); } } }) .setNegativeButton(android.R.string.cancel, null) .create().show(); } private void startImageEditor(final String imagePath) { AbsBaseActivity activity = (AbsBaseActivity) getContext(); SimpleStringLauncher .of(activity, ImageEditorActivity.class, ImageEditorActivity.EXTRA_RESULT) .putExtra(ImageEditorActivity.EXTRA_PATH, imagePath) .start(new ActivityResultCallback<String>() { @Override public void onActivityResult(int resultCode, @NonNull String result) { replaceImagePath(imagePath, result); } }); } private void startImageSelector() { AbsBaseActivity activity = (AbsBaseActivity) getContext(); ImageSelectorLauncher.of(activity) .count(mMaxCount) 
.preSelect(mPathList) .start(new ActivityResultCallback<ArrayList<String>>() { @Override public void onActivityResult(int resultCode, @NonNull ArrayList<String> result) { resetImagePathList(result); } }); } private ItemClickListener mItemClickListener = new ItemClickListener() { @Override public void onImageAddClicked(SelectImageWidget container, View addView) { startImageSelector(); } @Override public void onImageClicked(SelectImageWidget container, ImageView imageView, @NonNull String path) { ImageBrowserLauncher.of(getContext()) .tapExit(true) .paths(container.getImagePathList()) .currentPath(path) .currentView(imageView) .start(); } @Override public boolean onImageLongClicked(SelectImageWidget container, ImageView imageView, @NonNull String path) { showMenuDialog(path); return true; } }; private class ImageClickListener implements OnClickListener, OnLongClickListener { @Override public void onClick(View v) { String path = (String) v.getTag(); mItemClickListener.onImageClicked(SelectImageWidget.this, (ImageView) v, path); } @Override public boolean onLongClick(View v) { String path = (String) v.getTag(); return mItemClickListener.onImageLongClicked(SelectImageWidget.this, (ImageView) v, path); } } public interface ItemClickListener { void onImageAddClicked(SelectImageWidget container, View addView); void onImageClicked(SelectImageWidget container, ImageView imageView, @NonNull String path); boolean onImageLongClicked(SelectImageWidget container, ImageView imageView, @NonNull String path); } public interface ImageListChangeListener { void onImageListChanged(@NonNull ArrayList<String> imageList); } }
/** * here we initialize all the actors for the scene * * @return actors */ private void initActor() { background = new BaseActor(); spaceShip = new BaseActor(); meteor = new BaseActor(); win = new BaseActor(); labels = new LabelTextGround(); }
/** * @brief Set difficulty of the game. * * @param difficulty (lower is more difficult) */ void Save::setDifficulty(uint difficulty) { get()._saveJs->u("difficulty") = difficulty; if (get()._saveJs->j("state").u("life") > difficulty) get()._saveJs->j("state").u("life") = difficulty; }
package com.twu.biblioteca.interfaces; public interface Selectable { void select(); }
/** \fn bool HdlProgram::isUniformVariableValid(const std::string& varName) \brief Check if a variable is valid from its name. \param varName The name of the uniform variable to read from. \return True if the name is valid (see glGetUniformLocation at http://www.opengl.org/sdk/docs/man/xhtml/glGetUniformLocation.xml) or false otherwise. **/ bool HdlProgram::isUniformVariableValid(const std::string& varName) { if(program==0) return false; else return glGetUniformLocation(program, varName.c_str()) != -1; }
Apple has released a new iPhone 7 ad called “Race” made exclusively for Japan. The new 30-second spot features the Jet Black iPhone 7 Plus and a very fast-paced run between two men through a busy part of town. The ad concludes with the iPhone-equipped man winning the so-called race by using Apple Pay to quickly board a train… Apple Pay only launched in Japan in late October of this year after first debuting in the United States and a few other countries two years ago. Bringing Apple’s mobile payment service required integrating FeliCa mobile payment technology which is widely used in the country. FeliCa is a technology developed by Sony that allows users to store things like train cards, bus passes, store cards, and credit and debit cards on their mobile devices. The technology is the most widely used mobile payment platform in Japan with over 1.9 million payment terminal supporting it. Last year, FeliCa payment transactions totaled $46 billion in Japan. Using Apple Pay in Japan requires special hardware because of this technology: To use FeliCa, you not only need either an iPhone 7 or an Apple Watch Series 2, but it also needs to be a Japanese model: devices sold in the U.S. and elsewhere are not compatible. Locals will be able to use Apple Pay at any terminal supporting Suica, QUICPay, or iD. You can view the Japanese iPhone ad below via Apple Japan on YouTube: (H/T Michael Steeber)
Stephen Colbert delivered a scathing monologue directed at President Donald Trump and his meeting with Russian President Vladimir Putin on the Late Show Wednesday. “People think you colluded with the president of a hostile foreign power, then you go out to meet with him again — and you don’t tell anybody? That’s like if O.J. does get on parole and immediately goes glove shopping,” Colbert said, referencing O.J. Simpson’s parole hearing, which is scheduled for 1 p.m. E.T. Thursday. News broke Tuesday that Trump and Putin met a second time in Germany, which was undisclosed. The meeting took place after a G20 dinner in Hamburg, Germany, for world leaders and their spouses earlier in the month. Colbert noted that the White House has called the meeting “a brief conversation at the end of dinner.” However, the meeting reportedly went on for about an hour. The Brief Newsletter Sign up to receive the top stories you need to know right now. View Sample Sign Up Now “This might actually be a nothing burger,” Colbert said after commenting that he wants to believe Trump. “But every time they tell us it’s a nothing burger, it turns out to be a juicy quarter pounder with sleaze.” Trump responded to reports of the second meeting in a series of tweets Tuesday writing, “Fake News story of secret dinner with Putin is ‘sick.’” Contact us at [email protected].
#include<iostream> #include <string> #include"ConfigReader.h" using namespace std; int main() { int i; std::string s; //ConfigReader a("ConfigData.txt"); ConfigReader a((std::getline(std::cin,s))); a.checkData(); cin >> i; return 0; }
/** * Update HashMap of Nodes connected of cluster this broker is connected * to * * @param response */ private void updatePool(CommandResponse response, boolean queue) { NewHashMap map = null; if (_logger.getIsDetailedLogsEnabled()) { _logger.getCacheLog().Debug("udpatePool:\tUpdate pool called."); } if (_logger.getIsDetailedLogsEnabled()) { _logger.getCacheLog().Debug("Broker.UpdatePool", "Update pool called."); } try { if (queue == false) { map = (NewHashMap) response.getResultMap(); } else if (response.getValue() != null) { com.alachisoft.tayzgrid.common.datastructures.NewHashmap newHashMap = (com.alachisoft.tayzgrid.common.datastructures.NewHashmap) CompactBinaryFormatter.fromByteBuffer(response.getValue(), cacheId); map = new NewHashMap(newHashMap.getLastViewId(), newHashMap.getMap(), newHashMap.getMembers(), 0, newHashMap.getUpdateMap()); } } catch (UnsupportedEncodingException ex) { if (_logger.getIsErrorLogsEnabled()) { _logger.getCacheLog().Error("Broker.UpdatePool", ex.toString()); } } catch (IOException ex) { if (_logger.getIsErrorLogsEnabled()) { _logger.getCacheLog().Error("Broker.UpdatePool", ex.toString()); } } catch (ClassNotFoundException ex) { if (_logger.getIsErrorLogsEnabled()) { _logger.getCacheLog().Error("Broker.UpdatePool", ex.toString()); } } if (map == null || map.getMap() == null) { this._importHashmap = false; return; } if (!queue) { this._pool.setBucketSize(map.getBucketSize()); this.newHashmapRecieved(map); } else { this._newMapTask.queue(map); } }
// Clone returns a copy of the subtree rooted at n. func (n *Node) clone() *Node { if n == nil { return nil } return &Node{ n.value, n.childL.clone(), n.childR.clone(), n.balanceFactor, } }
package interviewQuestion;

import java.util.Deque;
import java.util.LinkedList;
import java.util.Scanner;

/**
 * "Green Channel" style problem: binary search over the answer combined with a
 * sliding-window minimum DP maintained by a monotonic deque.
 *
 * Input: n (number of items), t (time budget), then w[1..n] (cost per item).
 */
public class GreenChannel {
    // array capacity for up to ~50,000 items, 1-based indexing
    static int N=50010;
    // n = number of items, t = total time budget
    static int n,t;
    // dp[i] = minimum accumulated cost when item i is chosen (see check)
    static int[] dp=new int[N];
    // w[i] = cost of item i (1-based)
    static int[] w=new int[N];
    // monotonic deque of indices; the front always holds the window minimum dp
    static Deque<Integer> q=new LinkedList<>();

    public static void main(String[] args){
        Scanner sc=new Scanner(System.in);
        n=sc.nextInt();
        t=sc.nextInt();
        for(int i=1;i<=n;i++){
            w[i]=sc.nextInt();
        }
        // binary search the smallest window size `key` for which check(key) holds
        int left=1,right=n;
        while(left<right){
            int mid=left+right>>1;
            if(check(mid)){
                right=mid;
            }else{
                left=mid+1;
            }
        }
        // NOTE(review): prints left-1, i.e. one less than the smallest feasible
        // window — presumably the longest allowed run of skipped items; confirm
        // against the original problem statement.
        System.out.println(left-1);
    }

    /**
     * Returns true when, choosing at least one item out of every `key`
     * consecutive items, the cheapest selection fits in budget t.
     */
    public static boolean check(int key){
        q.clear();
        // dp[i+1] = w[i+1] + min(dp[j]) over the last `key` positions; the
        // deque keeps that minimum available in O(1) amortized per step.
        for(int i=0;i<n;i++){
            // evict indices that slid out of the window
            while(!q.isEmpty()&&q.getFirst()<=i-key)
                q.removeFirst();
            // keep dp values strictly increasing front-to-back
            while(!q.isEmpty()&&dp[q.getLast()]>=dp[i])
                q.removeLast();
            q.offer(i);
            dp[i+1]=dp[q.getFirst()]+w[i+1];
        }
        // feasible if some choice among the last `key` positions fits the budget
        int res=Integer.MAX_VALUE;
        for(int i=n;i>n-key;i--)
            res=Math.min(res,dp[i]);
        return res<=t;
    }
}
/*	$OpenBSD: uslhcomreg.h,v 1.2 2015/01/22 14:33:01 krw Exp $	*/

/*
 * Copyright (c) 2015 <NAME> <<EMAIL>>
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

/*
 * Register/report definitions for the Silicon Labs CP2110/4 USB-HID UART
 * (uslhcom driver). Report IDs and payload layouts follow Silicon Labs
 * Application Note AN434 unless noted otherwise below.
 */

/* each TX report is prefixed by a one-byte header */
#define USLHCOM_TX_HEADER_SIZE	sizeof(u_char)

#define SET_TRANSMIT_DATA(x)	(x)
#define GET_RECEIVE_DATA(x)	(x)

/* HID report IDs */
#define SET_DEVICE_RESET	0x40
#define GET_SET_UART_ENABLE	0x41
#define GET_UART_STATUS		0x42
#define SET_CLEAR_FIFOS		0x43
#define GET_GPIO_STATE		0x44
#define SET_GPIO_STATE		0x45
#define GET_VERSION		0x46
#define GET_SET_OTP_LOCK_BYTE	0x47

#define GET_SET_UART_CONFIG	0x50
#define SET_TRANSMIT_LINE_BREAK	0x51
#define SET_STOP_LINE_BREAK	0x52

/* SET_DEVICE_RESET */
#define DEVICE_RESET_VALUE	0x00

/* GET_SET_UART_ENABLE */
#define UART_DISABLE		0x00
#define UART_ENABLE		0x01

/* GET_UART_STATUS */
struct uslhcom_uart_status {
	u_char	tx_fifo[2];	/* (big endian) */
	u_char	rx_fifo[2];	/* (big endian) */
	u_char	error_status;
	u_char	break_status;
} __packed;
#define ERROR_STATUS_PARITY	0x01
#define ERROR_STATUS_OVERRUN	0x02
#define BREAK_STATUS		0x01

/* SET_CLEAR_FIFO */
#define CLEAR_TX_FIFO		0x01
#define CLEAR_RX_FIFO		0x02

/* GET_VERSION */
struct uslhcom_version_info {
	u_char	product_id;
	u_char	product_revision;
} __packed;

/* GET_SET_UART_CONFIG */
struct uslhcom_uart_config {
	u_char	baud_rate[4];	/* (big endian) */
	u_char	parity;
	u_char	data_control;
	u_char	data_bits;
	u_char	stop_bits;
} __packed;

/*
 * Silicon Labs CP2110/4 Application Note (AN434) Rev 0.4 says that
 * valid baud rate is 300bps to 500,000bps.
 * But HidUartSample of CP2110 SDK accepts 50bps to 2,000,000bps.
 */
#define UART_CONFIG_BAUD_RATE_MIN	50
#define UART_CONFIG_BAUD_RATE_MAX	2000000

#define UART_CONFIG_PARITY_NONE		0x00
#define UART_CONFIG_PARITY_EVEN		0x01
#define UART_CONFIG_PARITY_ODD		0x02
#define UART_CONFIG_PARITY_MARK		0x03
#define UART_CONFIG_PARITY_SPACE	0x04

#define UART_CONFIG_DATA_CONTROL_NONE	0x00
#define UART_CONFIG_DATA_CONTROL_HARD	0x01

/*
 * AN434 Rev 0.4 describes setting 0x05 ... 0x08 to configure data bits.
 * But actually it requires different values.
 */
#define UART_CONFIG_DATA_BITS_5		0x00
#define UART_CONFIG_DATA_BITS_6		0x01
#define UART_CONFIG_DATA_BITS_7		0x02
#define UART_CONFIG_DATA_BITS_8		0x03

#define UART_CONFIG_STOP_BITS_1		0x00
#define UART_CONFIG_STOP_BITS_2		0x01
def deepupdate(obj, val, newval):
    """Return a deep copy of ``obj`` with every leaf equal to ``val`` replaced.

    Dicts and lists are descended into recursively (containers themselves are
    never compared against ``val``, matching the original precedence); any
    other value comparing equal to ``val`` becomes ``newval``.  As before,
    ``newval`` itself is shared across all replacement sites, and the input
    ``obj`` is never mutated.

    The original implementation called ``deepcopy`` at every recursion level,
    re-copying already-copied subtrees (O(n * depth) copying); this version
    copies once up front and then rewrites that copy in place.
    """
    def _swap(node):
        # Rewrite the already-copied structure in place; only leaves are
        # candidates for replacement.
        if isinstance(node, dict):
            for k in node:
                node[k] = _swap(node[k])
        elif isinstance(node, list):
            for i in range(len(node)):
                node[i] = _swap(node[i])
        elif node == val:
            return newval
        return node

    return _swap(deepcopy(obj))
// NewExposedOffchainAggregator creates a new instance of ExposedOffchainAggregator, bound to a specific deployed contract. func NewExposedOffchainAggregator(address common.Address, backend bind.ContractBackend) (*ExposedOffchainAggregator, error) { contract, err := bindExposedOffchainAggregator(address, backend, backend, backend) if err != nil { return nil, err } return &ExposedOffchainAggregator{ExposedOffchainAggregatorCaller: ExposedOffchainAggregatorCaller{contract: contract}, ExposedOffchainAggregatorTransactor: ExposedOffchainAggregatorTransactor{contract: contract}, ExposedOffchainAggregatorFilterer: ExposedOffchainAggregatorFilterer{contract: contract}}, nil }
// Rename `key` to `newKey` in a draft object while preserving the relative
// order of all entries. Mutates `draftParentValue` in place (immer-style);
// returns nothing.
export default function renameDraftKey(draftParentValue, key, newKey) {
  const keys = Object.keys(draftParentValue)
  const keyIndex = keys.indexOf(key)

  // BUG FIX: if `key` is absent, indexOf returns -1 and slice(-1 + 1) selects
  // every key, so the original deleted the whole object and stored `undefined`
  // under `newKey`. Renaming a key to itself is likewise a no-op.
  if (keyIndex === -1 || key === newKey) return

  const trailingKeys = keys.slice(keyIndex + 1)
  const trailingValues = []

  // Save and remove every entry that follows the old key.
  // (Loop variable renamed: the original shadowed the `key` parameter.)
  for (const trailingKey of trailingKeys) {
    trailingValues.push(draftParentValue[trailingKey])
    delete draftParentValue[trailingKey]
  }

  // Re-insert the value under its new name...
  const value = draftParentValue[key]
  delete draftParentValue[key]
  draftParentValue[newKey] = value

  // ...then restore the trailing entries in their original order.
  for (let i = 0; i < trailingKeys.length; i++) {
    draftParentValue[trailingKeys[i]] = trailingValues[i]
  }
}
class Contact:
    """Plain data holder for a single address-book contact.

    All 24 fields are stored verbatim as passed; the constructor performs no
    validation or normalization.
    """

    def __init__(self, firstname, middlename, lastname, nickname, title,
                 company, address, home, mobile, work, fax, email, email2,
                 email3, homepage, bday, bmonth, byear, aday, amonth, ayear,
                 address2, phone2, notes):
        # name
        self.firstname = firstname
        self.middlename = middlename
        self.lastname = lastname
        self.nickname = nickname
        # job
        self.title = title
        self.company = company
        # primary address and phone numbers
        self.address = address
        self.home = home
        self.mobile = mobile
        self.work = work
        self.fax = fax
        # e-mail / web
        self.email = email
        self.email2 = email2
        self.email3 = email3
        self.homepage = homepage
        # birthday
        self.bday = bday
        self.bmonth = bmonth
        self.byear = byear
        # anniversary (BUG FIX: original assigned self.amonth twice)
        self.aday = aday
        self.amonth = amonth
        self.ayear = ayear
        # secondary address / notes
        self.address2 = address2
        self.phone2 = phone2
        self.notes = notes
import { Component, Listen, Event, EventEmitter, State, h } from '@stencil/core'; import { uhCharacters } from './uh-characters'; import { utf16 } from './../../utils/utils'; @Component({ tag: 'uh-dash-characters', styleUrl: 'uh-dash-characters.scss' }) export class MyName { @State() characters: any[] = []; @State() showStroke: string; @State() showRad: string; @Event() onCharSelected: EventEmitter; render() { return <div> <div class="characters-header"> <stroke-list></stroke-list> </div> <div class="characters-wrapper"> {this.characters.map((item) => { return (this.showStroke && (this.showStroke==item[0])) ? [ <div key={'charhdr-' + item[0]} class="char hdr"> {item[0]} </div> ].concat( this.convertToChar(Object.values(item[1]), item[0]) ) : ''; })} </div> </div> } @Listen('strokeClicked', {target: 'body'}) strokeClicked(event: CustomEvent) { this.showStroke = event.detail; this.characters = Object.entries(uhCharacters[this.showRad]) } @Listen('radSelected', { target: 'parent' }) radSelected(event: CustomEvent) { if (event.detail) { this.characters = Object.entries(uhCharacters[event.detail]); this.showRad = event.detail; this.showStroke = '0'; } else { this.characters = []; this.showRad = ''; this.showStroke = ''; } } convertToChar(chars: number[], strokes: string): string[] { return chars ? chars.map(cp => { return <div key={cp} class={"char stroke-" + strokes} onClick={() => this.selectChar(cp)}> {utf16(cp)} </div> }) : []; } selectChar(cp: number) { this.onCharSelected.emit(cp); } }
Nintendo confirms that the upcoming Pokemon fighting game, Pokken Tournament, will support local multiplayer, thanks to the second screen on the Wii U’s GamePad. Those that have been following Pokken Tournament since its grand unveiling have been on a roller coaster ride of sorts. Originally the title was only confirmed for Japanese arcades with a handful of playable Pocket Monsters to choose from, but it’s grown quite a bit since then. Nintendo and The Pokemon Company eventually came forth to confirm that the fighting game will be arriving for the Wii U internationally, which finally means that gamers wouldn’t have to line up at select Dave & Buster’s to play the imported arcade machine. While the ability to finally play the game in areas outside of Japan is enticing to loyal Pokemon aficionados, many were wondering if Pokken Tournament would support local multiplayer on the console. According to a recent retailer listing for the title’s previously announced special edition controller, there will be local multiplayer supported in the fighter. That said, there is a bit of a catch – no pun intended. According to the listing, local multiplayer will only be possible because of the Wii U GamePad’s second-screen functionality. As a result, only one regular, non-GamePad controller can be used at a time during couch-based battles, which may not be ideal for gamers hoping to take the newest Pokemon game as seriously as other titles within the same genre. After watching gameplay footage of Pokken Tournament in action, it’s easy to see why the perspective-based fighter will only support local play through the GamePad’s screen, but it’s still a blow for anyone that wanted to see if the game had what it takes to become a serious fight title. Based on reports, one user will have full access to the television when using the controller, while the other will be left to utilize the screen on Nintendo’s tablet-like remote. 
Despite this news, the game seems to be shaping up quite nicely. The roster continues to grow (with the latest addition being none other than the fully-evolved Hoenn starter Sceptile) and there doesn’t appear to be any sign of a slow down in combatant reveals, which makes the build up to Pokken Tournament‘s release a tantalizing prospect. The game may not end up being as competitive as something like Nintendo’s fight-centric darling Super Smash Bros., which wraps up its DLC campaign in February, but it does look like an enjoyable romp nonetheless. Pokken Tournament is set to arrive in North America for the Nintendo Wii U in Spring 2016. Source: Hori (via Serebii)
YiQiFuMai powder injection ameliorates blood–brain barrier dysfunction and brain edema after focal cerebral ischemia–reperfusion injury in mice YiQiFuMai powder injection (YQFM) is a modern preparation derived from the traditional Chinese medicine Sheng-Mai-San. YQFM is widely used in clinical practice in the People’s Republic of China, mainly for the treatment of microcirculatory disturbance-related diseases. However, little is known about its role in animals with ischemic stroke. The aim of this study was to examine the effect of YQFM on brain edema and blood–brain barrier (BBB) dysfunction induced by cerebral ischemia–reperfusion (I/R) injury. Male C57BL/6J mice underwent right middle cerebral artery occlusion for 1 hour with a subsequent 24-hour reperfusion to produce I/R injury. YQFM (three doses: 0.336, 0.671, and 1.342 g/kg) was then given intraperitoneally (IP). The results demonstrated that YQFM significantly decreased infarct size, improved neurological deficits, reduced brain water content, and increased cerebral blood flow after I/R injury. 18F-fluorodeoxyglucose micro-positron emission tomography imaging and hematoxylin and eosin staining results indicated that YQFM is able to ameliorate brain metabolism and histopathological damage after I/R. Moreover, YQFM administration reduced BBB leakage and upregulated the expression of zona occludens-1 (ZO-1) and occludin, which was confirmed by Evans Blue extravasation, Western blotting, and immunofluorescence assay. Our findings suggest that YQFM provides protection against focal cerebral I/R injury in mice, possibly by improving BBB dysfunction via upregulation of the expression of tight junction proteins. Introduction Ischemic stroke is a destructive cerebrovascular disease and a leading cause of death worldwide. 1 Currently, recombinant tissue plasminogen activator is the only therapeutic agent available for the treatment of ischemic stroke. 
However, fewer than 5% of patients with stroke recover, owing to the narrow therapeutic window, 2,3 risk of intracerebral hemorrhage, and multiple contraindications. 4 Given the devastating impact and social burden of this damaging cerebrovascular event, the development of optimum treatments for ischemic stroke is urgently needed. 5 Abundant evidence has confirmed that the disruption of the blood-brain barrier (BBB) and the consequent brain edema are major contributors to the pathogenesis of ischemic stroke. Intravascular proteins and fluid penetrate into the cerebral parenchymal extracellular space, leading to vasogenic cerebral edema and reduced blood flow to neurons, resulting in an irreversible apoptosis. 6,7 The BBB is not a rigid structure but a dynamic interface, with a range of interrelated functions that result from effective tight junctions (TJs), trans-endothelial transport systems, enzymes, and the regulation of leukocyte permeation. TJs are the most apical structure within the intercellular cleft, limiting the paracellular flux of hydrophilic molecules across the BBB. TJs consist of several types of integral transmembrane and cytoplasmic accessory proteins, of which occludin and zona occludens-1 (ZO-1) play key roles in BBB dysfunction after ischemic stroke. 8,9 Thus, protecting the BBB may be a promising strategy for developing new clinical therapies for ischemic stroke. 10 A number of studies have indicated that traditional Chinese medicine preparations, such as XueShuanTong injection (XST), 11 Danhong injection, 12 and Buyanghuanwu decoction, 13 have reliable curative efficacy in the prevention and treatment of stroke. 
YiQiFuMai powder injection (YQFM), a modern preparation based on a well-known complex prescription, Sheng-Mai-San, was approved in 2007 by the China Food and Drug Administration for the treatment of microcirculatory disturbance-related diseases in the People's Republic of China. 14,15 YQFM is composed of Panax ginseng CA Mey., Ophiopogon japonicus (Thunb.) Ker-Gawl, and Schisandra chinensis (Turcz.) Baill (1:3:1.5). 16 The bioactive compounds and their chemical structures were identified by tandem mass spectrophotometry in negative electrospray ionization mode. 17 Considerable numbers of previous studies have indicated that YQFM is widely used for the treatment of cardiovascular disease. 17,18 We previously predicted the multitarget mechanism of YQFM in cardiocerebral ischemic disease, based on network pharmacology. 19 However, its related pharmacological activity and potential mechanism in ischemic stroke in animals remain unclear. Therefore, in the current study, we investigated the effects of YQFM on brain edema and BBB dysfunction induced by cerebral ischemia-reperfusion (I/R) compared with XST and explored the possible modulation of TJs in I/R induced by middle cerebral artery occlusion (MCAO) in mice. Our findings provide some evidence for the clinical application of YQFM in ischemic stroke. Materials and methods reagents YQFM (batch number 20121210) was purchased from Tasly Pharmaceutical Co., Ltd. (Tianjin, People's Republic of China). An ultrahigh performance liquid chromatography (UHPLC) fingerprint ( Figure 1) was used to control the quality of YQFM in the current study. Based on this fingerprint, we established an optimum and easily controlled method for the analysis of YQFM in our laboratory. 18 XST was purchased from Livzon Pharmaceutical Group Inc. (Guangdong, People's Republic of China). 
Fluorescein-isothiocyanate-conjugated goat antirabbit immunoglobulin G and horseradish-peroxidase-conjugated goat antirabbit and anti-mouse immunoglobulin G were purchased from Jackson ImmunoResearch Laboratories, Inc. (West Grove, PA, USA). All chemicals were of the commercially available highest purity. 317 YQFM ameliorates cerebral i/r-induced brain injury 40.5%-55% B for 14.5-17.5 minutes; 55%-99% B for 17.5-19.5 minutes; and 99%-2% B for 19.5-20 minutes. The flow rate was 0.5 mL/min, and the column temperature was set at 25°C. animals C57BL/6J mice weighing 18-22 g were provided by the Reference Animal Research Centre of Yangzhou University (Yangzhou, People's Republic of China; certificate no SCXK 2014-0004). All procedures and assessments were approved by the Animal Ethics Committee of the School of Chinese Materia Medica, China Pharmaceutical University. These experiments were carried out in accordance with the National Institutes of Health Guide for the Care and use of Laboratory Animals (National Institutes of Health Publication No 80-23, revised in 1996). Before performing the experiments, all animals were randomized into experimental groups, and the indices were measured by operators blinded to the study procedures. Focal cerebral i/r Stroke was induced by MCAO and reperfusion in C57BL/6J mice as reported previously. 20 Briefly, each mouse was anesthetized with 4% chloral hydrate (0.1 mL per 10 g body weight) IP, then the neck vessels were exposed by a midline incision, and the branches of the right external carotid artery were carefully isolated and cauterized. A 6-0 nylon monofilament suture, blunted at the tip and coated with 1% poly-l-lysine, was advanced 9-10 mm into the internal carotid to occlude the origin of the middle cerebral artery. The temperature of the animal was maintained at 37.0°C±0.5°C with a heating pad (ALCBIO, Shanghai, People's Republic of China) during surgery and ischemia. 
The same treatment was carried out on mice in the sham group except that the suture was not advanced into the internal carotid artery. A laser Doppler flowmeter (FLPI2, Moor Instruments Ltd., Axminster, UK) was used to confirm that middle cerebral artery blood flow had decreased to less than 30% of the basic cerebral blood flow (CBF) immediately after the occlusion. 21 Animals whose blood flow decreased to less than 30% of pre-ischemia levels were used for further study. 
23 The measurement of neurological deficits consisted of the following tests: spontaneous activity, symmetry of movements, symmetry of forelimbs, climbing, reaction to touch, and response to vibrissae touch. All six individual tests were scored on a four-point scale as 3, 2, 1, or 0. Final score was obtained by adding the scores recorded for each individual test, with a maximum score of 18 observed in healthy animals. Brain water content (n=6 for each group) was determined 24 hours after reperfusion, using the wet-dry method as described previously. 24 The total brain water was calculated as: cBF measurement CBF was measured using laser Doppler flowmetry as described in the "Materials and methods" section. 20 A computer-controlled optical scanner directed a low-power laser beam over the exposed cortex. The scanner head was positioned parallel to the cerebral cortex at a distance of ~20 cm. 25 A color-coded image indicating specific relative perfusion levels was displayed on a video monitor. The images were acquired at the onset of ischemia and reperfusion for 24 hours (n=6 for each group). hematoxylin and eosin staining of adjacent brain sections (5 µm thick) were cut from the coronal plane of the wax-embedded tissue and were stained with H&E for histomorphological analysis conducted by a pathologist blinded to the treatment groups. 26 18 F-fluorodeoxyglucose-positron emission tomography imaging 18 F-fluorodeoxyglucose-micro-positron emission tomography ( 18 F-FDG-micro-PET) scans were performed in mice (n=3 for each group) anesthetized at 23 hours after reperfusion with 4% isoflurane, with anesthesia maintained with 2%-2.5% of isoflurane in 100% O 2 . The tail vein was catheterized with a gauge catheter for intravenous administration of the radiotracer (radiochemical purity was .95%; provided by Mitro Biotech Co. Ltd., Nanjing, People's Republic of China). 
Animals were placed into a mouse holder compatible with PET acquisition systems (Trans-PET ® Bio-Caliburn ® 700; X-Z LAB, Inc., San Ramon, CA, USA), and normothermia was maintained by a water-based heating blanket. 27 Animals underwent two micro-PET scans to assess glucose metabolism of 18 F-FDG for each time point and condition (before ischemia and 24 hours after I/R). evaluation of BBB permeability BBB permeability was assessed by the leakage of Evans Blue (EB) stain into the brain following the tail-vein injection. 28 Two hours before the animals (n=6 for each group) were euthanized, 0.1 mL per 10 g body weight of 2% EB (Sigma-Aldrich) in normal saline was injected into each animal. The animals were then anesthetized and perfused with normal saline. For the quantitative measurement of EB leakage, the ipsilateral hemisphere was removed and homogenized in 1 mL of trichloroacetic acid, then centrifuged at 12,000× g for 20 minutes. EB concentration was quantitatively determined by measuring the absorbance at 620 nm of the supernatant with a spectrophotometer. The EB content was quantified as micrograms of EB per gram of tissue, using a standard curve. 29 Western blotting Western blotting (n=3 for each group) was performed as described previously. 25,28 Proteins extracted from the cortex of the ischemic side in the operated mice, and the corresponding areas of sham-operated mice were used for Western blot analysis. Membranes were incubated overnight at 4°C with the appropriate primary antibodies: ZO-1 (1:100; ab59720), occludin (1:200; ab31721) (both were purchased from Abcam, Cambridge, UK), and glyceraldehyde 3-phosphate dehydrogenase (GAPDH, 1:8000; Kangchen Bio-tech Inc., Shanghai, People's Republic of China). After washing, the membrane was incubated with horseradish-peroxidase-conjugated secondary antibody for 1.5 hours. 
Immunoreactive bands were detected by a chemiluminescence system (ECL Plus; Amersham, Arlington Heights, IL, USA) and analyzed by Quantity One analysis software. GAPDH was used as the loading control, as in previous reports. 30,31 Each sample was normalized first against the GAPDH level in the sample, then against the level of the sham sample, as described previously. 30,31 Immunofluorescence staining Mice (n=3 for each group) were anesthetized IP with 4% chloral hydrate (0.1 mL/10 g body weight) and transcardially perfused with 100 mL of normal saline, followed by 100 mL of 0.1 M phosphate-buffered saline (PBS) containing 4% PFA (pH =7.4). 28 Perfusion-fixed brains were postfixed in 0.1 M PBS +4% PFA overnight, followed by dehydration in 40% sucrose. Coronal brain sections (20 µm thick) were cut on a cryostat (CM1950; Leica Microsystems, Wetzlar, Germany), and the sections were blocked for 1.5 hours in 5% bovine serum albumin in PBS with 0.1% Triton X-100. The sections were then incubated overnight at 4°C in 3% bovine serum albumin in 0.1% Triton X-100/PBS with the primary antibodies: anti-ZO-1 TJ protein antibody (1:50; ab59720) and anti-occludin antibody (1:100; ab31721) (both were purchased from Abcam). After being rinsed three times with PBS, sections were incubated for 2 hours in fluorochromecoupled secondary antibody. The nuclei were stained with 4′,6-diamidino-2-phenylindole (DAPI; Boyetime Institute of Biotechnology, Shanghai, People's Republic of China). After being rinsed with PBS, the sections were examined under a fluorescence microscope (Leica Microsystems). statistical analysis All results are expressed as mean ± standard deviation. Statistical analysis was carried out using Student's two-tailed t-test for comparison between two groups and one-way analysis of variance followed by Dunnett's test when the data involved three or more groups. P,0.05 was considered statistically significant. 
32 All analyses were performed with GraphPad Prism software (Version 5.01; GraphPad Software, Inc., La Jolla, CA, USA). YQFM reduces infarction volume and brain water content and improves behavioral outcomes in i/r-treated mice Infarction volume was evaluated in mouse brains by TTC staining and imaging software (Image J software; National Institutes of Health, Bethseda, MD, USA) after I/R injury. 319 YQFM ameliorates cerebral i/r-induced brain injury Representative samples of TTC-stained brain sections are shown in Figure 2A, with corresponding infarction volumes and statistical data that are shown in Figure 2B. Infarct volume was significantly increased in the shamoperated reference group (37.4%±8.6%), whereas the three doses of YQFM (0.336, 0.671, and 1.342 g/kg) reduced the infarct volumes induced by I/R by 28.7%±3.6%, 25.2%±2.7%, and 13.9%±1.4%, respectively, with the highest inhibition rate being 62.83% (YQFM at a dose of 1.342 g/kg versus model group). XST at a dose of 40 mg/ kg, the positive control, also significantly reduced I/Rinduced infarct volume. As shown in Figure 2C, there was a marked decline of 10.67±0.51 in I/R neurological score, while the YQFM and XST treatments resulted in a statistically significant improvement in neurobehavioral deficits compared with the reference group (P,0.05). Furthermore, the reference group had significantly increased brain water content (80.5%±0.6% versus 78.6%±0.7%, P,0.05), which was significantly attenuated by YQFM at all three doses and by XST ( Figure 2D). YQFM increases cBF in i/r-treated mice CBF was determined by a laser Doppler perfusion imaging system in the different groups, as shown in Figure 3A, and the quantification of the results is shown in Figure 3B. 
The administration of both YQFM at a dose of 1.342 g/kg and XST at a dose of 40 mg/kg resulted in a significant increase in CBF at 24 hours after reperfusion (63.9%±5.9% versus 27.6%±6.4%, YQFM at a dose of 1.342 g/kg versus model group, P,0.05), and increased in the other two groups (0.334 and 0.671 g/ kg) to a certain extent (42.1%±4.6% and 51.4%±6.0%). The highest inhibition rate of YQFM was 56.8% (YQFM at a dose of 1.342 g/kg versus model group). YQFM ameliorates brain metabolism and histopathological damage in i/r-treated mice Brain sections of the sham group, the untreated I/R group, and the I/R group treated with YQFM at a dose of 1.342 g/kg were examined. Figure 3A and B shows the brain metabolism and histopathological changes after 1 hour of occlusion and 24 hours of reperfusion by micro-PET imaging and H&E staining, respectively. Brain sections of the I/R group showed weak signal intensity and signal volume in the lesion side ( Figure 5A), as well as neuronal loss and the presence of numerous vacuolated spaces ( Figure 5B). The data showed that YQFM pretreatment at a dose of 1.342 g/kg ameliorated 321 YQFM ameliorates cerebral i/r-induced brain injury brain metabolism and histopathological damage in I/Rtreated mice by increasing the signal intensity and signal volume and decreasing the cell loss. YQFM increases expression of ZO-1 and occludin in i/r-treated mice To identify the relationship between TJ remodeling and BBB integrity, Western blotting and immunofluorescence staining were used to analyze the expression of TJ proteins. Western blot analysis ( Figure 6A and B) showed that the expression of ZO-1 and occludin was decreased in the I/R-treated mice, whereas it increased significantly in response to YQFM treatment at a dose of 1.342 g/kg. No statistical significance was observed with the other two YQFM groups. 
We performed CD31/ZO-1 and CD31/occludin double staining, which showed that ZO-1 and occludin were continuously present in the endothelial cell layer of microvessels in normal brains treated with YQFM at a dose of 1.342 g/kg ( Figure 6C and D). After I/R, ZO-1 and occludin showed structure disruptions with rearrangements compared with sham mice. TJs were much smoother and had more continuous labeling in the group treated with YQFM at a dose of 1.342 g/kg. Discussion A number of studies have demonstrated that YQFM can be used in cardiovascular diseases with marked efficacy. 14,17 However, its protective effects in ischemic stroke remain unclear. The results of the current study revealed that YQFM produces a significant improvement in cerebral infarction, neurological score, and brain edema at doses of 0.336, 0.671, and 1.342 g/kg, showing the same effects as seen with XST (Figure 2A-D). XST, a well-known Chinese herbal preparation, is composed of Panax notoginseng saponins, which shows part similarity to the chemical components of YQFM, and has been approved by the China Food and Drug Administration for the treatment of patients with some ischemic diseases. 11,33,34 The findings of the current study provide the Considering that the severity of ischemic damage correlates with the degree of CBF reduction, 35 we also investigated the changes in CBF in the different groups and found that the treatment with YQFM at a dose of 1.342 g/kg significantly increased CBF ( Figure 3A and B), which indicated that the smaller infarct volume in the YQFM-treated groups was correlated with improved CBF during I/R. However, molecular imaging has a number of advantages, such as the visual representation, characterization, and quantification of biological processes at cellular and molecular levels. 36,37 PET with 18 F-FDG has been widely used in the evaluation of the effects of drugs for cerebrovascular disease via the quantitative and noninvasive monitoring of glucose metabolism. 
12,38,39 In the current study, micro-PET imaging technology was used to further confirm the validity of YQFM treatment in I/R-treated mice. We found that YQFM improved brain glucose metabolism, which was consistent with the results of TTC staining ( Figure 5A). In addition, H&E staining for histopathological observations suggested that neural structure was recovered, with a dense neuropil and largely viable 323 YQFM ameliorates cerebral i/r-induced brain injury neurons, in the brains of the YQFM-treated group compared with the I/R group ( Figure 5B). These findings provide further evidence of the effectiveness of YQFM in I/R-treated mice at both the functional and the morphological levels. Ischemic-stroke-induced brain injury is a complex pathophysiological process, including multifactor, multiway, and multichannel damage. However, BBB disruption is a critical event in the progression of ischemic stroke, which can increase cerebral vascular permeability and lead to the formation of brain edema. 44,45 Therefore, approaches focusing on the subsequent damage of BBB dysfunction should be considered. 46,47 The understanding of BBB dysfunction induced by ischemic stroke following thrombolytic therapy remains a challenge for clinicians. 48,49 In the current study, the results showed that YQFM treatment significantly reduced the leakage of EB at three doses ( Figure 4A and B), which indicated that YQFM could effectively protect BBB integrity. Furthermore, it has been generally believed that TJs that mediate paraendothelial transport participate in endothelial cell-mediated BBB permeability. 7,50 TJs such as ZO-1 and occludin have different molecular structures and characteristics, and their degradation plays a key role in ischemic BBB dysfunction in stroke. Loss and degradation of ZO-1 play significant roles in maintaining the continuity and integrity of TJs, which is closely linked to increases in BBB permeability. 
54 Occludin, the first integral transmembrane protein to be identified, is localized to TJs. 55 Although much has been learned about TJs, the exact mechanisms of action of YQFM on TJs are not clear. Our immunofluorescence microscopy and Western blot assays demonstrated that ZO-1 and occludin participate in the modulation of BBB permeability by YQFM ( Figure 6A-D). However, the role of the basal lamina and other TJ proteins in the action of YQFM on BBB permeability is unknown and needs further clarification. As previously described, 18 YQFM consists of bioactive compounds, such as ginsenoside Rb1, ginsenoside Rg1, and schisandrin, some of which display neuroprotective effects after cerebral I/R injury. Recent studies have indicated that ginsenoside Rb1 can protect the loss of BBB integrity by suppressing neuroinflammation, while Rg1 attenuates BBB disruption through downregulating the expression of aquaporin 4 induced by ischemic stroke in animals. 56,57 This indicates that these components identified by UHPLC might be the key active ingredients for BBB protection. Furthermore, the possible mechanisms of action of YQFM are linked with the nuclear factor (NF)-κB and Akt pathways, 58-61 and 12 other major signaling pathways, including NF-κB, mitogen-activated protein kinase, the cytokine network, and mammalian target of rapamycin (mTOR), have been identified as related to the effects of YQFM in cardiocerebral ischemic diseases. 19 Moreover, ginsenoside Rg1 can ameliorate I/R-induced BBB disruption by regulating protease-activated receptor-1 (PAR-1), while ginsenoside Rb1 protects I/R-induced BBB disruption by inhibiting matrix metalloproteinase-9 (MMP-9). 56,58 These findings indicate that the potential targets or pathways of YQFM in the treatment of ischemic stroke are related to PAR-1, MMP-9, NF-κB, etc. 
In addition, a combination of four active compounds derived from Sheng-Mai-San was reported to alleviate cerebral I/R injury, correlating with the inhibition of autophagy and modulation of the adenosine monophosphate-activated protein kinase/mTOR and Jun kinase pathways and to inhibit H 2 O 2 -induced PC12 cell apoptosis in vitro. 20,62 Ginsenoside Rg1 can also improve cell viability and cell apoptosis induced by oxygen-glucose deprivation in cultured hippocampal neurons. 63 These findings suggest that YQFM may attenuate cerebral I/R injury via multiple mechanisms, such as anti-inflammation, antiapoptosis, antioxidation, and inhibition of autophagy, which require further investigation in future studies. Conclusion The current study demonstrates that the compound Chinese medicine YQFM can attenuate I/R-induced brain injury by improving cerebral infarction, neurological score, brain edema, brain metabolism, and histopathological damage. The repair of BBB disruption is mediated by interference with TJ protein degradation in vascular endothelial cells. These findings provide solid evidence for the efficacy of YQFM in the treatment of ischemic stroke.
#include "math.h" #include <iostream> using namespace std; const double MAX = 100000000; int main(){ double i = 0; double seno = 0; double coseno = 0; double tangente = 0; double logaritmo = 0; double raizCuadrada = 0; while(i < MAX){ seno += sin(i); coseno += cos(i); tangente += tan(i); logaritmo += log(i); raizCuadrada += sqrt(i); i++; } return 0; }
/*
 * $Name: CheckFiles
 * $Function: If the files used in training TM model does not exist, exit!
 * $Date: 2015-10-28, in Los Angeles, USA
 */
// Verifies that every file parameter required for rescoring is present.
// CheckFile() validates each input file (and reports/aborts on a missing
// one); "-out" only needs to be present as a key.  Returns true when all
// checks pass, false when "-out" is missing.
bool Rescoring::CheckFiles(map<string, string> &parameters)
{
    // Every key below must name an existing input file.
    const char *required_files[] =
        {"-dev", "-devnbest", "-test", "-testnbest", "-initweights"};
    const size_t required_count = sizeof(required_files) / sizeof(required_files[0]);
    for (size_t i = 0; i < required_count; ++i) {
        string file_key = required_files[i];
        CheckFile(parameters, file_key);
    }

    if (parameters.find("-out") == parameters.end()) {
        cerr << "ERROR: Please add parameter \"-out\" in your command line!\n";
        // BUG FIX: the original printed the error but still returned true,
        // so a missing output path went unnoticed by callers.
        return false;
    }
    return true;
}
import unittest

import libpysal
import numpy as np
from esda.geary_local import Geary_Local
from libpysal.common import ATOL, RTOL, pandas

# True when pandas is unavailable; tests requiring pandas can skip on it.
PANDAS_EXTINCT = pandas is None


class Geary_Local_Tester(unittest.TestCase):
    """Checks local Geary statistics against known values for the
    St. Louis homicide example dataset."""

    def setUp(self):
        np.random.seed(10)
        self.w = libpysal.io.open(libpysal.examples.get_path("stl.gal")).read()
        f = libpysal.io.open(libpysal.examples.get_path("stl_hom.txt"))
        self.y = np.array(f.by_col['HR8893'])

    def test_local_geary(self):
        lG = Geary_Local(connectivity=self.w).fit(self.y)
        self.assertAlmostEqual(lG.localG[0], 0.696703432)
        self.assertAlmostEqual(lG.p_sim[0], 0.19)


# Build an explicit suite so the module can also be driven by a runner
# that imports `suite` directly.
suite = unittest.TestSuite()
for case in [Geary_Local_Tester]:
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(case))

if __name__ == "__main__":
    unittest.TextTestRunner().run(suite)
import most = require('most')
import R = require('ramda')
import {
  DeprecationLog,
  Log,
} from 'supi'
import * as supi from 'supi'

// One entry in the summary diff: a package that was added, removed or
// linked at the project root.
export interface PackageDiff {
  added: boolean,
  from?: string,
  name: string,
  realName?: string,
  version?: string,
  deprecated?: boolean,
  latest?: string,
  linked?: true,
}

// Plain string-keyed dictionary (shadows the global Map on purpose here).
export interface Map<T> {
  [index: string]: T,
}

// Maps the short dependency-type tag to its package.json property name.
export const propertyByDependencyType = {
  dev: 'devDependencies',
  optional: 'optionalDependencies',
  prod: 'dependencies',
}

// Builds a stream of package diffs (added/removed/linked, keyed by
// "+name" / "-name" / ">name") from the supi log streams, then reconciles
// the result against the initial vs. updated package.json snapshots so
// manifest-only changes are also reflected.
export default function (
  log$: {
    progress: most.Stream<supi.ProgressLog>,
    stage: most.Stream<supi.StageLog>,
    deprecation: most.Stream<supi.DeprecationLog>,
    summary: most.Stream<supi.Log>,
    lifecycle: most.Stream<supi.LifecycleLog>,
    stats: most.Stream<supi.StatsLog>,
    installCheck: most.Stream<supi.InstallCheckLog>,
    registry: most.Stream<supi.RegistryLog>,
    root: most.Stream<supi.RootLog>,
    packageJson: most.Stream<supi.PackageJsonLog>,
    link: most.Stream<supi.Log>,
    other: most.Stream<supi.Log>,
  },
) {
  // Accumulates the ids of every package reported as deprecated so far.
  const deprecationSet$ = log$.deprecation
    .scan((acc, log) => {
      acc.add(log.pkgId)
      return acc
    }, new Set())

  // Folds each root log event (paired with the deprecation set seen so
  // far) into the three per-dependency-type diff maps.
  const pkgsDiff$ = most.combine(
    (rootLog, deprecationSet) => [rootLog, deprecationSet],
    log$.root,
    deprecationSet$,
  )
  .scan((pkgsDiff, args) => {
    const rootLog = args[0]
    const deprecationSet = args[1] as Set<string>
    // A package was installed at the root.
    if (rootLog['added']) {
      pkgsDiff[rootLog['added'].dependencyType][`+${rootLog['added'].name}`] = {
        added: true,
        deprecated: deprecationSet.has(rootLog['added'].id),
        latest: rootLog['added'].latest,
        name: rootLog['added'].name,
        realName: rootLog['added'].realName,
        version: rootLog['added'].version,
      }
      return pkgsDiff
    }
    // A package was removed from the root.
    if (rootLog['removed']) {
      pkgsDiff[rootLog['removed'].dependencyType][`-${rootLog['removed'].name}`] = {
        added: false,
        name: rootLog['removed'].name,
        version: rootLog['removed'].version,
      }
      return pkgsDiff
    }
    // A package was linked (symlinked from a local path) into the root.
    if (rootLog['linked']) {
      pkgsDiff[rootLog['linked'].dependencyType][`>${rootLog['linked'].name}`] = {
        added: false,
        from: rootLog['linked'].from,
        linked: true,
        name: rootLog['linked'].name,
      }
      return pkgsDiff
    }
    return pkgsDiff
  }, {
    dev: {},
    optional: {},
    prod: {},
  } as {
    dev: Map<PackageDiff>,
    prod: Map<PackageDiff>,
    optional: Map<PackageDiff>,
  })

  // Merges the packageJson log with the summary event (as an empty object)
  // and takes the first two, producing {initial, updated} snapshots —
  // presumably one event carries each key; TODO confirm against supi.
  const packageJson$ = most.fromPromise(
    most.merge(
      log$.packageJson,
      log$.summary.constant({}),
    )
    .take(2)
    .reduce(R.merge, {}),
  )

  return most.combine(
    (pkgsDiff, packageJsons) => {
      const initialPackageJson = packageJsons['initial']
      const updatedPackageJson = packageJsons['updated']
      // Without both snapshots there is nothing to reconcile.
      if (!initialPackageJson || !updatedPackageJson) return pkgsDiff
      for (const depType of ['prod', 'optional', 'dev']) {
        const prop = propertyByDependencyType[depType]
        const initialDeps = R.keys(initialPackageJson[prop])
        const updatedDeps = R.keys(updatedPackageJson[prop])
        // Record manifest removals not already captured from the log.
        const removedDeps = R.difference(initialDeps, updatedDeps)
        for (const removedDep of removedDeps) {
          if (!pkgsDiff[depType][`-${removedDep}`]) {
            pkgsDiff[depType][`-${removedDep}`] = {
              added: false,
              name: removedDep,
              version: initialPackageJson[prop][removedDep],
            }
          }
        }
        // Record manifest additions not already captured from the log.
        const addedDeps = R.difference(updatedDeps, initialDeps)
        for (const addedDep of addedDeps) {
          if (!pkgsDiff[depType][`+${addedDep}`]) {
            pkgsDiff[depType][`+${addedDep}`] = {
              added: true,
              name: addedDep,
              version: updatedPackageJson[prop][addedDep],
            }
          }
        }
      }
      return pkgsDiff
    },
    pkgsDiff$,
    packageJson$,
  )
}
import { processResponse } from './process-response';

// Maps every (type, resultCode) combination processResponse distinguishes
// to the expected normalized response type.
describe('process-response', () => {
  describe('processResponse', () => {
    test('processes a complete response', () => {
      const response = { type: 'complete', resultCode: 'Authorised' };
      expect(processResponse(response)).toHaveProperty('type', 'success');
    });

    test('processes a validation response', () => {
      const response = { type: 'validation', resultCode: '' };
      expect(processResponse(response)).toHaveProperty('type', 'error');
    });

    test('processes an unknown response', () => {
      const response = { type: '?', resultCode: '' };
      expect(processResponse(response)).toHaveProperty('type', 'error');
    });

    // FIX: the two pending cases previously shared the exact same title,
    // which makes failure reports ambiguous and defeats name filtering.
    test('processes a pending result code', () => {
      const response = { type: 'complete', resultCode: 'pending' };
      expect(processResponse(response)).toHaveProperty('type', 'pending');
    });

    test('processes a pending response type', () => {
      const response = { type: 'pending', resultCode: '' };
      expect(processResponse(response)).toHaveProperty('type', 'pending');
    });

    test('processes a received response', () => {
      const response = { type: 'complete', resultCode: 'received' };
      expect(processResponse(response)).toHaveProperty('type', 'received');
    });
  });
});
// registerUserFields creates fields for registered user. func (v *View) registerUserFields( tx *sql.Tx, user models.User, form registerUserForm, ) error { email := models.UserField{ UserID: user.ID, Kind: models.EmailField, Data: form.Email, } if _, err := v.core.UserFields.CreateTx(tx, email); err != nil { return err } if form.FirstName != "" { field := models.UserField{ UserID: user.ID, Kind: models.FirstNameField, Data: form.FirstName, } if _, err := v.core.UserFields.CreateTx(tx, field); err != nil { return err } } if form.LastName != "" { field := models.UserField{ UserID: user.ID, Kind: models.LastNameField, Data: form.LastName, } if _, err := v.core.UserFields.CreateTx(tx, field); err != nil { return err } } if form.MiddleName != "" { field := models.UserField{ UserID: user.ID, Kind: models.MiddleNameField, Data: form.MiddleName, } if _, err := v.core.UserFields.CreateTx(tx, field); err != nil { return err } } return nil }
def mask_len(mask):
    """Return the (min_length, max_length) implied by a mask string.

    The mask is a comma-separated list of elements:
      * "Xs-e" (leading letter): a contig span from s to e inclusive,
        contributing its exact length to both bounds;
      * "a-b": a variable-length gap, adding a to the minimum and b to
        the maximum;
      * a bare integer: a fixed-length gap, added to both bounds.
    """
    lo = hi = 0
    for token in mask.split(','):
        if token[0].isalpha():
            # Fixed-length contig span: strip the letter, parse "s-e".
            start, end = (int(part) for part in token[1:].split('-'))
            span = end - start + 1
            lo += span
            hi += span
        elif '-' in token:
            # Variable gap "min-max".
            gap_lo, gap_hi = (int(part) for part in token.split('-'))
            lo += gap_lo
            hi += gap_hi
        else:
            # Fixed gap.
            fixed = int(token)
            lo += fixed
            hi += fixed
    return lo, hi
Unusual Two‐Step Assembly of a Minimalistic Dipeptide‐Based Functional Hypergelator Self‐assembled peptide hydrogels represent the realization of peptide nanotechnology into biomedical products. There is a continuous quest to identify the simplest building blocks and optimize their critical gelation concentration (CGC). Herein, a minimalistic, de novo dipeptide, Fmoc‐Lys(Fmoc)‐Asp, as a hydrogelator with the lowest CGC ever reported, almost fourfold lower as compared to that of a large hexadecapeptide previously described, is reported. The dipeptide self‐assembles through an unusual and unprecedented two‐step process as elucidated by solid‐state NMR and molecular dynamics simulation. The hydrogel is cytocompatible and supports 2D/3D cell growth. Conductive composite gels composed of Fmoc‐Lys(Fmoc)‐Asp and a conductive polymer exhibit excellent DNA binding. Fmoc‐Lys(Fmoc)‐Asp exhibits the lowest CGC and highest mechanical properties when compared to a library of dipeptide analogues, thus validating the uniqueness of the molecular design which confers useful properties for various potential applications.
Ever wonder why American cities have hulking, 19th-century fortresses scattered amidst Starbucks and apartment complexes? I did. Since I work out in a YMCA that’s housed in a mammoth armory and my office is located next to a midtown Manhattan brick behemoth, I figured I’d bone up on how these enormously imposing structures came about—and why it is that such primo real estate was given over to these Medieval-looking buildings. Turns out that after the Civil War—the one that modern-day racists want us to think was about “states’ rights” (not the right to own and sell human beings, they insist)—the newly rising industrialists were nervous about the scrofulous masses getting out of hand. Here in NYC, a taste of workers’ militancy was on display in the draft riots in 1863, in which more than 100 mostly Black New Yorkers were killed when some white workers fought against the privileges of the wealthy who could pay $300 to avoid the draft. It was a horrible displacement of class rage by a bunch of workers who themselves were scapegoats, as many of the rioters were Irish. Historian Mike Davis has a pretty good description of this “schizophrenic consciousness of the immigrant poor: their hatred of the silk-stocking rich and their equal resentment against Blacks.” Nonetheless, tens of thousands of other white workers in New York signed up to join the war to abolish slavery, but those days of street violence provided a terrifying glimpse of class tensions and the rising rich were spooked. Robber barons of the Gilded Age, such as John Jacob Astor and William Vanderbilt, stepped in to finance the construction of domestic fortresses to store arms and house regiments of soldiers in the event of civic unrest, that is, strikes. Wouldn’t you know it, but those clever bastards were on to something! The decade of the 1870s was known as the Long Depression. Well, in all truth, they first called it the Great Depression, but the 1930s sucked even worse so they later changed the name. 
(Note to era namers: don’t get too comfy with Great Recession for the current period.) By 1873, a quarter of NYC’s workers were unemployed, thousands of businesses were failing nationally and even the booming railroad construction industry squeaked to a halt. Bosses slashed wages across the country. Workers launched a movement for “Work or Bread” and marched by the thousands in Boston, Chicago and New York. In January 1874, more than 7,000 gathered in New York’s Tompkins Square, which horrified the elite so much that they sent police in to beat and terrorize the crowd, inaugurating what historians agree was a period of “extreme repression.” A national railroad strike in 1877 both expressed and fed into a greater escalation of class tensions. One Pittsburgh railroad magnate responded to the strike action in that city saying, let’s feed them “a rifle diet for a few days and see how they like that kind of bread.” City after city was paralyzed in the strike, which rose as the economy began to recover a bit, and yet greedy bosses still insisted on wage cuts and longer hours. One speech before 20,000 workers in Tompkins Square that summer derided the policies of President Rutherford B. Hayes (a forgettable one-termer with an awesome beard) as a choice between “the hangman’s rope and the soldier’s bullet.” The police charged the crowd and beat dozens until folks dispersed. By the end of the 45-day strike—and I’ve skipped over several more bloody battles in between—Wall Streeters were convinced they needed to have a way to regularly deal with workers’ militancy. The War Department (notice how they didn’t call it the Defense Department in those years) began funding a system of armories in cities across the country. Some were started with public funds, but the robber barons poured millions of their ill-gotten-gain into these fortresses to protect their booty. 
In NYC, the first armory in the world opened in 1879 on the Upper East Side—smack in the middle of the wealthiest neighborhood in the country. These folks weren’t taking any chances with their new mansions. The Seventh Regiment Armory, also known as the Silk-Stocking Armory, still stands along Park Avenue stretching to Lexington Avenue, between 66th and 67th Streets. Today, it occasionally hosts the Royal Shakespeare Company and art shows. I’m happy that my own local fortress has a pretty great Y where Tumbling for Toddlers and a women’s shelter have long ago replaced the cannons and drilling regiments. But not all these armories have been turned over to benign uses and sometimes they’re retooled for their original purpose. After 9/11, I was strolling down Lexington Avenue on my way to give a talk at Hunter on “Don’t Turn Tragedy into War,” when out of the Seventh Regiment Armory marched dozens of M16-wielding soldiers. Evidently, these relics of the past can be called into service again. My scheduled talk at University of Pennsylvania Thursday, December 2, has been postponed until the New Year. I’ll keep you posted. Advertisements
<gh_stars>0 import { atom, useRecoilState } from 'recoil' const dateState = atom<Date>({ key: 'dateState', default: new Date(), }) type CalendarDate = [date: Date, setDate: (date: Date) => void] export function useCalendarDate(): CalendarDate { const [date, setDate] = useRecoilState(dateState) return [date, setDate] }
# code_marshal
# Prints a numeric diamond: rows count 0..i..0, indented so the figure is
# right-aligned into a diamond shape.  Modernized from Python 2 (xrange,
# print statement, int-evaluating input()) to Python 3 and decomposed so
# the row construction is testable.
def build_pattern(n):
    """Return the 2*n+1 lines of the numeric diamond for size n."""
    rows = []
    for i in range(2 * n + 1):
        if i <= n:
            # Growing half: indent shrinks as the row widens.
            indent = (n - i) * 2
            peak = i
        else:
            # Shrinking half: mirror of the top.
            indent = (i - n) * 2
            peak = 2 * n - i
        nums = list(range(peak + 1)) + list(range(peak - 1, -1, -1))
        rows.append(" " * indent + " ".join(str(j) for j in nums))
    return rows


if __name__ == "__main__":
    n = int(input())
    print("\n".join(build_pattern(n)))
#include "./al-check.hpp"

#ifdef EXAMPLE_BUILD_DEBUG

#include <fmt/format.h>

// Debug-only helper: drains the OpenAL error flag after a wrapped call and,
// when an error occurred, reports the failing expression with its source
// location and a human-readable description.
void priv::alCheckError(const char* file, uint32_t line, const char* expression)
{
    const ALenum code = alGetError();
    if (code == AL_NO_ERROR) {
        return; // Fast path: the wrapped call succeeded.
    }

    // Defaults cover any error code not handled below.
    const char* error = "Unknown AL Error!";
    const char* description = "No description possible.";

    switch (code) {
        case AL_INVALID_NAME:
            error = "AL_INVALID_NAME";
            description = "A bad name (ID) has been specified.";
            break;
        case AL_INVALID_ENUM:
            error = "AL_INVALID_ENUM";
            description = "An unacceptable value has been specified for an enumerated argument.";
            break;
        case AL_INVALID_VALUE:
            error = "AL_INVALID_VALUE";
            description = "A numeric argument is out of range.";
            break;
        case AL_INVALID_OPERATION:
            error = "AL_INVALID_OPERATION";
            description = "The specified operation is not allowed in the current state.";
            break;
        case AL_OUT_OF_MEMORY:
            error = "AL_OUT_OF_MEMORY";
            description = "There is not enough memory left to execute the command.";
            break;
        default:
            break;
    }

    // Report only the bare file name, not the whole path.
    const std::string path = file;
    fmt::print(
        "An internal OpenAL call failed in {} ({})!\n"
        "Expression:\n\t{}\n"
        "Error description:\n\t{}\n\t{}\n",
        path.substr(path.find_last_of("\\/") + 1), line, expression, error, description
    );
}

#endif
def lens(self):
    """Select the best lens for ``self.baseNode``.

    Candidates come from ``self.lensCandidates`` when set, otherwise from
    the shared ``self.fresnelCache.lenses``.  Falls back to the label or
    generic fallback lens when nothing matches.
    """
    # Only resource nodes (URIs or blank nodes) can be rendered by a lens.
    assert isinstance(self.baseNode, URIRef) or isinstance(self.baseNode, BNode)
    target = self.baseNode
    lenses = self.lensCandidates if self.lensCandidates else self.fresnelCache.lenses
    # Pair each lens with its match quality and drop the non-matches
    # (a falsy quality means "does not apply").
    lensesmatched = list(filter(lambda x: x[1], ((l,self.matches(l,target)) for l in lenses)))
    if not lensesmatched:
        info("No lens for {0}".format(target))
        # No applicable lens: use the label fallback when rendering a
        # label, else the generic fallback.
        return self.fallbackLabelLens if self.label else self.fallbackLens
    # Keep only the lenses tied for the best quality.  Sorting ascending
    # and taking the first implies a lower value ranks better here —
    # NOTE(review): confirm against self.matches()'s scoring convention.
    lensesmatched.sort(key=lambda x: x[1])
    lensesmatched = [x for x in lensesmatched if x[1]==lensesmatched[0][1]]
    if (len(lensesmatched) > 1):
        # Tie-break: prefer lenses explicitly marked with the default
        # purpose; keep the full tie if none carry it.
        lensesmatched_new = [x for x in lensesmatched if fresnel.defaultLens in x[0].purposes]
        lensesmatched = lensesmatched_new if lensesmatched_new else lensesmatched
    if (len(lensesmatched) > 1):
        # Still ambiguous: warn and arbitrarily pick the first.
        warning("more than one lens could be used for {0}".format(target))
    return lensesmatched[0][0]
def classify(ratings):
    """Return "Easy" when no problem is rated hard, else "Hard".

    ``ratings`` is the raw input line: "0" marks an easy problem, spaces
    separate entries, and any other character counts as a hard problem
    (matching the original character-wise scan).
    """
    # FIX: the original accumulated into a variable named `sum`,
    # shadowing the builtin; use the builtin over a generator instead.
    hard_count = sum(1 for ch in ratings if ch not in ("0", " "))
    return "Easy" if hard_count == 0 else "Hard"


if __name__ == "__main__":
    n = input()  # problem count; consumed but not needed for the check
    print(classify(input()))
/**
 * Registers a condition that must become true before the screenshot is taken.
 *
 * <p>Stores {@code cond} and {@code timeout} in static fields that are read
 * later when the shot is executed; nothing is evaluated here.
 *
 * @param cond    condition to wait for before shooting
 * @param timeout maximum time to wait for the condition (unit not visible
 *                from this method — TODO confirm against the waiter that
 *                consumes {@code beforeShootTimeout})
 * @return currently always {@code null} — NOTE(review): any caller that
 *         chains on the return value will hit a NullPointerException;
 *         consider returning a usable instance or declaring {@code void}
 */
public static Shutterbug wait(ExpectedCondition<?> cond, int timeout) {
    beforeShootCondition = cond;
    beforeShootTimeout = timeout;
    return null;
}
/// Configures and enables BLE advertising for this device.
///
/// Builds the advertising payload (local name, discovery flags and the
/// 16-bit Heart Rate service UUID), then disables advertising, writes the
/// data and parameters, and re-enables advertising.
///
/// NOTE(review): despite the original doc comment, nothing in this body
/// waits for a connection-complete event — confirm whether the caller is
/// expected to do that.
async fn advertise_setup<'a>(hi: &'a hci::HostInterface<bo_tie_linux::HCIAdapter>, local_name: &'a str) {
    // Advertised device name; `false` = the name is complete, not shortened.
    let adv_name = assigned::local_name::LocalName::new(local_name, false);

    let mut adv_flags = assigned::flags::Flags::new();

    // This is the flag specification for a LE-only, limited discoverable advertising
    adv_flags
        .get_core(assigned::flags::CoreFlags::LELimitedDiscoverableMode)
        .enable();
    adv_flags
        .get_core(assigned::flags::CoreFlags::LEGeneralDiscoverableMode)
        .disable();
    adv_flags
        .get_core(assigned::flags::CoreFlags::BREDRNotSupported)
        .enable();
    adv_flags
        .get_core(assigned::flags::CoreFlags::ControllerSupportsSimultaniousLEAndBREDR)
        .disable();
    adv_flags
        .get_core(assigned::flags::CoreFlags::HostSupportsSimultaniousLEAndBREDR)
        .disable();

    // Advertise the Heart Rate service as an incomplete 16-bit UUID list.
    let mut adv_uuids = assigned::service_uuids::new_16(false);
    adv_uuids.add(std::convert::TryFrom::try_from(heart_rate_service::HEART_RATE_SERVICE_UUID).unwrap());

    // Assemble the advertising data payload from the three AD structures.
    let mut adv_data = set_advertising_data::AdvertisingData::new();
    adv_data.try_push(adv_name).unwrap();
    adv_data.try_push(adv_flags).unwrap();
    adv_data.try_push(adv_uuids).unwrap();

    // Advertising must be disabled while data/parameters are updated.
    set_advertising_enable::send(&hi, false).await.unwrap();

    set_advertising_data::send(&hi, adv_data).await.unwrap();

    let mut adv_prams = set_advertising_parameters::AdvertisingParameters::default();

    // Advertise with a random device address rather than the public one.
    adv_prams.own_address_type = bo_tie::hci::le::common::OwnAddressType::RandomDeviceAddress;

    set_advertising_parameters::send(&hi, adv_prams).await.unwrap();

    set_advertising_enable::send(&hi, true).await.unwrap();
}
package rest import ( "net/http" "github.com/cosmos/cosmos-sdk/client" "github.com/cosmos/cosmos-sdk/client/tx" "github.com/cosmos/cosmos-sdk/types/rest" "github.com/gorilla/mux" "github.com/desmos-labs/desmos/x/magpie/types" ) func registerTxHandlers(clientCtx client.Context, r *mux.Router) { r.HandleFunc("/sessions", createSessionHander(clientCtx)).Methods("POST") } func createSessionHander(clientCtx client.Context) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { var req CreateSessionReq if !rest.ReadRESTReq(w, r, clientCtx.LegacyAmino, &req) { rest.WriteErrorResponse(w, http.StatusBadRequest, "failed to parse request") return } baseReq := req.BaseReq.Sanitize() if !baseReq.ValidateBasic(w) { return } // create the session msg := types.NewMsgCreateSession(req.BaseReq.From, req.Namespace, req.ExternalOwner, req.PublicKey, req.Signature) if rest.CheckBadRequestError(w, msg.ValidateBasic()) { return } tx.WriteGeneratedTxResponse(clientCtx, w, req.BaseReq, msg) } }
package com.thinkgem.jeesite.common.datatype;

/**
 * Simple pager payload: the current page index, the total record count and
 * the row data for that page.
 */
public class DataGrid {

    /** 1-based index of the current page; defaults to the first page. */
    private int page = 1;
    /** Row data for the current page (typically a collection or array). */
    private Object rows;
    /** Total number of records across all pages. */
    private int total;

    /** Creates an empty grid (page 1, no rows, zero total). */
    public DataGrid() {
    }

    /**
     * Creates a fully populated grid.
     *
     * @param pageIndex   1-based page index
     * @param recordCount total record count
     * @param data        row data for the page
     */
    public DataGrid(int pageIndex, int recordCount, Object data) {
        this.page = pageIndex;
        this.total = recordCount;
        this.rows = data;
    }

    public int getPage() {
        return page;
    }

    public void setPage(int page) {
        this.page = page;
    }

    public Object getRows() {
        return rows;
    }

    public void setRows(Object rows) {
        this.rows = rows;
    }

    public int getTotal() {
        return total;
    }

    public void setTotal(int total) {
        this.total = total;
    }
}
def build_tool(self, doc, entity):
    """Parse ``entity`` as a tool declaration and return a Tool.

    Raises ValueError when the entity does not match the tool pattern or
    the extracted name fails validation.
    """
    match = self.tool_re.match(entity)
    if match is None:
        raise ValueError('Failed to extract tool name')
    tool_name = match.group(self.TOOL_NAME_GROUP)
    if not validate_tool_name(tool_name):
        raise ValueError('Failed to extract tool name')
    return creationinfo.Tool(tool_name)
import pathlib

import click
import flake8.main.application
import toml


def _qs_or_vs(verbosity):
    """Turn a signed verbosity count into a "-v…"/"-q…" flag ("" for 0)."""
    if verbosity != 0:
        return f"-{'v' * verbosity if verbosity > 0 else 'q' * abs(verbosity)}"
    return ""


def _read_pyproject_toml(ctx, param, value):
    """Click callback for --config: merge [tool.ni-python-styleguide]
    settings from a pyproject.toml into the context's default map.

    Returns the config path actually used, or None when no file exists.
    """
    value = value or "pyproject.toml"  # Only accept local pyproject.toml if not specified
    try:
        pyproject_data = toml.load(value)
    except FileNotFoundError:
        # A missing implicit config file is fine; defaults apply.
        return None
    except (toml.TomlDecodeError, OSError) as e:
        raise click.FileError(filename=value, hint=f"Error reading configuration file: {e}")
    config = pyproject_data.get("tool", {}).get("ni-python-styleguide", {})
    # Verbosity may only come from the command line, never the file.
    config.pop("quiet", None)
    config.pop("verbose", None)
    if ctx.default_map is None:
        ctx.default_map = {}
    ctx.default_map.update(config)
    return value


class AllowConfigGroup(click.Group):
    """click.Group subclass which allows for a config option to load options from."""

    def __init__(self, *args, **kwargs):
        """Construct the click.Group with the config option."""
        # Eager so the config is loaded (and defaults applied) before the
        # other options are parsed.
        kwargs["params"].append(
            click.Option(
                ["--config"],
                type=click.Path(
                    exists=True,
                    file_okay=True,
                    dir_okay=False,
                    readable=True,
                    allow_dash=False,
                    path_type=str,
                ),
                is_eager=True,
                callback=_read_pyproject_toml,
                help="Config file to load configurable options from",
            )
        )
        super().__init__(*args, **kwargs)


@click.group(cls=AllowConfigGroup)
@click.option(
    "-v",
    "--verbose",
    count=True,
    help="Print more information. Repeat to increase verbosity.",
)
@click.option(
    "-q",
    "--quiet",
    count=True,
    help="Print less information. Repeat to decrease verbosity.",
)
@click.option(
    "--exclude",
    type=str,
    show_default=True,
    default="__pycache__,.git,.venv",
    help="Comma-separated list of files or directories to exclude.",
)
@click.option(
    "--extend-exclude",
    type=str,
    default="",
    help="Comma-separated list of files or directories to exclude (in addition to --exclude).",
)
@click.version_option()  # @TODO: override the message to include dependency version(s)
@click.pass_context
def main(ctx, verbose, quiet, config, exclude, extend_exclude):
    """NI's internal and external Python linter rules and plugins."""  # noqa: D4
    # Stash the resolved verbosity and exclusion list for subcommands.
    ctx.ensure_object(dict)
    ctx.obj["VERBOSITY"] = verbose - quiet
    ctx.obj["EXCLUDE"] = ",".join(filter(bool, [exclude.strip(","), extend_exclude.strip(",")]))


@main.command()
# @TODO: When we're ready to encourage editor integration, add --diff flag
@click.option("--format", type=str, help="Format errors according to the chosen formatter.")
@click.option(
    "--extend-ignore",
    type=str,
    help="Comma-separated list of errors and warnings to ignore (or skip)",
)
@click.argument("file_or_dir", nargs=-1)
@click.pass_obj
def lint(obj, format, extend_ignore, file_or_dir):
    """Lint the file(s)/directory(s) given."""  # noqa: D4
    # Delegate to flake8, translating our options into its CLI arguments.
    app = flake8.main.application.Application()
    args = [
        _qs_or_vs(obj["VERBOSITY"]),
        f"--config={(pathlib.Path(__file__).parent / 'config.ini').resolve()}",
        f"--exclude={obj['EXCLUDE']}" if obj["EXCLUDE"] else "",
        f"--format={format}" if format else "",
        f"--extend-ignore={extend_ignore}" if extend_ignore else "",
        # The only way to configure flake8-black's line length is through a pyproject.toml's
        # [tool.black] setting (which makes sense if you think about it)
        # So we need to give it one
        f"--black-config={(pathlib.Path(__file__).parent / 'config.toml').resolve()}",
        *file_or_dir,
    ]
    # Empty-string args would confuse flake8; strip them out.
    app.run(list(filter(bool, args)))
    app.exit()
// This cancels any notifications based on the oldest in the local SQL database static void clearOldestOverLimitFallback(Context context, int notificationsToMakeRoomFor) { OneSignalDbHelper dbHelper = OneSignalDbHelper.getInstance(context); Cursor cursor = null; try { cursor = dbHelper.query( NotificationTable.TABLE_NAME, new String[] { NotificationTable.COLUMN_NAME_ANDROID_NOTIFICATION_ID }, OneSignalDbHelper.recentUninteractedWithNotificationsWhere().toString(), null, null, null, OneSignalDbContract.NotificationTable._ID, getMaxNumberOfNotificationsString() + notificationsToMakeRoomFor ); int notificationsToClear = (cursor.getCount() - getMaxNumberOfNotificationsInt()) + notificationsToMakeRoomFor; if (notificationsToClear < 1) return; while (cursor.moveToNext()) { int existingId = cursor.getInt(cursor.getColumnIndex(NotificationTable.COLUMN_NAME_ANDROID_NOTIFICATION_ID)); OneSignal.removeNotification(existingId); if (--notificationsToClear <= 0) break; } } catch (Throwable t) { OneSignal.Log(OneSignal.LOG_LEVEL.ERROR, "Error clearing oldest notifications over limit! ", t); } finally { if (cursor != null && !cursor.isClosed()) cursor.close(); } }
<filename>neon_metrics_service/metrics_connector.py # NEON AI (TM) SOFTWARE, Software Development Kit & Application Framework # All trademark and other rights reserved by their respective owners # Copyright 2008-2022 Neongecko.com Inc. # Contributors: <NAME>, <NAME>, <NAME>, <NAME>, # <NAME>, <NAME>, <NAME>, <NAME> # BSD-3 License # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the names of its # contributors may be used to endorse or promote products derived from this # software without specific prior written permission. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, # OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import pika.channel
from typing import Optional
from neon_utils import LOG
from neon_utils.socket_utils import b64_to_dict, dict_to_b64
from neon_mq_connector.connector import MQConnector

from neon_metrics_service.metrics_utils import log_metric, log_client_connection


class NeonMetricsConnector(MQConnector):
    """Adapter for establishing connection between Neon API and MQ broker.

    Consumes metric and connection events from the '/neon_metrics' vhost,
    records them via metrics_utils, and (for metrics only) publishes a
    success/failure response back to the requester's routing key.
    """

    def __init__(self, config: Optional[dict], service_name: str):
        """
        Additionally accepts message bus connection properties
        :param config: dictionary containing MQ configuration data
        :param service_name: name of the service instance
        """
        super().__init__(config, service_name)
        self.vhost = '/neon_metrics'

    @staticmethod
    def handle_record_metric(**kwargs):
        """Record a metric; returns {"success": bool} and never raises."""
        try:
            log_metric(**kwargs)
            return {"success": True}
        except Exception as e:
            LOG.error(e)
            return {"success": False}

    @staticmethod
    def handle_record_connection(**kwargs):
        """Record a client connection; returns {"success": bool} and never raises."""
        try:
            log_client_connection(**kwargs)
            return {"success": True}
        except Exception as e:
            LOG.error(e)
            return {"success": False}

    def handle_metric(self,
                      channel: pika.channel.Channel,
                      method: pika.spec.Basic.Deliver,
                      _: pika.spec.BasicProperties,
                      body: bytes):
        """
        Handles input requests from MQ to Neon API
        :param channel: MQ channel object (pika.channel.Channel)
        :param method: MQ return method (pika.spec.Basic.Deliver)
        :param _: MQ properties (pika.spec.BasicProperties)
        :param body: request body (bytes)
        """
        message_id = None
        try:
            if body and isinstance(body, bytes):
                request = b64_to_dict(body)
                message_id = request.get("message_id")
                response = self.handle_record_metric(**request)
                # Echo the message id so the requester can correlate replies.
                response["message_id"] = message_id
                data = dict_to_b64(response)
                # queue declare is idempotent, just making sure queue exists
                channel.queue_declare(queue='neon_metrics_output')
                # Reply on the requester's routing key when provided,
                # otherwise the default output queue; short expiration so
                # unconsumed replies do not pile up.
                channel.basic_publish(exchange='',
                                      routing_key=request.get('routing_key', 'neon_metrics_output'),
                                      body=data,
                                      properties=pika.BasicProperties(expiration='1000')
                                      )
                # Ack only after the reply was published.
                channel.basic_ack(method.delivery_tag)
            else:
                raise TypeError(f'Invalid body received, expected: bytes string; got: {type(body)}')
        except Exception as e:
            # Swallow errors (message stays un-acked) but log with context.
            LOG.error(f"message_id={message_id}")
            LOG.error(e)

    def handle_new_connection(self,
                              channel: pika.channel.Channel,
                              method: pika.spec.Basic.Deliver,
                              _: pika.spec.BasicProperties,
                              body: bytes):
        """
        Handles input requests from MQ to Neon API
        :param channel: MQ channel object (pika.channel.Channel)
        :param method: MQ return method (pika.spec.Basic.Deliver)
        :param _: MQ properties (pika.spec.BasicProperties)
        :param body: request body (bytes)
        """
        message_id = None
        try:
            if body and isinstance(body, bytes):
                request = b64_to_dict(body)
                message_id = request.get("message_id")
                # Connection events are fire-and-forget: no reply published.
                self.handle_record_connection(**request)
                channel.basic_ack(method.delivery_tag)
            else:
                raise TypeError(f'Invalid body received, expected: bytes string; got: {type(body)}')
        except Exception as e:
            LOG.error(f"message_id={message_id}")
            LOG.error(e)

    def handle_error(self, thread, exception):
        # Consumer thread died: log and restart all consumers.
        LOG.error(f"{exception} in {thread}")
        LOG.info(f"Restarting Consumers")
        self.stop_consumers()
        self.run()

    def pre_run(self, **kwargs):
        # Register both consumers before the connector starts consuming.
        # auto_ack=False: the handlers ack explicitly after processing.
        self.register_consumer("neon_connections_consumer", self.vhost, 'neon_connections_input',
                               self.handle_new_connection, auto_ack=False)
        self.register_consumer("neon_metrics_consumer", self.vhost, 'neon_metrics_input',
                               self.handle_metric, auto_ack=False)
// AddException adds a tracing exception to the tracer func (tracer *epsagonTracer) AddException(exception *protocol.Exception) { defer func() { recover() }() tracer.exceptionsPipe <- exception }
from django.contrib import admin
from django import forms
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField

from crm import models


class UserCreationForm(forms.ModelForm):
    """A form for creating new users. Includes all the required
    fields, plus a repeated password."""
    password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
    password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)

    class Meta:
        model = models.UserProfile
        fields = ('email', 'name')

    def clean_password2(self):
        # Check that the two password entries match
        password1 = self.cleaned_data.get("password1")
        password2 = self.cleaned_data.get("password2")
        if password1 and password2 and password1 != password2:
            raise forms.ValidationError("Passwords don't match")
        return password2

    def save(self, commit=True):
        # Save the provided password in hashed format
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user


class UserChangeForm(forms.ModelForm):
    """A form for updating users. Includes all the fields on
    the user, but replaces the password field with admin's
    password hash display field.
    """
    password = ReadOnlyPasswordHashField(
        label="Password",
        help_text=("Raw passwords are not stored, so there is no way to see "
                   "this user's password, but you can change the password "
                   "using <a href=\"password/\">this form</a>."))

    class Meta:
        model = models.UserProfile
        fields = ('email', 'password', 'is_active', 'is_admin')

    def clean_password(self):
        # Regardless of what the user provides, return the initial value.
        # This is done here, rather than on the field, because the
        # field does not have access to the initial value
        return self.initial["password"]


class UserProfileAdmin(UserAdmin):
    """Admin for the custom user model, wired to the two forms above."""
    # The forms to add and change user instances
    form = UserChangeForm
    add_form = UserCreationForm

    # The fields to be used in displaying the User model.
    # These override the definitions on the base UserAdmin
    # that reference specific fields on auth.User.
    list_display = ('id', 'email', 'is_admin', 'is_active')
    list_filter = ('is_admin',)
    list_editable = ['is_admin']
    fieldsets = (
        (None, {'fields': ('email', 'name', 'password')}),
        ('Personal info', {'fields': ('memo',)}),
        ('用户权限', {'fields': ('is_active', 'is_staff', 'is_admin', 'roles',
                             'user_permissions', 'groups')}),
    )
    # add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
    # overrides get_fieldsets to use this attribute when creating a user.
    # FIX: the field names here must match UserCreationForm's declared
    # fields ('password1'/'password2'); the previous placeholder values
    # ('<PASSWORD>' twice) were invalid and duplicated.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2', 'is_active', 'is_admin')}
         ),
    )
    search_fields = ('email',)
    ordering = ('email',)
    filter_horizontal = ('user_permissions', 'groups')


class CustomerAdmin(admin.ModelAdmin):
    """Customer list with common CRM filters and inline phone editing."""
    list_display = ('qq', 'name', 'source', 'phone', 'course', 'class_type',
                    'consultant', 'status', 'date')
    choice_fields = ('status', 'source', 'class_type')
    fk_fields = ('consultant', 'course')
    list_per_page = 10
    list_filter = ('name', 'source', 'course', 'status', 'date', 'class_type',
                   'consultant')
    list_editable = ['phone', ]


class MenuAdmin(admin.ModelAdmin):
    """First-layer menu admin."""
    list_display = ('name', 'url_type', 'url_name', 'order')
    filter_horizontal = ('sub_menus',)


class SubMenuAdmin(admin.ModelAdmin):
    """Sub-menu admin."""
    list_display = ('name', 'url_name', 'order')


class RoleAdmin(admin.ModelAdmin):
    """Role admin with menu assignment."""
    list_display = ('name',)
    filter_horizontal = ('menus',)


class EnrollmentAdmin(admin.ModelAdmin):
    """Enrollment admin surfacing contract state."""
    list_display = ('customer', 'course_grade', 'school', 'enrolled_date',
                    'contract_agreed', 'contract_approved')


class PaymentRecordAdmin(admin.ModelAdmin):
    """Payment record admin."""
    list_display = ('enrollment', 'pay_type', 'paid_fee', 'date', 'consultant')


class StudyRecordAdmin(admin.ModelAdmin):
    """Study record admin with inline score/record editing."""
    list_display = ('id', 'student', 'course_record', 'record', 'score',
                    'date', 'note')
    list_editable = ('student', 'score', 'record', 'note')


class CourseAdmin(admin.ModelAdmin):
    """Course admin."""
    list_display = ('id', 'name', 'period')


admin.site.register(models.Customer, CustomerAdmin)
admin.site.register(models.CustomerFollowUp)
admin.site.register(models.Branch)
admin.site.register(models.ClassList)
admin.site.register(models.Course, CourseAdmin)
admin.site.register(models.Role, RoleAdmin)
admin.site.register(models.UserProfile, UserProfileAdmin)
admin.site.register(models.Enrollment, EnrollmentAdmin)
admin.site.register(models.StuAccount)
admin.site.register(models.CourseRecord)
admin.site.register(models.StudyRecord, StudyRecordAdmin)
admin.site.register(models.FirstLayerMenu, MenuAdmin)
admin.site.register(models.SubMenu, SubMenuAdmin)
admin.site.register(models.PaymentRecord, PaymentRecordAdmin)
/**
 * Checks whether a given String is a palindrome, ignoring case and any
 * characters that are not ASCII letters or digits.
 */
public class Palindrome {

    /**
     * Removes everything except for letters and numbers and makes capital letters lower case.
     *
     * @param text text to be cleaned
     * @return cleaned text
     */
    private static String cleanText(String text) {
        validateInput(text);
        return text.replaceAll("[^a-zA-Z0-9]+", "").toLowerCase();
    }

    /**
     * Validates the text.
     *
     * @param text text to be validated
     * @throws IllegalArgumentException if the text is null or empty
     */
    private static void validateInput(String text) {
        if (text == null || text.isEmpty()) {
            throw new IllegalArgumentException();
        }
    }

    /**
     * Determines whether the text is a palindrome using two converging indices.
     *
     * @param text any text
     * @return true if the text is a palindrome, false otherwise
     */
    public static boolean isPalindrome(String text) {
        String cleaned = cleanText(text);
        validateInput(cleaned);
        char[] chars = cleaned.toCharArray();
        for (int left = 0, right = chars.length - 1; left < right; left++, right--) {
            if (chars[left] != chars[right]) {
                return false;
            }
        }
        return true;
    }

    /**
     * Determines whether the text is a palindrome by reversing the first half
     * and comparing it against the second half.
     *
     * @param text any text
     * @return true if the text is a palindrome, false otherwise
     */
    public static boolean isPalindromeReverseString(String text) {
        String cleaned = cleanText(text);
        validateInput(cleaned);
        int half = cleaned.length() / 2;
        char[] firstHalf = new char[half];
        cleaned.getChars(0, half, firstHalf, 0);
        String reversedFirst = new StringBuilder().append(firstHalf).reverse().toString();
        // Skip the middle character for odd-length input.
        int tailStart = (cleaned.length() % 2 == 0) ? half : half + 1;
        return reversedFirst.equals(cleaned.substring(tailStart));
    }

    /**
     * Determines whether the text is a palindrome. Uses {@link StringBuilder#reverse()}.
     *
     * @param text any text
     * @return true if the text is a palindrome, false otherwise
     */
    public static boolean isPalindromeStringBuilder(String text) {
        String cleaned = cleanText(text);
        validateInput(cleaned);
        String reversed = new StringBuilder(cleaned).reverse().toString();
        return reversed.equals(cleaned);
    }

    /**
     * Determines whether the text is a palindrome. Uses {@link IntStream}.
     *
     * @param text any text
     * @return true if the text is a palindrome, false otherwise
     */
    public static boolean isPalindromeIntStream(String text) {
        String cleaned = cleanText(text);
        validateInput(cleaned);
        return IntStream
                .range(0, cleaned.length() / 2)
                .allMatch(i -> cleaned.charAt(i) == cleaned.charAt(cleaned.length() - 1 - i));
    }
}
use aoc_runner_derive::*; struct Polymer(String); fn char_reduces_with(c1: char, c2: Option<&char>) -> bool { match c2 { Some(&c2) => c1.to_ascii_lowercase() == c2.to_ascii_lowercase() && c1 != c2, None => false, } } impl Polymer { fn new(s: impl Into<String>) -> Polymer { Polymer(s.into()) } fn reduced_len(&self) -> usize { let mut reduced = Vec::new(); for c in self.0.chars() { if char_reduces_with(c, reduced.last()) { reduced.pop(); } else { reduced.push(c); } } reduced.len() } fn reduced_len_without(&self, exclude: char) -> usize { let mut reduced = Vec::new(); for c in self.0.chars() { if c.to_ascii_lowercase() == exclude { continue; } else if char_reduces_with(c, reduced.last()) { reduced.pop(); } else { reduced.push(c); } } reduced.len() } } #[aoc_generator(day5)] fn generator(input: &str) -> Polymer { Polymer::new(input.trim()) } #[aoc(day5, part1)] fn part1(input: &Polymer) -> usize { input.reduced_len() } #[aoc(day5, part2)] fn part2(input: &Polymer) -> usize { (b'a'..=b'z') .map(char::from) .map(|c| input.reduced_len_without(c)) .min() .unwrap() } #[test] fn test_part1() { let input_string = crate::util::read_file_to_string("./input/2018/day5.txt"); let input = generator(&input_string); let result = part1(&input); assert_eq!(result, 9386); } #[test] fn test_part1_example() { let input_string = "dabAcCaCBAcCcaDA"; let input = generator(&input_string); let result = part1(&input); assert_eq!(result, 10); } #[test] fn test_part2() { let input_string = crate::util::read_file_to_string("./input/2018/day5.txt"); let input = generator(&input_string); let result = part2(&input); assert_eq!(result, 4876); } #[test] fn test_part2_example() { let input_string = "dabAcCaCBAcCcaDA"; let input = generator(&input_string); let result = part2(&input); assert_eq!(result, 4); }
<reponame>Sadragiel/Vongola<filename>src/client/pages/NewCourseForm/index.tsx import * as React from 'react'; import FirstStep from './FirstStep'; import SecondStep from './SecondStep'; import './styles/index.scss'; export default function () { const [isFirstStep, setIsFirstStep] = React.useState(true); return ( <div className="new-course-form"> <h3 className="new-course-form__title"> Новий курс </h3> { isFirstStep ? <FirstStep cb={ (values: any) => { setIsFirstStep(false) }} /> : <SecondStep cb={ (values: any) => { }} /> } </div> ); }
#include "lua.h"

#include "../../include/lua/lua.hpp"

// Owning constructor: creates a fresh Lua state and opens the standard libs.
LuaScript::LuaScript() : need_close(true) {
    state = (void *)luaL_newstate();
    luaL_openlibs((lua_State *)state);
}

// Non-owning constructor: wraps an externally managed lua_State; the state
// is NOT closed on destruction.
LuaScript::LuaScript(void *s) : state(s), need_close(false) {}

LuaScript::~LuaScript() {
    // Only close states we created ourselves.
    if (need_close)
        lua_close((lua_State *)state);
}

// Compiles `code` as a chunk and leaves it on the stack; true on success.
bool LuaScript::load(const char * code) {
    return luaL_loadstring((lua_State *)state, code) == LUA_OK;
}

// Runs the previously loaded chunk; true on success.
bool LuaScript::execute() {
    return lua_pcall((lua_State *)state, 0, LUA_MULTRET, 0) == LUA_OK;
}

// Returns the error message Lua left on top of the stack (if any).
const char * LuaScript::getError() {
    return lua_tostring((lua_State *)state, -1);
}

// Fetches a global by name and converts it to a Variable.
// NOTE(review): lua_settop(state, 0) clears the whole stack first — callers
// must not rely on any values they left on the stack.
LuaScript::Variable LuaScript::getVariable(const char *name) {
    lua_settop((lua_State *)state, 0);
    lua_getglobal((lua_State *)state, name);
    Variable var = getVariable(1);
    lua_pop((lua_State *)state, 1);
    return var;
}

// Converts the stack slot `luaID` into a tagged Variable.  TABLE values are
// tagged only (no contents extracted); unrecognized types become UNKNOW.
LuaScript::Variable LuaScript::getVariable(int luaID) {
    Variable var;
    switch (lua_type((lua_State *)state, luaID)) {
        case LUA_TNIL:
            var.type = NIL;
            break;
        case LUA_TBOOLEAN:
            var.type = BOOLEAN;
            var.v_boolean = (bool)(lua_toboolean((lua_State *)state, luaID) != 0);
            break;
        case LUA_TNUMBER:
            // Lua numbers are doubles; narrowed to float here.
            var.type = NUMBER;
            var.v_number = (float)lua_tonumber((lua_State *)state, luaID);
            break;
        case LUA_TSTRING:
            var.type = STRING;
            var.v_string = lua_tostring((lua_State *)state, luaID);
            break;
        case LUA_TTABLE:
            var.type = TABLE;
            break;
        case LUA_TFUNCTION:
            // Only C functions yield a usable pointer; Lua closures give null.
            var.type = FUNCTION;
            var.v_pointer = (void *)lua_tocfunction((lua_State *)state, luaID);
            break;
        case LUA_TLIGHTUSERDATA:
            var.type = POINTER;
            var.v_pointer = (void *)lua_topointer((lua_State *)state, luaID);
            break;
        default:
            var.type = UNKNOW;
    }
    return var;
}

// Pushes a Variable onto the Lua stack.  TABLE is not reconstructed — the
// placeholder string "array" is pushed instead; unknown types push nil.
void LuaScript::pushVariable(const Variable &var) {
    switch (var.type) {
        case NIL:
            lua_pushnil((lua_State *)state);
            break;
        case BOOLEAN:
            lua_pushboolean((lua_State *)state, var.v_boolean ? 1 : 0);
            break;
        case INTEGER:
            lua_pushinteger((lua_State *)state, var.v_integer);
            break;
        case NUMBER:
            lua_pushnumber((lua_State *)state, var.v_number);
            break;
        case STRING:
            lua_pushstring((lua_State *)state, var.v_string.c_str());
            break;
        case TABLE:
            // Placeholder: tables are not serialized back into Lua.
            lua_pushstring((lua_State *)state, "array");
            break;
        case POINTER:
            lua_pushlightuserdata((lua_State *)state, var.v_pointer);
            break;
        default:
            lua_pushnil((lua_State *)state);
    }
}

// Creates/overwrites a global with the given value.
void LuaScript::createVariable(const char *name, const Variable &var) {
    pushVariable(var);
    lua_setglobal((lua_State *)state, name);
}

// Registers a C function as a Lua global.
// NOTE(review): the callback is cast from int(*)(void*) to lua_CFunction;
// the void* argument is actually the lua_State* — confirm callers expect this.
void LuaScript::createFunction(const char *name, int (*value)(void *)) {
    lua_register((lua_State *)state, name, (int(*)(lua_State *))value);
}

// Calls global `name` with `count` arguments; returns all results converted
// to Variables.  Returns an empty vector when `name` is not a function.
// NOTE(review): the lua_pcall error case is not checked — an error message
// would be collected as a result value.
std::vector<LuaScript::Variable> LuaScript::callFunction(const char *name, Variable *args, int count) {
    std::vector<LuaScript::Variable> result;
    lua_getglobal((lua_State *)state, name);
    if (!lua_isfunction((lua_State *)state, -1)) {
        lua_pop((lua_State *)state, 1);
        return result;
    } else {
        for (int i = 0; i < count; i++)
            pushVariable(args[i]);
        lua_pcall((lua_State *)state, count, LUA_MULTRET, 0);
        int rcount = lua_gettop((lua_State *)state);
        for (int i = 0; i < rcount; i++)
            result.push_back(getVariable(i + 1));
        return result;
    }
}
Ben Carson is calling on Democratic presidential nominee Hillary Clinton to release the results of a specialized MRI. Carson, a supporter of Republican presidential nominee Donald Trump, told BuzzFeed that it is "imperative" both presidential candidates release their medical records to "allow the people a fair opportunity to assess their physical fitness for office." “In the case of Hillary Clinton, because she has had a brain injury in the past and there is a question of venous sinus thrombosis, the result of a recent specialized MRI would be very helpful," Carson, a retired neurosurgeon, said. "The stakes are too high for us to blindly elect someone to the presidency of the United States when we have the possibility of examining real evidence upon which to make a decision.” Clinton left a 9/11 memorial early on Sunday because she became dehydrated and overheated, her campaign said. A video surfaced Sunday that appeared to show the Democratic nominee stumbling to a black van with the help of aides when she was departing from the event. She later returned to her home in Chappaqua, N.Y., and her doctor released a statement saying the Democratic nominee had been diagnosed with pneumonia on Friday. She subsequently canceled her California trip beginning Monday. Carson appeared to suggest that there could be more going on with Clinton's health than her campaign has shared.
“The fact that the security detail assisting her as she clumsily attempted to enter her vehicle after abruptly ending her participation at a 9/11 event did not appear surprised as if dealing with something new, makes one wonder if such awkward moments are something they have become accustomed to,” he said. Carson also took a shot at the Democratic nominee's character, saying Clinton's biggest health problem is "the disease of dishonesty." “People in America tend to be understanding of natural diseases and are sympathetic," he said. "But adding more fuel to the fire of suspicion about her honesty by hiding a diagnosis of pneumonia and possibly other things may be a mistake from which there is no recovery.”
// Package niconico: RTMP-based recorder for Niconico live streams and
// timeshifts.  Fetches getplayerstatus/getedgestatus XML, derives the
// stream list, and records each stream to FLV via the rtmps package.
package niconico

import (
	"fmt"
	"encoding/xml"
	"io/ioutil"
	"regexp"
	"strings"
	"net/url"
	"sync"
	"log"
	"time"
	"../rtmps"
	"../amf"
	"../options"
	"../files"
	"../httpbase"
)

// Content is one <contents> entry of the getplayerstatus XML.
type Content struct {
	Id   string `xml:"id,attr"`
	Text string `xml:",chardata"`
}

// Tickets is one <tickets><stream> entry (name -> ticket text).
type Tickets struct {
	Name string `xml:"name,attr"`
	Text string `xml:",chardata"`
}

// Status is the parsed getplayerstatus response plus recording state.
type Status struct {
	Title                 string    `xml:"stream>title"`
	CommunityId           string    `xml:"stream>default_community"`
	Id                    string    `xml:"stream>id"`
	Provider              string    `xml:"stream>provider_type"`
	IsArchive             bool      `xml:"stream>archive"`
	IsArchivePlayerServer bool      `xml:"stream>is_archiveplayserver"`
	Ques                  []string  `xml:"stream>quesheet>que"`
	Contents              []Content `xml:"stream>contents_list>contents"`
	IsPremium             bool      `xml:"user>is_premium"`
	Url                   string    `xml:"rtmp>url"`
	Ticket                string    `xml:"rtmp>ticket"`
	Tickets               []Tickets `xml:"tickets>stream"`
	ErrorCode             string    `xml:"error>code"`
	// streams: resolved list of recordable streams (filled by initStreams).
	streams []Stream
	// chStream: semaphore limiting concurrent recordings.
	chStream chan struct{}
	wg       *sync.WaitGroup
}

// StatusE is the parsed getedgestatus response (rtmp url/ticket overrides).
type StatusE struct {
	Url       string `xml:"rtmp>url"`
	Ticket    string `xml:"rtmp>ticket"`
	ErrorCode string `xml:"error>code"`
}

// Stream identifies one recordable RTMP stream.
type Stream struct {
	originUrl    string
	streamName   string
	originTicket string
}

// quesheet parses the timeshift <quesheet> entries: /publish lines map a
// stream id to its origin/name/ticket, /play lines select which id to use
// ("premium" entry for premium users, otherwise "default").
func (status *Status) quesheet() {
	stream := make(map[string][]Stream)
	playType := make(map[string]string)

	// timeshift; <quesheet> tag
	re_pub := regexp.MustCompile(`\A/publish\s+(\S+)\s+(?:(\S+?),)?(\S+?)(?:\?(\S+))?\z`)
	re_play := regexp.MustCompile(`\A/play\s+(\S+)\s+(\S+)\z`)
	for _, q := range status.Ques {
		// /publish lv* /content/*/lv*_*_1_*.f4v
		if ma := re_pub.FindStringSubmatch(q); len(ma) >= 5 {
			stream[ma[1]] = append(stream[ma[1]], Stream{
				originUrl:    ma[2],
				streamName:   ma[3],
				originTicket: ma[4],
			})
			// /play ...
		} else if ma := re_play.FindStringSubmatch(q); len(ma) > 0 {
			// /play case:sp:rtmp:lv*_s_lv*,mobile:rtmp:lv*_s_lv*_sub1,premium:rtmp:lv*_s_lv*_sub1,default:rtmp:lv*_s_lv* main
			if strings.HasPrefix(ma[1], "case:") {
				s0 := ma[1]
				s0 = strings.TrimPrefix(s0, "case:")
				cases := strings.Split(s0, ",")
				// sp:rtmp:lv*_s_lv*
				re := regexp.MustCompile(`\A(\S+?):rtmp:(\S+?)\z`)
				for _, c := range cases {
					if ma := re.FindStringSubmatch(c); len(ma) > 0 {
						playType[ma[1]] = ma[2]
					}
				}
				// /play rtmp:lv* main
			} else {
				re := regexp.MustCompile(`\Artmp:(\S+?)\z`)
				if ma := re.FindStringSubmatch(ma[1]); len(ma) > 0 {
					playType["default"] = ma[1]
				}
			}
		}
	}

	// Premium users prefer the "premium" play type; everyone else "default".
	pt, ok := playType["premium"]
	if ok && status.IsPremium {
		s, ok := stream[pt]
		if ok {
			status.streams = s
		}
	} else {
		pt, ok := playType["default"]
		if ok {
			s, ok := stream[pt]
			if ok {
				status.streams = s
			}
		}
	}
}

// initStreams populates status.streams once, trying all three sources.
// The original type-based dispatch is kept commented out: all three
// collectors run and each is a no-op when its data is absent.
func (status *Status) initStreams() {
	if len(status.streams) > 0 {
		return
	}
	//if status.isOfficialLive() {
	status.contentsOfficialLive()
	//} else if status.isLive() {
	status.contentsNonOfficialLive()
	//} else {
	status.quesheet()
	//}
	return
}

// getFileName builds the output FLV name; multi-stream recordings get a
// "#<n>" suffix.  Exits the process (log.Fatalf) when no stream exists.
func (status *Status) getFileName(index int) (name string) {
	if len(status.streams) == 1 {
		//name = fmt.Sprintf("%s.flv", status.Id)
		name = fmt.Sprintf("%s-%s-%s.flv", status.Id, status.CommunityId, status.Title)
	} else if len(status.streams) > 1 {
		//name = fmt.Sprintf("%s-%d.flv", status.Id, 1 + index)
		name = fmt.Sprintf("%s-%s-%s#%d.flv", status.Id, status.CommunityId, status.Title, 1+index)
	} else {
		log.Fatalf("No stream")
	}
	// Strip characters the filesystem forbids.
	name = files.ReplaceForbidden(name)
	return
}

// contentsNonOfficialLive collects streams from <contents_list> for
// non-official live broadcasts ("rtmp://..." entries).
func (status *Status) contentsNonOfficialLive() {
	re := regexp.MustCompile(`\A(?:rtmp:)?(rtmp\w*://\S+?)(?:,(\S+?)(?:\?(\S+))?)?\z`)
	// Live (not timeshift); <contents_list> tag
	for _, c := range status.Contents {
		if ma := re.FindStringSubmatch(c.Text); len(ma) > 0 {
			status.streams = append(status.streams, Stream{
				originUrl:    ma[1],
				streamName:   ma[2],
				originTicket: ma[3],
			})
		}
	}
}

// contentsOfficialLive collects streams for official live broadcasts:
// "case:" entries select a URL per client type; only the "default" case
// records the URL, stream name and its matching ticket.
func (status *Status) contentsOfficialLive() {
	tickets := make(map[string]string)
	for _, t := range status.Tickets {
		tickets[t.Name] = t.Text
	}
	for _, c := range status.Contents {
		if strings.HasPrefix(c.Text, "case:") {
			c.Text = strings.TrimPrefix(c.Text, "case:")
			for _, c := range strings.Split(c.Text, ",") {
				c, e := url.PathUnescape(c)
				if e != nil {
					fmt.Printf("%v\n", e)
				}
				re := regexp.MustCompile(`\A(\S+?):(?:limelight:|akamai:)?(\S+),(\S+)\z`)
				if ma := re.FindStringSubmatch(c); len(ma) > 0 {
					fmt.Printf("\n%#v\n", ma)
					switch ma[1] {
					default:
						fmt.Printf("unknown contents case %#v\n", ma[1])
					case "mobile":
					case "middle":
					case "default":
						status.Url = ma[2]
						t, ok := tickets[ma[3]]
						if !ok {
							fmt.Printf("not found %s\n", ma[3])
						}
						fmt.Printf("%s\n", t)
						status.streams = append(status.streams, Stream{
							streamName:   ma[3],
							originTicket: t,
						})
					}
				}
			}
		}
	}
}

// relayStreamName: basename of the stream name, with "_<offset>" appended
// for non-negative offsets.
func (status *Status) relayStreamName(i, offset int) (s string) {
	s = regexp.MustCompile(`[^/\\]+\z`).FindString(status.streams[i].streamName)
	if offset >= 0 {
		s += fmt.Sprintf("_%d", offset)
	}
	return
}

// streamName computes the name passed to RTMP play for stream i:
// official live -> raw name (+ "?ticket"); official timeshift -> ".flv"
// stripped and "mp4:"/"raw:" prefix added; otherwise the relay name.
func (status *Status) streamName(i, offset int) (name string, err error) {
	if status.isOfficialLive() {
		if i >= len(status.streams) {
			err = fmt.Errorf("(status *Status) streamName(i int): Out of index: %d\n", i)
			return
		}
		name = status.streams[i].streamName
		if status.streams[i].originTicket != "" {
			name += "?" + status.streams[i].originTicket
		}
		return
	} else if status.isOfficialTs() {
		name = status.streams[i].streamName
		name = regexp.MustCompile(`(?i:\.flv)$`).ReplaceAllString(name, "")
		if regexp.MustCompile(`(?i:\.(?:f4v|mp4))$`).MatchString(name) {
			name = "mp4:" + name
		} else if regexp.MustCompile(`(?i:\.raw)$`).MatchString(name) {
			name = "raw:" + name
		}
	} else {
		name = status.relayStreamName(i, offset)
	}
	return
}

// tcUrl returns the RTMP connection URL, lazily resolving it from the
// official-live contents when it was not present in the status XML.
func (status *Status) tcUrl() (url string, err error) {
	if status.Url != "" {
		url = status.Url
		return
	} else {
		status.contentsOfficialLive()
	}
	if status.Url != "" {
		url = status.Url
		return
	}
	err = fmt.Errorf("tcUrl not found")
	return
}

// isTs: the broadcast is a timeshift (archive).
func (status *Status) isTs() bool {
	return status.IsArchive
}

// isLive: the broadcast is currently live.
func (status *Status) isLive() bool {
	return !status.IsArchive
}

// isOfficialLive: official provider AND live.
func (status *Status) isOfficialLive() bool {
	return (status.Provider == "official") && (!status.IsArchive)
}

// isOfficialTs: archive served by the official player server ("official",
// or "channel" when is_archiveplayserver is set).
func (status *Status) isOfficialTs() bool {
	if status.IsArchive {
		switch status.Provider {
		case "official":
			return true
		case "channel":
			return status.IsArchivePlayerServer
		}
	}
	return false
}

// relayStreamName (Stream form): basename + optional "_<offset>" suffix.
func (st Stream) relayStreamName(offset int) (s string) {
	s = regexp.MustCompile(`[^/\\]+\z`).FindString(st.streamName)
	if offset >= 0 {
		s += fmt.Sprintf("_%d", offset)
	}
	return
}

// noticeStreamName: stream name for nlPlayNotice — ".flv" stripped,
// "mp4:"/"raw:" prefix by extension, "?ticket" appended when present.
func (st Stream) noticeStreamName(offset int) (s string) {
	s = st.streamName
	s = regexp.MustCompile(`(?i:\.flv)$`).ReplaceAllString(s, "")
	if regexp.MustCompile(`(?i:\.(?:f4v|mp4))$`).MatchString(s) {
		s = "mp4:" + s
	} else if regexp.MustCompile(`(?i:\.raw)$`).MatchString(s) {
		s = "raw:" + s
	}
	if st.originTicket != "" {
		s += "?" + st.originTicket
	}
	return
}

// recStream records status.streams[index] to an FLV file, retrying up to
// 10 times for incomplete official timeshifts (refreshing the ticket each
// retry).  Releases the concurrency semaphore and WaitGroup on return.
func (status *Status) recStream(index int, opt options.Option) (err error) {
	defer func() {
		<-status.chStream
		status.wg.Done()
	}()

	stream := status.streams[index]

	tcUrl, err := status.tcUrl()
	if err != nil {
		return
	}

	rtmp, err := rtmps.NewRtmp(
		// tcUrl
		tcUrl,
		// swfUrl
		"http://live.nicovideo.jp/nicoliveplayer.swf?180116154229",
		// pageUrl
		"http://live.nicovideo.jp/watch/"+status.Id,
		// option
		status.Ticket,
	)
	if err != nil {
		return
	}
	defer rtmp.Close()

	fileName, err := files.GetFileNameNext(status.getFileName(index))
	if err != nil {
		return
	}
	rtmp.SetFlvName(fileName)

	// tryRecord performs one full connect/play/record attempt.
	tryRecord := func() (incomplete bool, err error) {
		if err = rtmp.Connect(); err != nil {
			return
		}

		// default: 2500000
		//if err = rtmp.SetPeerBandwidth(100*1000*1000, 0); err != nil {
		if err = rtmp.SetPeerBandwidth(2500000, 0); err != nil {
			fmt.Printf("SetPeerBandwidth: %v\n", err)
			return
		}

		if err = rtmp.WindowAckSize(2500000); err != nil {
			fmt.Printf("WindowAckSize: %v\n", err)
			return
		}

		if err = rtmp.CreateStream(); err != nil {
			fmt.Printf("CreateStream %v\n", err)
			return
		}

		if err = rtmp.SetBufferLength(0, 2000); err != nil {
			fmt.Printf("SetBufferLength: %v\n", err)
			return
		}

		// Seek offset: 0 for archives, -2 (live edge) otherwise.
		var offset int
		if status.IsArchive {
			offset = 0
		} else {
			offset = -2
		}

		if status.isOfficialTs() {
			// Official timeshift: poll sendFileRequest until the server
			// acknowledges the file (up to ~30 tries, 10s apart).
			for i := 0; true; i++ {
				if i > 30 {
					err = fmt.Errorf("sendFileRequest: No response")
					return
				}
				data, e := rtmp.Command(
					"sendFileRequest", []interface{}{
						nil,
						amf.SwitchToAmf3(),
						[]string{
							stream.streamName,
						},
					})
				if e != nil {
					err = e
					return
				}

				// Any non-empty response (map or slice) means the file is ready.
				var resCnt int
				switch data.(type) {
				case map[string]interface{}:
					resCnt = len(data.(map[string]interface{}))
				case map[int]interface{}:
					resCnt = len(data.(map[int]interface{}))
				case []interface{}:
					resCnt = len(data.([]interface{}))
				case []string:
					resCnt = len(data.([]string))
				}
				if resCnt > 0 {
					break
				}
				time.Sleep(10 * time.Second)
			}

		} else if !status.isOfficialLive() {
			// (Translated from the original Japanese notes.)
			// nlPlayNotice arguments mirror the /publish second argument:
			//   originUrl, playStreamName[?originTicket]
			// ".flv" is stripped from the stream name; ".f4v"/".mp4" gain an
			// "mp4:" prefix and ".raw" a "raw:" prefix.
			// relayStreamName is the stream name's basename; "_<offset>" is
			// appended unless offset == -2.
			//   live:      "lvXXXXXXXXX" (offset -2)
			//   timeshift: "lvXXXXXXXXX_XXXXXXXXXXXX_1_XXXXXX.f4v_0"
			_, err = rtmp.Command(
				"nlPlayNotice", []interface{}{
					nil,
					// _connection.request.originUrl
					stream.originUrl,
					// playStreamName, with "?originTicket" appended when present
					stream.noticeStreamName(offset),
					// relayStreamName (+ "_<offset>" unless offset == -2)
					stream.relayStreamName(offset),
					// seek offset: live -2, timeshift 0
					offset,
				})
			if err != nil {
				fmt.Printf("nlPlayNotice %v\n", err)
				return
			}
		}

		if err = rtmp.SetBufferLength(1, 3600*1000); err != nil {
			fmt.Printf("SetBufferLength: %v\n", err)
			return
		}

		// No return
		rtmp.SetFixAggrTimestamp(true)

		// user kako: lv*********_************_*_******.f4v_0
		// official or channel ts: mp4:/content/********/lv*********_************_*_******.f4v
		//if err = rtmp.Play(status.origin.playStreamName(status.isTsOfficial(), offset)); err != nil {
		streamName, err := status.streamName(index, offset)
		if err != nil {
			return
		}

		if status.isOfficialTs() {
			// Resume slightly before the last recorded timestamp.
			ts := rtmp.GetTimestamp()
			if ts > 1000 {
				err = rtmp.PlayTime(streamName, ts-1000)
			} else {
				err = rtmp.PlayTime(streamName, -5000)
			}
		} else if status.isTs() {
			rtmp.SetFlush(true)
			err = rtmp.PlayTime(streamName, -5000)
		} else {
			err = rtmp.Play(streamName)
		}
		if err != nil {
			fmt.Printf("Play: %v\n", err)
			return
		}

		// Do not seek on non-recorded timeshifts: only the timestamp changes
		// and playback restarts from the beginning.  Seeking on official
		// timeshifts also yields broken timestamps.
		if opt.NicoTestTimeout > 0 {
			// test mode
			_, incomplete, err = rtmp.WaitTest(opt.NicoTestTimeout)
		} else {
			// normal mode
			_, incomplete, err = rtmp.Wait()
		}
		return
	} // end func

	//ticketTime := time.Now().Unix()
	//rtmp.SetNoSeek(false)
	for i := 0; i < 10; i++ {
		incomplete, e := tryRecord()
		if e != nil {
			err = e
			fmt.Printf("%v\n", e)
			return
		} else if incomplete && status.isOfficialTs() {
			fmt.Println("incomplete")
			time.Sleep(3 * time.Second)

			// update ticket
			if true {
				//if time.Now().Unix() > ticketTime + 60 {
				//ticketTime = time.Now().Unix()
				if ticket, e := getTicket(opt); e != nil {
					err = e
					return
				} else {
					rtmp.SetConnectOpt(ticket)
				}
				//}
			}
			continue
		}
		break
	}
	fmt.Printf("done\n")
	return
}

// recAllStreams records every resolved stream, limited to MaxConn
// concurrent recordings (1 for official timeshifts, 4 by default, or the
// user-configured value).  Refreshes the ticket every 60s while spawning.
func (status *Status) recAllStreams(opt options.Option) (err error) {
	status.initStreams()

	var MaxConn int
	if opt.NicoRtmpMaxConn == 0 {
		if status.isOfficialTs() {
			MaxConn = 1
		} else {
			MaxConn = 4
		}
	} else if opt.NicoRtmpMaxConn < 0 {
		MaxConn = 1
	} else {
		MaxConn = opt.NicoRtmpMaxConn
	}

	status.wg = &sync.WaitGroup{}
	status.chStream = make(chan struct{}, MaxConn)

	ticketTime := time.Now().Unix()
	for index, _ := range status.streams {
		// Optional per-index selection from the command line.
		if opt.NicoRtmpIndex != nil {
			if tes, ok := opt.NicoRtmpIndex[index]; !ok || !tes {
				continue
			}
		}

		// blocks here
		status.chStream <- struct{}{}
		status.wg.Add(1)
		go status.recStream(index, opt)

		now := time.Now().Unix()
		if now > ticketTime+60 {
			ticketTime = now
			if ticket, e := getTicket(opt); e != nil {
				err = e
				return
			} else {
				status.Ticket = ticket
			}
		}
	}
	status.wg.Wait()
	return
}

// getTicket re-fetches the player status just to obtain a fresh RTMP ticket.
func getTicket(opt options.Option) (ticket string, err error) {
	status, notLogin, err := getStatus(opt)
	if err != nil {
		return
	}
	if status.Ticket != "" {
		ticket = status.Ticket
	} else {
		if notLogin {
			err = fmt.Errorf("notLogin")
		} else {
			err = fmt.Errorf("Ticket not found")
		}
	}
	return
}

// getStatus fetches and parses getplayerstatus and getedgestatus.  The
// edge response's URL/ticket override the player response when present.
// notLogin is set (without error) when either API reports "notlogin".
func getStatus(opt options.Option) (status *Status, notLogin bool, err error) {
	var uri, uriE string
	// experimental
	if opt.NicoStatusHTTPS {
		uri = fmt.Sprintf("https://ow.live.nicovideo.jp/api/getplayerstatus?v=%s", opt.NicoLiveId)
		uriE = fmt.Sprintf("https://ow.live.nicovideo.jp/api/getedgestatus?v=%s", opt.NicoLiveId)
	} else {
		uri = fmt.Sprintf("http://watch.live.nicovideo.jp/api/getplayerstatus?v=%s", opt.NicoLiveId)
		uriE = fmt.Sprintf("http://watch.live.nicovideo.jp/api/getedgestatus?v=%s", opt.NicoLiveId)
	}

	header := make(map[string]string, 4)
	if opt.NicoSession != "" {
		header["Cookie"] = "user_session=" + opt.NicoSession
	}

	// experimental
	//if opt.NicoStatusHTTPS {
	//	req.Header.Set("User-Agent", "Niconico/1.0 (Unix; U; iPhone OS 10.3.3; ja-jp; nicoiphone; iPhone5,2) Version/6.65")
	//}

	resp, err, neterr := httpbase.Get(uri, header)
	if err != nil {
		return
	}
	if neterr != nil {
		err = neterr
		return
	}
	defer resp.Body.Close()

	dat, _ := ioutil.ReadAll(resp.Body)

	status = &Status{}
	err = xml.Unmarshal(dat, status)
	if err != nil {
		//fmt.Println(string(dat))
		fmt.Printf("error: %v", err)
		return
	}

	switch status.ErrorCode {
	case "":
	case "notlogin":
		notLogin = true
	default:
		err = fmt.Errorf("Error code: %s\n", status.ErrorCode)
		return
	}

	respE, err, neterr := httpbase.Get(uriE, header)
	if err != nil {
		return
	}
	if neterr != nil {
		err = neterr
		return
	}
	defer respE.Body.Close()

	datE, _ := ioutil.ReadAll(respE.Body)

	statusE := &StatusE{}
	err = xml.Unmarshal(datE, statusE)
	if err != nil {
		//fmt.Println(string(dat))
		fmt.Printf("error: %v", err)
		return
	}

	switch statusE.ErrorCode {
	case "":
	case "notlogin":
		notLogin = true
	default:
		err = fmt.Errorf("Error code: %s\n", statusE.ErrorCode)
		return
	}

	// Edge status wins when it supplies url/ticket.
	if statusE.Url != "" {
		status.Url = statusE.Url
	}
	if statusE.Ticket != "" {
		status.Ticket = statusE.Ticket
	}

	return
}

// NicoRecRtmp is the entry point: fetch status and record all streams.
// Returns notLogin=true (no error) when the session cookie is missing/stale.
func NicoRecRtmp(opt options.Option) (notLogin bool, err error) {
	status, notLogin, err := getStatus(opt)
	if err != nil {
		return
	}
	if notLogin {
		return
	}
	status.recAllStreams(opt)
	return
}
/** * Create a StorableWeakPointer from thsi Storable to antoher Storable. * @param <T> * @param target The targeted Storable. * @return A StorableWeakPointer. * @throws StorageException * @throws PermissionException */ protected <T extends Storable> StorableWeakPointer<T> weakPointer(T target) throws StorageException, PermissionException { if(target.getStorage()==null) storage.init(target); else if (target.getStorage() != this.getStorage()) throw new StorageException("Target is not in the same storage."); return new StorableWeakPointer<T>(this, target); }
import { CorsOptions } from '@nestjs/common/interfaces/external/cors-options.interface'; import { RequestHandler } from 'express'; import { ConnectionOptions } from 'typeorm'; import { MpcastLogger } from './logger/mpcast-logger'; import { MpcastPlugin } from './mpcast-plugin/mpcast-plugin'; export interface AuthOptions { /** * @default 'bearer */ // tokenMethod?: 'cookie' | 'bearer'; tokenMethod?: 'cookie' | 'bearer'; /** * Token 密钥 */ jwtTokenSecret?: string; /** * @description * 设置 header 头属性 * * @default `Authorization` */ authTokenHeaderKey?: string; /** * @description * Sets the length of time that a verification token is valid for, after which the verification token must be refreshed. * * Expressed as a string describing a time span per * [zeit/ms](https://github.com/zeit/ms.js). Eg: `60`, `'2 days'`, `'10h'`, `'7d'` * * @default '3d' */ expiresIn?: string | number; } export interface MpcastConfig { /** * @description * The connection options used by TypeORM to connect to the database. * 使用 TypeORM 的数据库连接项 */ dbConnectionOptions: ConnectionOptions; hostname?: string; /** * @description * Custom Express middleware for the server. * * @default [] */ middleware?: Array<{ handler: RequestHandler; route: string }>; /** * @description * An array of plugins. * * @default [] */ plugins?: MpcastPlugin[]; /** * @description * 默认服务启动端口 * * @default 5000 */ port?: number; /** * @description * Set the CORS handling for the server. See the [express CORS docs](https://github.com/expressjs/cors#configuration-options). * * @default { origin: true, credentials: true } */ cors?: boolean | CorsOptions; /** * @description * Configuration for the handling of Assets. */ // assetOptions?: AssetOptions; /** * @description * Configuration for authorization. */ authOptions?: AuthOptions; /** * @description * Provide a logging service which implements the {@link MpcastLogger} interface. * 实现 BaseLogger 接口的一个默认日志服务 * @default DefaultLogger */ logger?: MpcastLogger; }
def combo_blocksize_changed(self):
    """Apply the block size selected in the combo box across the app.

    Reads the new block size from the combo box, pushes it to the
    analyzer (``self.aa``) and the plot canvas, refreshes the test data
    and the stream, and persists the choice in the settings mapping.
    """
    self.blocksize = int(self.comboBlockSize.currentText())
    self.aa.set_properties(blocksize=self.blocksize)
    self.canvas.set_plot_properties(blocksize=self.blocksize)
    self.update_test_data()
    self.update_stream()
    # Persist so the chosen block size survives restarts.
    self.settings["blocksize"] = self.blocksize
def cor_nzagri(isReload=True):
    """Build the New-Zealand-agriculture correlation market set and run it.

    ``data`` lists 22 instruments: 20 commodities, one index
    ('PHLX New Zealand Dollar') and one forex pair ('NZD/USD').
    ``info`` supplies one ``[market, country, fetcher]`` entry per
    instrument, in the same order as ``data``.
    """
    data = ['Rough Rice', 'US Soybean Oil', 'US Soybean Meal', 'US Soybeans',
            'US Wheat', 'US Corn', 'Oats', 'London Wheat', 'US Coffee C',
            'US Cotton #2', 'US Sugar #11', 'Orange Juice', 'US Cocoa',
            'Lumber', 'London Cocoa', 'London Coffee', 'London Sugar',
            'Live Cattle', 'Lean Hogs', 'Feeder Cattle',
            'PHLX New Zealand Dollar', 'NZD/USD']
    # 20 commodity entries, then the index, then the forex pair —
    # must stay aligned one-to-one with `data` above.
    info = [[markets[2], 'united states', get_commodities]] *\
        20 + [[markets[0], 'united states', get_index]] \
        + [[markets[1], 'united states', get_forex]]
    params = ['cor_nzagri', data, info, analysis_commodity]
    make_market(params, isReload)
<filename>src/main.rs
extern crate syscall;

use std::env;
use std::fs::{File, read_dir};
use std::io::{BufReader, BufRead, Error, Result};
use std::os::unix::io::{AsRawFd, FromRawFd, RawFd};
use std::path::Path;
use std::process::Command;

use syscall::flag::{WaitFlags, O_RDONLY, O_WRONLY};

/// Redirects fds 0, 1 and 2 to the scheme/path `stdio` by opening it three
/// times and `dup2`-ing the fresh fds over the standard ones.  The temporary
/// `File` wrappers close the fresh fds when dropped; the dup'd copies remain.
fn switch_stdio(stdio: &str) -> Result<()> {
    let stdin = unsafe { File::from_raw_fd(
        syscall::open(stdio, O_RDONLY).map_err(|err| Error::from_raw_os_error(err.errno))? as RawFd
    ) };
    let stdout = unsafe { File::from_raw_fd(
        syscall::open(stdio, O_WRONLY).map_err(|err| Error::from_raw_os_error(err.errno))? as RawFd
    ) };
    let stderr = unsafe { File::from_raw_fd(
        syscall::open(stdio, O_WRONLY).map_err(|err| Error::from_raw_os_error(err.errno))? as RawFd
    ) };

    syscall::dup2(stdin.as_raw_fd() as usize, 0, &[]).map_err(|err| Error::from_raw_os_error(err.errno))?;
    syscall::dup2(stdout.as_raw_fd() as usize, 1, &[]).map_err(|err| Error::from_raw_os_error(err.errno))?;
    syscall::dup2(stderr.as_raw_fd() as usize, 2, &[]).map_err(|err| Error::from_raw_os_error(err.errno))?;

    Ok(())
}

/// Interprets an init.rc-style script line by line.  Blank lines and `#`
/// comments are skipped; `$NAME` words expand to environment variables
/// (empty string when unset).  Built-ins: cd, echo, export, run, run.d,
/// stdio; anything else is spawned as a child process and waited on.
pub fn run(file: &Path) -> Result<()> {
    let file = File::open(file)?;
    let reader = BufReader::new(file);

    for line_res in reader.lines() {
        let line_raw = line_res?;
        let line = line_raw.trim();
        if ! line.is_empty() && ! line.starts_with('#') {
            // Lazy word iterator with `$VAR` expansion.
            let mut args = line.split(' ').map(|arg| if arg.starts_with('$') {
                env::var(&arg[1..]).unwrap_or(String::new())
            } else {
                arg.to_string()
            });
            if let Some(cmd) = args.next() {
                match cmd.as_str() {
                    // Change the working directory for subsequent commands.
                    "cd" => if let Some(dir) = args.next() {
                        if let Err(err) = env::set_current_dir(&dir) {
                            println!("init: failed to cd to '{}': {}", dir, err);
                        }
                    } else {
                        println!("init: failed to cd: no argument");
                    },
                    // Print the remaining words separated by single spaces.
                    "echo" => {
                        if let Some(arg) = args.next() {
                            print!("{}", arg);
                        }
                        for arg in args {
                            print!(" {}", arg);
                        }
                        print!("\n");
                    },
                    // export VAR word... — join the rest with spaces as the value.
                    "export" => if let Some(var) = args.next() {
                        let mut value = String::new();
                        if let Some(arg) = args.next() {
                            value.push_str(&arg);
                        }
                        for arg in args {
                            value.push(' ');
                            value.push_str(&arg);
                        }
                        env::set_var(var, value);
                    } else {
                        println!("init: failed to export: no argument");
                    },
                    // Recursively interpret another script file.
                    "run" => if let Some(new_file) = args.next() {
                        if let Err(err) = run(&Path::new(&new_file)) {
                            println!("init: failed to run '{}': {}", new_file, err);
                        }
                    } else {
                        println!("init: failed to run: no argument");
                    },
                    // Interpret every file in a directory, in sorted order.
                    "run.d" => if let Some(new_dir) = args.next() {
                        let mut entries = vec![];
                        match read_dir(&new_dir) {
                            Ok(list) => for entry_res in list {
                                match entry_res {
                                    Ok(entry) => {
                                        entries.push(entry.path());
                                    },
                                    Err(err) => {
                                        println!("init: failed to run.d: '{}': {}", new_dir, err);
                                    }
                                }
                            },
                            Err(err) => {
                                println!("init: failed to run.d: '{}': {}", new_dir, err);
                            }
                        }
                        entries.sort();
                        for entry in entries {
                            if let Err(err) = run(&entry) {
                                println!("init: failed to run '{}': {}", entry.display(), err);
                            }
                        }
                    } else {
                        println!("init: failed to run.d: no argument");
                    },
                    // Redirect the standard streams (see switch_stdio above).
                    "stdio" => if let Some(stdio) = args.next() {
                        if let Err(err) = switch_stdio(&stdio) {
                            println!("init: failed to switch stdio to '{}': {}", stdio, err);
                        }
                    } else {
                        println!("init: failed to set stdio: no argument");
                    },
                    // Anything else: spawn as a child process and wait for it.
                    _ => {
                        let mut command = Command::new(cmd);
                        for arg in args {
                            command.arg(arg);
                        }

                        match command.spawn() {
                            Ok(mut child) => match child.wait() {
                                Ok(_status) => (), //println!("init: waited for {}: {:?}", line, status.code()),
                                Err(err) => println!("init: failed to wait for '{}': {}", line, err)
                            },
                            Err(err) => println!("init: failed to execute '{}': {}", line, err)
                        }
                    }
                }
            }
        }
    }

    Ok(())
}

pub fn main() {
    if let Err(err) = run(&Path::new("initfs:etc/init.rc")) {
        println!("init: failed to run initfs:etc/init.rc: {}", err);
    }

    // Enter the null namespace, then perform pid-1 duty: reap orphaned
    // children forever.
    syscall::setrens(0, 0).expect("init: failed to enter null namespace");

    loop {
        let mut status = 0;
        syscall::waitpid(0, &mut status, WaitFlags::empty()).unwrap();
    }
}
// Returns the next unique choice-identifier func NextChoiceId() int { count := <- choiceCount choiceCount <- count + 1 return count }
/** * Tests the {@link ExifUtils} class to check that the Exif Orientation * tag is correctly acquired by the * {@link ExifUtils#getExifOrientation(ImageReader, int)} method. * <p> * The Exif Orientation tags has been added to the source images by using * <a href="http://owl.phy.queensu.ca/~phil/exiftool/index.html">ExifTool</a>. * * @author coobird * */ public class ExifUtilsTest { @Test public void exifOrientation1() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_1.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(1), orientation); } @Test public void exifOrientation2() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_2.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(2), orientation); } @Test public void exifOrientation3() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_3.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(3), orientation); } @Test public void exifOrientation4() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_4.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(4), orientation); } @Test public void exifOrientation5() throws Exception { // given ImageReader reader = 
ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_5.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(5), orientation); } @Test public void exifOrientation6() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_6.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(6), orientation); } @Test public void exifOrientation7() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_7.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(7), orientation); } @Test public void exifOrientation8() throws Exception { // given ImageReader reader = ImageIO.getImageReadersByFormatName("jpg").next(); reader.setInput(ImageIO.createImageInputStream(new File("src/test/resources/Exif/orientation_8.jpg"))); // when Orientation orientation = ExifUtils.getExifOrientation(reader, 0); // then assertEquals(Orientation.typeOf(8), orientation); } }
//ParseLogLevel takes a string with the log level names or enum number values //and parses it into an integer func ParseLogLevel(logLevel string) (level int) { var err error switch strings.ToUpper(logLevel) { case "INFO": level = INFO break case "WARN": level = WARN break case "WARNING": level = WARN break case "ERROR": level = ERROR break default: level, err = strconv.Atoi(logLevel) if err != nil { level = INFO } } return }
# Read an integer; valid inputs are 0 or any value in [100, 200] —
# anything else is reported as invalid.
# (A "<reponame>" scraping artifact that made this file a syntax error
# has been removed.)
num = int(input())
if num != 0 and not (100 <= num <= 200):
    print("invalid")
/**
 * Add one to the total number of moves made by player and extend
 * dungeon if needed.
 *
 * @param dungeon A pointer to the dungeon and game info
 * @param player A pointer to the player object
 * @param move The last move made by the player
 *
 * @return None
 */
void addMove(Dungeon *dungeon, Object* player, int move) {
	const char *flag = (const char*) FLAG_PAGE;
	player->moves++;
	/* Every EXTEND_MOVE moves: grow the recorded move list, extend the
	   dungeon, and lazily populate the flag buffer. */
	if(player->moves % EXTEND_MOVE == 0) {
		int dungeon_idx = 0;
		char last_move = 0;
		int len = 0;
		char* old_list = NULL;
		dungeon_idx = player->moves / EXTEND_MOVE;
		old_list = dungeon->moveList;
#ifdef PATCHED_1
		/* Patched build: allocate (idx+2) chunks plus a NUL terminator,
		   leaving room for the moves recorded before the next extension. */
		if(!(dungeon->moveList = malloc((dungeon_idx+2)*EXTEND_MOVE*4+1)))
			_terminate(ALLOCATE_ERROR);
		bzero(dungeon->moveList, (dungeon_idx+2)*EXTEND_MOVE*4+1);
#else
		/* Unpatched build: allocates one chunk fewer and no NUL byte.
		   NOTE(review): this undersized buffer appears to be the intentional
		   vulnerability of this challenge binary (PATCHED_1 gate is a
		   CGC-style pattern) — left unchanged on purpose. */
		if(!(dungeon->moveList = malloc((dungeon_idx+1)*EXTEND_MOVE*4)))
			_terminate(ALLOCATE_ERROR);
		bzero(dungeon->moveList, (dungeon_idx+1)*EXTEND_MOVE*4);
#endif
		/* Copy the old move history into the fresh buffer, then scrub it. */
		len = strlen(old_list);
		memcpy(dungeon->moveList, old_list, len);
		bzero(old_list, len);
		last_move = move;
		extendDungeon(dungeon, dungeon_idx, last_move, player->moves);
		/* One-time fill of flag_buf from FLAG_PAGE.
		   NOTE(review): "!H" looks like a platform-specific sprintf format
		   (writes 4 bytes per flag byte given the i*4 stride) — confirm
		   against this challenge's libc. */
		if(!flag_buf) {
			if(!(flag_buf = malloc(512)))
				_terminate(ALLOCATE_ERROR);
			bzero(flag_buf, 512);
			for (unsigned int i = 0; i < 10; i++) {
				sprintf(&flag_buf[i*4], "!H", (unsigned char) *flag++);
			}
		}
		free(old_list);
	}
}
def create_default_settings(self) -> Dict[str, Any]:
    """Return the default settings mapping.

    When option support is enabled on this instance, the mapping is
    seeded with an ``_options`` entry of ``(None, '')``; otherwise it
    is empty.
    """
    if self._use_options:
        return {'_options': (None, '')}
    return {}
package com.binghamton.jhelp.util;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.PrintWriter;

/**
 * A utility class providing static method for logging to file.
 * <p>
 * If the log file cannot be created, logging falls back to standard error
 * instead of leaving the writer {@code null} (which previously caused a
 * {@code NullPointerException} on the first log call — the old message
 * claimed "exiting" but never actually exited).
 */
public class Logger {
    private static String logName = "log.txt";
    private static PrintWriter writer;

    static {
        try {
            writer = new PrintWriter(new File(logName));
        } catch(FileNotFoundException e) {
            // Bug fix: fall back to stderr rather than leaving writer null.
            System.err.println("could not initialize log file, logging to stderr...");
            e.printStackTrace();
            writer = new PrintWriter(System.err);
        }
    }

    /**
     * Logs a message to the log file
     * @param msg the message to log
     */
    public static void log(String msg) {
        writer.write(msg);
    }

    /**
     * Logs a message and a newline to the log file
     * @param msg the message to log
     */
    public static void logln(String msg) {
        writer.write(msg);
        writer.write("\n");
    }

    /**
     * Logs an Exception's stack trace to the log file
     * @param e the Exception to log
     */
    public static void log(Exception e) {
        e.printStackTrace(writer);
    }

    /**
     * Flushes and closes the log file
     */
    public static void close() {
        writer.flush();
        writer.close();
    }
}
def _on_Closing(self):
    """Handle the pop-up's close event.

    Hides the window with ``withdraw()`` instead of destroying it, so it
    can be shown again later without being rebuilt.
    """
    self._PopUpRoot.withdraw()
/** Implements COUNT(DISTINCT) over the values in an integer stream */
public class HyperLogLogOperator implements WindowOperator<HyperLogLog, Long, Long> {
    private static final OpType opType = OpType.COUNT;

    @Override
    public OpType getOpType() {
        return opType;
    }

    /** Returns a fresh, empty HyperLogLog sketch. */
    @Override
    public HyperLogLog createEmpty() {
        return new HyperLogLog();
    }

    /**
     * Merges sketches by summing their {@code estimate} fields directly.
     * NOTE(review): this is a sum of per-window estimates, not a
     * register-wise HLL union — it over-counts duplicates that appear in
     * multiple windows. {@code query} below makes the same approximation,
     * so this appears to be by design for this operator; confirm.
     */
    @Override
    public HyperLogLog merge(Stream<HyperLogLog> aggrs) {
        HyperLogLog mergedResult = new HyperLogLog();
        aggrs.forEach(hll -> mergedResult.estimate += hll.estimate);
        return mergedResult;
    }

    /** Inserts one integer value into the sketch; timestamp is unused. */
    @Override
    public HyperLogLog insert(HyperLogLog aggr, long timestamp, Object val) {
        aggr.insert((Integer) val);
        return aggr;
    }

    /**
     * Answers a count-distinct query by summing the estimates of all summary
     * windows in range (landmark windows and params are ignored).
     * The error component of the result is always {@code null}.
     */
    @Override
    public ResultError<Long, Long> query(StreamStatistics streamStats,
                                         Stream<SummaryWindow> summaryWindows,
                                         Function<SummaryWindow, HyperLogLog> hllRetriever,
                                         Stream<LandmarkWindow> landmarkWindows,
                                         long t0, long t1, Object... params) {
        return new ResultError<>(
                (long)Math.ceil(summaryWindows.map(hllRetriever).mapToDouble(HyperLogLog::getEstimate).sum()),
                null);
    }

    /** Zero count with zero error for queries over an empty range. */
    @Override
    public ResultError<Long, Long> getEmptyQueryResult() {
        return new ResultError<>(0L, 0L);
    }

    // NOTE(review): protobuf (de)serialization is unimplemented — both
    // methods return null; callers must not rely on persisting this operator.
    @Override
    public ProtoOperator.Builder protofy(HyperLogLog aggr) {
        return null;
    }

    @Override
    public HyperLogLog deprotofy(ProtoOperator protoOperator) {
        return null;
    }
}
import { AddressIF } from './addresses-interface'

/**
 * Contact details of the completing party for a filing.
 * Optional fields may be absent depending on the data source.
 */
export interface CompletingPartyIF {
  firstName: string
  middleName?: string
  lastName: string
  mailingAddress: AddressIF
  email?: string
  phone?: string
}
<gh_stars>10-100
use crate::ir::codeparts::Keyword;
use crate::lexeme::Lexeme;
use crate::parselet::signature::entrypoint::EntryPointParselet;
use crate::parsing::partial::code_body::parse_code_body;
use crate::parsing::util::cursor::ParseCursor;
use crate::parsing::util::{NoMatch, ParseRes};

/// Parses an entrypoint declaration: the `main` keyword, an optional simple
/// name identifier, and an indented code body.  Returns `NoMatch` when the
/// cursor is not positioned at the entrypoint keyword.
pub fn parse_entrypoint(mut cursor: ParseCursor) -> ParseRes<EntryPointParselet> {
    if let Lexeme::Keyword(keyword) = cursor.take()? {
        if keyword.word == Keyword::Entrypoint {
            // Fork the cursor so the next lexeme is only consumed when it is
            // a *simple* identifier; otherwise the fork is discarded and the
            // entrypoint stays anonymous.
            let mut name_cursor = cursor.fork();
            let identifier = if let Lexeme::Identifier(identifier) = name_cursor.take()? {
                if let Some(simple_identifier) = identifier.to_simple() {
                    cursor = name_cursor;
                    Some(simple_identifier)
                } else {
                    None
                }
            } else {
                None
            };
            let (body_cursor, body) = parse_code_body(cursor)?;
            let entrypoint = EntryPointParselet::new(identifier, body);
            return Ok((body_cursor, entrypoint));
        }
    }
    Err(NoMatch)
}

#[cfg(test)]
mod tests {
    use crate::io::slice::SourceSlice;
    use crate::ir::codeparts::operator::Symbol::Dash;
    use crate::lexeme::collect::for_test::builder;
    use crate::lexeme::identifier::SimpleIdentifierLexeme;
    use crate::parselet::body::code_body::CodeBodyParselet;
    use crate::parsing::util::cursor::End;

    use super::*;

    // Anonymous/named entrypoints with an empty body, block closed explicitly.
    #[test]
    fn anonymous_nl_endblock() {
        let lexemes = builder().keyword("main").colon().newline().start_block().end_block().file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let expected = EntryPointParselet::anonymous(CodeBodyParselet::new(vec![]));
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }

    #[test]
    fn named_nl_endblock() {
        let lexemes = builder()
            .keyword("main")
            .identifier("my_main_name")
            .colon()
            .newline()
            .start_block()
            .end_block()
            .file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let entry_name = SimpleIdentifierLexeme::from_valid("my_main_name", SourceSlice::mock());
        let expected = EntryPointParselet::named(entry_name, CodeBodyParselet::new(vec![]));
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }

    // Same as above, but the block is closed implicitly by end-of-file.
    #[test]
    fn anonymous_nl_eof() {
        let lexemes = builder().keyword("main").colon().newline().start_block().file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let expected = EntryPointParselet::anonymous(CodeBodyParselet::new(vec![]));
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }

    #[test]
    fn named_nl_eof() {
        let lexemes = builder()
            .keyword("main")
            .identifier("my_main_name")
            .colon()
            .newline()
            .start_block()
            .file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let entry_name = SimpleIdentifierLexeme::from_valid("my_main_name", SourceSlice::mock());
        let expected = EntryPointParselet::named(entry_name, CodeBodyParselet::new(vec![]));
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }

    // A newline is required after the colon before the block starts.
    #[test]
    #[should_panic]
    fn no_nl_after_colon() {
        let lexemes = builder().keyword("main").colon().start_block().end_block().file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let expected = EntryPointParselet::anonymous(CodeBodyParselet::new(vec![]));
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }

    #[test]
    fn code_after_colon_block() {
        let lexemes = builder()
            .keyword("main")
            .colon()
            .keyword("let")
            .identifier("x")
            .assignment()
            .literal_int(42)
            .newline()
            .start_block()
            .identifier("x")
            .association(Dash)
            .literal_int(5)
            .newline()
            .end_block()
            .file();
        let res = parse_entrypoint(lexemes.cursor());
        // Not sure if this will be supported one day, but it is not supported now
        assert!(res.is_err());
    }

    #[test]
    fn code_after_colon_noblock() {
        let lexemes = builder()
            .keyword("main")
            .colon()
            .keyword("let")
            .identifier("x")
            .assignment()
            .literal_int(42)
            .newline()
            .keyword("use")
            .identifier("fake")
            .file();
        let res = parse_entrypoint(lexemes.cursor());
        // Not sure if this will be supported one day, but it is not supported now
        assert!(res.is_err());
    }

    // Entrypoints whose bodies contain actual statements.
    #[test]
    fn anonymous_simple_body() {
        let lexemes = builder()
            .keyword("main")
            .colon()
            .newline()
            .start_block()
            .keyword("let")
            .identifier("x")
            .assignment()
            .literal_int(42)
            .newline()
            .identifier("x")
            .association(Dash)
            .literal_int(5)
            .newline()
            .end_block()
            .file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let expected = EntryPointParselet::anonymous(CodeBodyParselet::new(
            builder()
                .keyword("let")
                .identifier("x")
                .assignment()
                .literal_int(42)
                .newline()
                .identifier("x")
                .association(Dash)
                .literal_int(5)
                .newline()
                .build(),
        ));
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }

    #[test]
    fn named_simple_body() {
        let lexemes = builder()
            .keyword("main")
            .identifier("my_main_name")
            .colon()
            .newline()
            .start_block()
            .identifier("f")
            .parenthesis_open()
            .literal_int(42)
            .parenthesis_close()
            .newline()
            .newline()
            .end_block()
            .file();
        let (cursor, entry) = parse_entrypoint(lexemes.cursor()).unwrap();
        let entry_name = SimpleIdentifierLexeme::from_valid("my_main_name", SourceSlice::mock());
        let expected = EntryPointParselet::named(
            entry_name,
            CodeBodyParselet::new(
                builder()
                    .identifier("f")
                    .parenthesis_open()
                    .literal_int(42)
                    .parenthesis_close()
                    .newline()
                    .newline()
                    .build(),
            ),
        );
        assert_eq!(expected, entry);
        assert_eq!(cursor.peek(), Err(End));
    }
}
/**
 * A preference that displays "Sign out and turn off sync" button in {@link ManageSyncSettings}.
 */
public class SyncOffPreference extends DialogPreference {
    /**
     * Constructor for inflating from XML.
     *
     * @param context the context this preference is running in
     * @param attrs the attributes of the XML tag inflating the preference
     */
    public SyncOffPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        // Use the dedicated "turn off sync" layout instead of the default
        // preference layout.
        setLayoutResource(R.layout.preference_turn_off_sync);
    }
}
// IsUpToDate checks whether current state is up-to-date compared to the given // set of parameters. func IsUpToDate(in *v1alpha1.CryptoKeyParameters, observed *cloudkms.CryptoKey) (bool, string, error) { um := make([]string, 0, 6) generated, err := copystructure.Copy(observed) if err != nil { return true, "", errors.Wrap(err, errCheckUpToDate) } desired, ok := generated.(*cloudkms.CryptoKey) if !ok { return true, "", errors.New(errCheckUpToDate) } GenerateCryptoKeyInstance(*in, desired) if !cmp.Equal(desired.Labels, observed.Labels, cmpopts.EquateEmpty()) { um = append(um, "labels") } if !cmp.Equal(desired.Purpose, observed.Purpose, cmpopts.EquateEmpty()) { um = append(um, "purpose") } if !cmp.Equal(desired.RotationPeriod, observed.RotationPeriod, cmpopts.EquateEmpty()) { um = append(um, "rotationPeriod") } if !cmp.Equal(desired.NextRotationTime, observed.NextRotationTime, cmpopts.EquateEmpty()) { um = append(um, "nextRotationTime") } if !cmp.Equal(desired.VersionTemplate, observed.VersionTemplate, cmpopts.EquateEmpty(), cmpopts.IgnoreFields(cloudkms.CryptoKeyVersionTemplate{}, "ForceSendFields"), cmpopts.IgnoreFields(cloudkms.CryptoKeyVersionTemplate{}, "NullFields"), ) { if !cmp.Equal(desired.VersionTemplate.Algorithm, observed.VersionTemplate.Algorithm, cmpopts.EquateEmpty()) { um = append(um, "versionTemplate.algorithm") } if !cmp.Equal(desired.VersionTemplate.ProtectionLevel, observed.VersionTemplate.ProtectionLevel, cmpopts.EquateEmpty()) { um = append(um, "versionTemplate.protectionLevel") } } if len(um) > 0 { return false, strings.Join(um, ","), nil } return true, "", nil }
// Fill out your copyright notice in the Description page of Project Settings. #include "LuaPanda.h" #include "lua.hpp" #include "libpdebug.h" IMPLEMENT_MODULE(FLuaPanda, LuaPanda); void FLuaPanda::StartupModule() { } void FLuaPanda::ShutdownModule() { } void FLuaPanda::SetupLuaPanda(struct lua_State* L) { pdebug_init(L); luaL_getsubtable(L, LUA_REGISTRYINDEX, LUA_PRELOAD_TABLE); lua_pushcfunction(L, &FLuaPanda::OpenLuaPanda); lua_setfield(L, -2, "LuaPanda"); lua_pushcfunction(L, &FLuaPanda::OpenDebugTools); lua_setfield(L, -2, "DebugTools"); lua_pop(L, 1); } int FLuaPanda::OpenLuaPanda(lua_State* L) { static const auto RawLua1 = #include "LuaPanda.1.lua.inc" static const auto RawLua2 = #include "LuaPanda.2.lua.inc" static const auto RawLua3 = #include "LuaPanda.3.lua.inc" static const auto RawLua4 = #include "LuaPanda.4.lua.inc" static const auto RawLua5 = #include "LuaPanda.5.lua.inc" static const int32 ContentSize = 100 * 1024; static ANSICHAR LuaContent[ContentSize] = { 0 }; if (LuaContent[0] == 0) { FCStringAnsi::Snprintf(LuaContent, ContentSize, "%s%s%s%s%s", RawLua1, RawLua2, RawLua3, RawLua4, RawLua5); } luaL_dostring(L, LuaContent); return 1; } int FLuaPanda::OpenDebugTools(lua_State* L) { static const auto RawLua = #include "DebugTools.lua.inc" luaL_dostring(L, RawLua); return 1; }
<reponame>LiHu20160316/electrum<filename>ios/CustomCode/Modules/ScanQR/OKQRCodeScanManager.h<gh_stars>0
//
//  OKQRCodeScanManager.h
//  OneKey
//
//  Created by bixin on 2020/9/28.
//

#import <Foundation/Foundation.h>

@class OKQRCodeScanManager;

@protocol OKQRCodeScanManagerDelegate <NSObject>

@optional
/** Callback delivering scanned QR code data (metadataObjects: the scanned QR code metadata). */
- (void)OKQRCodeScanManager:(OKQRCodeScanManager *)scanManager didOutputMetadataObjects:(NSArray *)metadataObjects;

// Photo-library picking callbacks
- (void)pickerControllerDidCancel:(UIImagePickerController *)picker;
- (void)pickerController:(UIImagePickerController *)picker didFinishPickingMediaWithMessage:(NSString *)message;

// Brightness value callback (observed range roughly -5 to 9?)
- (void)captureDidOutput:(CGFloat)brightnessValue;

@end

@interface OKQRCodeScanManager : NSObject

@property (nonatomic, weak) id<OKQRCodeScanManagerDelegate> delegate;
@property (nonatomic, readonly) BOOL torchHadOn; // whether the torch has already been turned on

- (void)setupSessionOnController:(UIViewController *)currentController;

/** Restarts scanning on the capture session after a 3 second delay */
- (void)sessionRestartRunning;
/** Starts the capture session scanning */
- (void)sessionStartRunning;
/** Stops the capture session scanning */
- (void)sessionStopRunning;
/** Removes the videoPreviewLayer from its superlayer */
- (void)videoPreviewLayerRemoveFromSuperlayer;
/** Plays the scan sound effect */
- (void)playSound;

// Reads a QR code from the photo library
- (void)readQRCodeFromPhotoLibraryTo:(UIViewController *)currentController;

// Turns the torch on
- (void)torchOn;
// Turns the torch off
- (void)torchOff;
// Whether the torch should be turned on for the given brightness value
- (BOOL)needTorchOn:(CGFloat)brightnessValue;
@end
package com.charon.opengles30studydemo.videofilter.filter;

import com.charon.opengles30studydemo.R;
import com.charon.opengles30studydemo.videofilter.base.BaseFilter;

/**
 * Video filter rendering frames in black and white.
 * <p>
 * Pairs the pass-through vertex shader with the black-and-white fragment
 * shader; all rendering behavior is inherited from {@link BaseFilter}.
 */
public class BlackWhiteFilter extends BaseFilter {
    public BlackWhiteFilter() {
        super(R.raw.video_no_filter_vertex_shader, R.raw.video_blackwhite_filter_fragment_shader);
    }
}
from collections import defaultdict as dd  # NOTE(review): unused import — kept as-is

# Competitive-programming solution: t is matched as a subsequence of s two
# ways (greedily leftmost and greedily rightmost); the answer printed is the
# largest index gap between consecutive matched characters.
n, m = map(int, input().split())
s = input()
t = input()
# md[j]  = leftmost index in s matching t[j], given t[0..j-1] matched as far
#          left as possible (md[-1] = 0 is the scan-start sentinel).
md = {}
# mmd[j] = rightmost index in s matching t[j], given t[j+1..] matched as far
#          right as possible (mmd[m] = n-1 is the scan-start sentinel).
mmd = {}
md[-1] = 0
mmd[m] = n-1
for j in range(m):
    i = md[j-1]
    # When consecutive pattern chars repeat, start one past the previous
    # match; otherwise the while-loop below naturally steps past it, since
    # s[md[j-1]] == t[j-1] != t[j].
    if j > 0 and t[j] == t[j-1]:
        i += 1
    while s[i] != t[j]:
        i += 1
    md[j] = i
for j in range(m-1, -1, -1):
    i = mmd[j+1]
    # Mirror of the forward pass, scanning right-to-left.
    if (j != m-1 and t[j+1] == t[j]):
        i-=1
    while s[i] != t[j]:
        i -= 1
    mmd[j] = i
ans = 0
# Widest gap: left part matched leftmost up to t[j-1], right part matched
# rightmost from t[j] on.
for j in range(1, m):
    ans = max(mmd[j] - md[j-1], ans)
print(ans)
def conduct_curve_fitting(self, DeltaT, Decay, dDecay=None, bReInitialise=False, fp=sys.stdout):
    """Fit this model's multi-exponential decay to (DeltaT, Decay) data.

    Args:
        DeltaT: array of time deltas (x-values); assumed sorted ascending,
            since ``DeltaT[-1]`` is used to bound the slowest time constant.
        Decay: array of decay values (y-values).
        dDecay: optional per-point uncertainties, passed as ``sigma``.
        bReInitialise: when True, re-derive initial parameters from the data
            before fitting.
        fp: stream that warning messages are printed to.

    Returns:
        ``(chiSq, bQuality)`` where ``bQuality`` is three booleans:
        [fit converged, no overfitting detected, component sum <= 1].
        ``chiSq`` is ``np.inf`` when the fit fails outright.
    """
    if bReInitialise:
        self.initialise_for_fit_advanced(DeltaT, Decay)
    bQuality=[True,True,True]
    try:
        paramOpt, dParamMatrix = curve_fit(curvefit_exponential, DeltaT, Decay, sigma=dDecay, p0 = self.get_params_as_list(), bounds = self.get_bounds_as_list(tauMax=DeltaT[-1]*10))
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
        # consider narrowing to (RuntimeError, ValueError) raised by curve_fit.
        print( "= = = WARNING, curve fitting of %s with %i params failed!" % (self.name,self.nParams), file=fp)
        bQuality[0]=False
        return np.inf, bQuality
    # 1-sigma parameter uncertainties from the covariance-matrix diagonal.
    dParam = np.sqrt(np.diag( dParamMatrix ) )
    if not self.bS2Fast:
        self.S2=1.0-np.sum(self.C)
    # Any uncertainty exceeding its parameter value suggests overfitting.
    if np.any( dParam > paramOpt ):
        print( "= = = WARNING, curve fitting of %s with %i params indicates overfitting." % (self.name,self.nParams), file=fp)
        bQuality[1]=False
    # Physically, S2 plus the component amplitudes should not exceed 1.
    if self.S2+np.sum(self.C) > 1.0:
        print( "= = = WARNING, curve fitting of %s with %i params returns sum>1." % (self.name,self.nParams), file=fp)
        bQuality[2]=False
    self.set_params_from_list(paramOpt)
    self.set_uncertainties_from_list( dParam )
    self.bHasFit=True
    self.chiSq = self.calc_chiSq( DeltaT, Decay, dDecay )
    self.sort_components()
    return self.chiSq, bQuality
<filename>software/SSv3_control_panel/loggwindow.h /**///////////////////////////////////////////////////////////////////////////// // // // // // Copyright(c) 2018, <NAME> <EMAIL> // // All rights reserved. // // // // Redistribution and use in source and binary forms, with or without // // modification, are permitted provided that the following conditions are // // met : // // // // 1. Redistributions of source code must retain the above copyright notice, // // this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // // notice, this list of conditions and the following disclaimer in the // // documentation and/or other materials provided with the distribution. // // // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED // // TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A // // PARTICULAR PURPOSE ARE DISCLAIMED.IN NO EVENT SHALL THE COPYRIGHT OWNER // // OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // // // The views and conclusions contained in the software and documentation are // // those of the authors and should not be interpreted as representing // // official policies, either expressed or implied, of the SAIMScannerV3 // // project, the Paszek Research Group, or Cornell University. 
// //////////////////////////////////////////////////////////////////////////////*/ #ifndef LOGGWINDOW_H #define LOGGWINDOW_H #include <QWidget> #include <QCloseEvent> #include <qdialog.h> #include <qpushbutton.h> #include <qcheckbox.h> #include <qlayout.h> #include <qtextedit.h> #include <qlineedit.h> #include <qtextstream.h> #include <qfile.h> #include <qtimer.h> #include "ui_loggwindow.h" #include "ssv3controlpanel.h" class TextEditStream : public QTextEdit { private: QTextEdit *_thisEdit; public: TextEditStream() {} TextEditStream(QTextEdit *edit) { _thisEdit = edit; } TextEditStream(TextEditStream &obj) { _thisEdit = obj._thisEdit; } TextEditStream & operator=(const TextEditStream &obj) { _thisEdit = obj._thisEdit; return *this; } void OutputDest(QTextEdit *obj) { _thisEdit = obj; return; } TextEditStream & operator<<(const QString &str) { _thisEdit->insertPlainText(str); return *this; } TextEditStream & operator<<(const std::string &str) { _thisEdit->insertPlainText(QString::fromStdString(str)); return *this; } TextEditStream & operator<<(const int &num) { _thisEdit->insertPlainText(QString::number(num)); return *this; } TextEditStream & operator<<(const double &num) { _thisEdit->insertPlainText(QString::number(num, 'f', 3)); return *this; } TextEditStream & operator<<(const char *str) { _thisEdit->insertPlainText(QString(str)); return *this; } }; class SSv3ControlPanel; class LoggWindow : public QWidget, public Ui::LoggWindow { Q_OBJECT public: LoggWindow(SSv3ControlPanel *cp, QWidget *parent = Q_NULLPTR); ~LoggWindow(); private: friend class SSv3ControlPanel; void SetupWindow(); void PrintSequence(int seq); void PrintProfile(int prof); void PrintExperiment(); virtual void closeEvent(QCloseEvent *event); SSv3ControlPanel* _mainWindow; QVBoxLayout *_mainLayout; QTextEdit *_textBox; QTextEdit *_commentBox; QLineEdit *_saveLocation; QHBoxLayout *_saveLayout; QCheckBox *_autosaveCheckBox; QPushButton *_saveLocationButton; QHBoxLayout *_commentButtonLayout; 
QPushButton *_addCommentButton; QPushButton *_clearCommentButton; QHBoxLayout *_controlButtonLayout; QPushButton *_recordSettingsButton; QPushButton *_recordElementsButton; QPushButton *_saveAsButton; QPushButton *_closeButton; TextEditStream _textBoxStream; QTextStream _fileStream; QFile _logFile; QTimer *_autosaveTimer; private slots: void on_saveLocationButton_clicked(); void on_saveAsButton_clicked(); void on_autosaveCheckBox_checked(); void SaveLog(); void on_addComment(); void on_recordSettingsButton_clicked(); void on_recordElementsButton_clicked(); }; #endif //LOGGWINDOW_H
# coding=utf-8 from __future__ import unicode_literals import os from mock import patch from django_app.test import PyW4CTestCase from utils import gui, path, tmp, patterns class UtilsTest(PyW4CTestCase): temp_dir = 'temp' @staticmethod @patch('subprocess.check_call') def test_gui_open_file(subprocess_check_call): filename = 'test.png' gui.open_file(filename) subprocess_check_call.assert_called_with([ os.path.join(path.get_bin_dir(), 'open'), filename ]) @patch('utils.tmp.settings', TEMP_DIR=temp_dir) def test_get_temp_filename(self, settings): # pylint: disable=W0613 nb_temp_files_before = len(os.listdir(self.temp_dir)) temp_filename = tmp.get_temp_filename() self.assertIn(self.temp_dir, temp_filename) os.remove(temp_filename) extension = '.png' temp_filename = tmp.get_temp_filename(extension) self.assertIn(extension, temp_filename) os.remove(temp_filename) self.assertEquals(len(os.listdir(self.temp_dir)), nb_temp_files_before) def test_patterns_singleton(self): val = 42 class _TestSingleton(object): __metaclass__ = patterns.Singleton var = val def get_val(self): return self.var def set_var(self, new_val): self.var = new_val self.assertTrue(_TestSingleton() is _TestSingleton()) self.assertEqual(id(_TestSingleton()), id(_TestSingleton())) self.assertEqual(_TestSingleton().get_val(), val) self.assertEqual(_TestSingleton().var, val) new_val = 21 _TestSingleton().var = new_val self.assertEqual(_TestSingleton().var, new_val) _TestSingleton().set_var(val) self.assertEqual(_TestSingleton().get_val(), val)