content
stringlengths
10
4.9M
<reponame>isabella232/pdfium-1 // Copyright 2014 PDFium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Original code copyright 2014 Foxit Software Inc. http://www.foxitsoftware.com #include "xfa/fwl/core/cfwl_widget.h" #include "xfa/fde/tto/fde_textout.h" #include "xfa/fwl/core/cfwl_themetext.h" #include "xfa/fwl/core/cfwl_widgetmgr.h" #include "xfa/fwl/core/fwl_noteimp.h" #include "xfa/fwl/core/fwl_noteimp.h" #include "xfa/fwl/core/ifwl_app.h" #include "xfa/fwl/core/ifwl_themeprovider.h" #define FWL_WGT_CalcHeight 2048 #define FWL_WGT_CalcWidth 2048 #define FWL_WGT_CalcMultiLineDefWidth 120.0f CFWL_Widget::CFWL_Widget(const IFWL_App* app) : m_pApp(app) {} CFWL_Widget::~CFWL_Widget() {} void CFWL_Widget::Initialize() { ASSERT(m_pIface); m_pIface->SetAssociateWidget(this); } void CFWL_Widget::GetWidgetRect(CFX_RectF& rect, bool bAutoSize) { if (m_pIface) m_pIface->GetWidgetRect(rect, bAutoSize); } void CFWL_Widget::SetWidgetRect(const CFX_RectF& rect) { if (m_pIface) m_pIface->SetWidgetRect(rect); } void CFWL_Widget::ModifyStyles(uint32_t dwStylesAdded, uint32_t dwStylesRemoved) { if (m_pIface) m_pIface->ModifyStyles(dwStylesAdded, dwStylesRemoved); } uint32_t CFWL_Widget::GetStylesEx() { return m_pIface ? m_pIface->GetStylesEx() : 0; } void CFWL_Widget::ModifyStylesEx(uint32_t dwStylesExAdded, uint32_t dwStylesExRemoved) { m_pIface->ModifyStylesEx(dwStylesExAdded, dwStylesExRemoved); } uint32_t CFWL_Widget::GetStates() { return m_pIface ? 
m_pIface->GetStates() : 0; } void CFWL_Widget::SetStates(uint32_t dwStates, bool bSet) { if (m_pIface) m_pIface->SetStates(dwStates, bSet); } void CFWL_Widget::SetLayoutItem(void* pItem) { if (m_pIface) m_pIface->SetLayoutItem(pItem); } void CFWL_Widget::Update() { if (m_pIface) m_pIface->Update(); } void CFWL_Widget::LockUpdate() { if (m_pIface) m_pIface->LockUpdate(); } void CFWL_Widget::UnlockUpdate() { if (m_pIface) m_pIface->UnlockUpdate(); } FWL_WidgetHit CFWL_Widget::HitTest(FX_FLOAT fx, FX_FLOAT fy) { if (!m_pIface) return FWL_WidgetHit::Unknown; return m_pIface->HitTest(fx, fy); } void CFWL_Widget::DrawWidget(CFX_Graphics* pGraphics, const CFX_Matrix* pMatrix) { if (m_pIface) m_pIface->DrawWidget(pGraphics, pMatrix); } IFWL_WidgetDelegate* CFWL_Widget::GetDelegate() const { return m_pIface ? m_pIface->GetDelegate() : nullptr; } void CFWL_Widget::SetDelegate(IFWL_WidgetDelegate* pDelegate) { if (m_pIface) m_pIface->SetDelegate(pDelegate); }
/**
 * Writes a TREE_ID record for the supplied <code>Tree</code>. The saved
 * information is sufficient to recreate a new, empty <code>Tree</code>
 * having the same name as the <code>Tree</code> being saved.
 *
 * @param tree
 *            The <code>Tree</code> whose identity record is written
 * @throws IOException
 *             if the underlying output stream fails to write
 */
protected void writeTreeInfo(final Tree tree) throws IOException {
    // Record layout: a one-char record-type tag followed by the tree
    // name in modified-UTF-8 (DataOutput.writeUTF framing).
    _dos.writeChar(RECORD_TYPE_TREE_ID);
    _dos.writeUTF(tree.getName());
    // Remember the tree most recently written — presumably so later
    // records can avoid re-emitting the TREE_ID; TODO confirm at callers.
    _lastTree = tree;
    _otherRecordCount++;
}
/**
 * Overlay two geometries, using heuristics to ensure
 * computation completes correctly.
 * In practice the heuristics are observed to be fully correct.
 *
 * @param geom0 a geometry
 * @param geom1 a geometry
 * @param opCode the overlay operation code (from {@link OverlayNG})
 * @return the overlay result geometry
 *
 * @see OverlayNG
 */
public static Geometry overlay(Geometry geom0, Geometry geom1, int opCode)
{
    Geometry result;
    RuntimeException exOriginal;

    /**
     * First try overlay with a FLOAT noder, which is fast and causes least
     * change to geometry coordinates
     * By default the noder is validated, which is required in order
     * to detect certain invalid noding situations which otherwise
     * cause incorrect overlay output.
     */
    try {
        result = OverlayNG.overlay(geom0, geom1, opCode );
        return result;
    }
    catch (RuntimeException ex) {
        /**
         * Capture original exception,
         * so it can be rethrown if the remaining strategies all fail.
         */
        exOriginal = ex;
    }

    /**
     * On failure retry using snapping noding with a "safe" tolerance.
     * if this throws an exception just let it go,
     * since it is something that is not a TopologyException
     */
    // Each fallback returns null (rather than throwing) when it cannot
    // produce a result, so the strategies chain cleanly.
    result = overlaySnapTries(geom0, geom1, opCode);
    if (result != null)
        return result;

    /**
     * On failure retry using snap-rounding with a heuristic scale factor (grid size).
     */
    result = overlaySR(geom0, geom1, opCode);
    if (result != null)
        return result;

    /**
     * Just can't get overlay to work, so throw original error.
     */
    // Rethrow the FLOAT-noder failure, not a later one: it describes the
    // problem with the least-perturbed inputs.
    throw exOriginal;
}
/*
 * Copyright 2016 Huawei Technologies Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.openo.sdno.osdriverservice.sbi.model;

import org.openo.sdno.osdriverservice.openstack.client.model.VpnIkePolicy;
import org.openo.sdno.osdriverservice.openstack.client.model.VpnIpSecPolicy;
import org.openo.sdno.osdriverservice.openstack.client.model.VpnIpSecSiteConnection;
import org.openo.sdno.osdriverservice.openstack.client.model.VpnService;

/**
 * Model class for IpSec.<br>
 * Aggregates the OpenStack VPN resources (IKE policy, IPSec policy,
 * site connection and VPN service) that together realize one overlay
 * IpSec connection, plus the {@link Underlays} ID bookkeeping.
 *
 * @author
 * @version SDNO 0.5 2016-6-15
 */
public class OsIpSec {

    // ID of the overlay-level connection this bundle of underlay
    // resources belongs to.
    private String overlayId;

    private VpnIkePolicy vpnIkePolicy;

    private VpnIpSecPolicy vpnIpSecPolicy;

    private VpnIpSecSiteConnection vpnIpSecSiteConnection;

    private VpnService vpnService;

    // Underlay resource IDs; initialized eagerly so callers can populate
    // it without a null check.
    private OsIpSec.Underlays attributes = new OsIpSec.Underlays();

    public VpnIkePolicy getVpnIkePolicy() {
        return this.vpnIkePolicy;
    }

    public void setVpnIkePolicy(VpnIkePolicy vpnIkePolicy) {
        this.vpnIkePolicy = vpnIkePolicy;
    }

    public VpnIpSecPolicy getVpnIpSecPolicy() {
        return this.vpnIpSecPolicy;
    }

    public void setVpnIpSecPolicy(VpnIpSecPolicy vpnIpSecPolicy) {
        this.vpnIpSecPolicy = vpnIpSecPolicy;
    }

    public VpnService getVpnService() {
        return this.vpnService;
    }

    public void setVpnService(VpnService vpnService) {
        this.vpnService = vpnService;
    }

    public VpnIpSecSiteConnection getVpnIpSecSiteConnection() {
        return this.vpnIpSecSiteConnection;
    }

    public void setVpnIpSecSiteConnection(VpnIpSecSiteConnection vpnIpSecSiteConnection) {
        this.vpnIpSecSiteConnection = vpnIpSecSiteConnection;
    }

    public OsIpSec.Underlays getAttributes() {
        return this.attributes;
    }

    public void setAttributes(OsIpSec.Underlays attributes) {
        this.attributes = attributes;
    }

    public String getOverlayId() {
        return this.overlayId;
    }

    public void setOverlayId(String overlayId) {
        this.overlayId = overlayId;
    }

    /**
     * Underlays class
     * <br>
     * <p>
     * Holds the IDs of the created OpenStack underlay resources. Each
     * setter also records the ID together with an action string via
     * the inherited put() — presumably for create/delete tracking in
     * BaseUnderlays; TODO confirm against BaseUnderlays.
     * </p>
     *
     * @author
     * @version SDNO 0.5 August 8, 2016
     */
    public static class Underlays extends BaseUnderlays{

        private String vpnServiceId = null;

        private String vpnIkePolicyId = null;

        private String vpnIpSecPolicyId = null;

        private String vpnIpSecSiteConnectionId = null;

        public String getVpnServiceId() {
            return this.vpnServiceId;
        }

        /**
         * Set VpnService ID
         * <br>
         *
         * @param vpnServiceId VpnService resource ID
         * @param action action string recorded alongside the ID
         * @since SDNO 0.5
         */
        public void setVpnServiceId(String vpnServiceId, String action) {
            this.vpnServiceId = vpnServiceId;
            this.put("vpnServiceId", vpnServiceId, action);
        }

        public String getVpnIkePolicyId() {
            return this.vpnIkePolicyId;
        }

        /**
         * Set VpnIkePolicy ID
         * <br>
         *
         * @param vpnIkePolicyId VpnIkePolicy resource ID
         * @param action action string recorded alongside the ID
         * @since SDNO 0.5
         */
        public void setVpnIkePolicyId(String vpnIkePolicyId, String action) {
            this.vpnIkePolicyId = vpnIkePolicyId;
            this.put("vpnIkePolicyId", vpnIkePolicyId, action);
        }

        public String getVpnIpSecPolicyId() {
            return this.vpnIpSecPolicyId;
        }

        /**
         * Set VpnIpsecPolicy ID
         * <br>
         *
         * @param vpnIpSecPolicyId VpnIpSecPolicy resource ID
         * @param action action string recorded alongside the ID
         * @since SDNO 0.5
         */
        public void setVpnIpSecPolicyId(String vpnIpSecPolicyId, String action) {
            this.vpnIpSecPolicyId = vpnIpSecPolicyId;
            this.put("vpnIpSecPolicyId", vpnIpSecPolicyId, action);
        }

        public String getVpnIpSecSiteConnectionId() {
            return this.vpnIpSecSiteConnectionId;
        }

        /**
         * Set VpnIpSecSite connection ID
         * <br>
         *
         * @param vpnIpSecSiteConnectionId VpnIpSecSiteConnection resource ID
         * @param action action string recorded alongside the ID
         * @since SDNO 0.5
         */
        public void setVpnIpSecSiteConnectionId(String vpnIpSecSiteConnectionId, String action) {
            this.vpnIpSecSiteConnectionId = vpnIpSecSiteConnectionId;
            this.put("vpnIpSecSiteConnectionId", vpnIpSecSiteConnectionId, action);
        }
    }
}
An alternative approach for evaluating the phenotypic virulence factors of pathogenic Escherichia coli Escherichia coli is a recognized zoonotic food-borne pathogen; however, the use of polymerase chain reaction (PCR) in the underdeveloped countries to differentiate pathogenic from non-pathogenic E. coli is a problematic issue. Our goal was to assess the phenotypic virulence markers motility, hemolysin, congo red agar, embryo lethality assay and serum resistance for pathogenic E. coli (PEC) correlated to PCR tests which are currently used world-wide to evaluate the PEC. The 448 strains of Escherichia coli that were isolated from different sources were characterized for phenotypic virulence factors such as motility, hemolysin, Congo red binding, Embryo Lethality assay (ELA) and serum resistance, as well as antibiotic susceptibility using the disc diffusion method with 23 antibiotics. Results exhibited 100% motility and Congo red binding, 97.1% for hemolysin production and 90.2% in the ELA. As a result, we were able to hypothetically conclude that the aforementioned virulence markers are plain, straightforward, economical, rapid, more dynamic, uncomplicated methodology, duplicatable and cost next to nothing when compared to the molecular PCR. Their implementation in a diagnostic microbiology laboratory for vetting is a rewarding task in the underdeveloped countries. It augments endeavors to minimize the use of PCR in our investigations especially during epidemiological and outbreak investigations of PEC. Introduction Escherichia coli has been differentiated into more than 50,000 different serotypes, of which several have the capability to produce disease through their pathogenic potentiality (EFSA, 2014;CDC, 2015). There are several methods for detecting virulence to discriminate among pathogenic and non-pathogenic E. coli serovars which comprise classical phenotypic cultural vetting procedures and molecular techniques. 
However, many laboratories throughout the world, when we specially refer to the underdeveloped countries, lack the ability or the skilful manpower needed to evolve molecular procedures to detect the pathogenic E. coli isolates, maintenance and repair of the sophisticated equipments that are used in the molecular assays. It is therefore essential that other, cheaper and non-sophisticated scanning procedures are integrated into standard medical métier and diagnostic laboratories in the Third World. Therefore, our goal was to assess the phenotypic factors (motility, hemolysin, Congo red agar, ELA, serum resistance and PCR) as predictors of virulence and as a sensitive and specific tool for the pathogenicity of E. coli isolated in the diagnostic microbiological laboratories of the underdeveloped countries to differentiate between pathogenic and non-pathogenic E. coli isolated from different sources in the failing of molecular biology potentiality. Materials and methods The 448 strains we studied were previously isolated and molecularly identified from different sources (Osman et al., 2012a(Osman et al., , 2012bOsman et al., 2013). After being re-confirmed as E. coli, the 448 purified isolates were tested for their pathogenicity using classical tests for E. coli pathogenicity as previously described by Osman et al. (2012aOsman et al. ( , 2012b which included motility, hemolytic activity, Congo red uptake (CR), Embryo Lethality Assay (ELA), and assessment of serum resistance (SR). Results and discussion The results of the phenotypic virulence markers recorded that 348 E. coli isolates were found to be 100% motile, 97% were hemolytic. 95% were Congo red positive, Embryo lethality at 13th day post-inoculation, ranged from 10.0 to 100% with a mean of 36% and 62% of the E. coli isolates were highly sensitive to serum resistance as shown in Table 1. E. 
coli is a highly adaptable microorganism that has evolved sophisticated means of variable phenotype virulence tests, motility and hemolysis are sometimes tested in conjunction with complement resistance and embryo lethality tests to differentiate pathogenic E. coli (Dziva and Stevens, 2008). All of our 448 E. coli isolates were found to be motile mediated by the flagella of E. coli, which is one of the virulence factors (Lane et al., 2005;Chelsea et al., 2007;Tonu et al., 2011) for pathogenicity of E. coli (Kao et al., 2014). The other phenotypic virulence markers are: (i) the characteristic CR binding affinity (AL-Saiedi and Al-Mayah, 2014; Yadav et al., 2014), (ii) the ELA was found to have the potentiality to differentiate between highly virulent, moderately virulent, and avirulent isolates of avian E. coli (Wooley et al., 2000;Gibbs et al., , 2004Oh et al., 2012), (iii) the capacity to counteract the germicidal action of serum (serum resistance), and thus continue to live in the bloodstream, represents another essential pathogenic phenomenon for pathogenic E. coli strains (Falkenhagen et al., 1991;Jacobson et al., 1992;Allan et al., 1993), (iv) hemolysin production has been implicated as an emerging and one of the most important virulence factors (Fatima et al., 2012). The present study showed that 97% of our E. coli isolates were able to produce hemolysis, a phenotypic virulence phenomenon demonstrated in a number of pathogens such as streptococcal and staphylococcal species, E. coli, Serpulina hyodysenteriae, Mycobacterium tuberculosis, Trypanosoma cruzi, and Listeria monocytogenes (Braun and Focareta, 1991;Andrews and Portnoy, 1994;Bhakdi et al., 1996;Morgan et al., 1996;Beutin, 1999;Quave et al., 2015). The cytolytic protein toxin, also known as cytotoxic necrotizing factor, hemolysin, secreted by the majority of pathogenic E. 
coli strains produced cell-associated lysin on blood agar plates seen as a clear zone of lysis (Smith, 1963;Shobrak and Abo-Amer, 2014). The importance of the hemolysin criteria, especially a-hemolysin, comes from the fact that it is strongly proinflammatory leading to secretion of IL-6 and chemotaxins, which sets pace for the pathogenesis of renal disease (Ranjan et al., 2010;Garcia et al., 2013). Clinical studies have indicated that the virulence factors of E. coli like production of hemolysin and the capacity to counteract the germicidal action of serum play a role in the pathogenesis (Fatima et al., 2012;Rizvi et al., 2013). Conclusion We were able to hypothetically conclude that the aforementioned virulence markers are plain, straightforward, economical, rapid, more dynamic, uncomplicated methodology, duplicatable and cost next to nothing when compared to the molecular PCR (Table 1). Their implementation in a diagnostic microbiology laboratory for vetting is a rewarding task in the underdeveloped countries. It augments endeavors to minimize the use of PCR in our investigations especially during epidemiological and outbreak investigations of PEC.
A secondary school which isolated as many as 70 pupils on the first day of term has now been accused of forcing a 12-year-old to 'bite off' her fake nails before a class. Denbigh High School, in north Wales, refused to let a large number of children attend lessons after they arrived wearing the wrong uniform. Staff at the school claimed pupils were in breach of the new uniform policy and were wearing trousers which were 'too tight'. Parents said that when pupils arrived for the first day of school, those considered to have broken the rules were 'isolated'. Pamela Williams, 48, from Denbigh, said her two daughters, aged 12 and 15, both go to the school and claims her daughter was told to 'bite off' her false nails. As many as 70 pupils were put in isolation for wearing trousers which were 'too tight' and other uniform breaches at Denbigh High School in north Wales (pictured) Tracey Webster, 40, (middle) holds up the trousers her 12-year-old daughter wore to school Speaking today, she said: 'One of the teachers made my 12-year-old bite off her false nails in the class. 'She was really upset because they were a birthday present and it's ruined her nails underneath. 'It's disgusting what they're doing to the kids, you should see the teachers - half of them are caked in make-up. 'It's the headteacher who's doing this, he's got the school where it needs to be with exam results but this is too far. 'Another girl was put in isolation because she had white socks - what does it matter what colour her socks are?' Tracey Webster, whose 12-year-old daughter attends the school, said: 'I am disgusted with the school. 'My daughter was so looking forward to school this term, she was looking forward to a fresh start after having some problems at school last year. 'She was told first thing that her trousers were too tight and that she wouldn't be able to attend classes. 
'She doesn't want to wear baggy trousers, and in any case, how does the fit of the trousers affect a child's learning? 'Her trousers are nice - they're presentable - and now I've had to get a friend to take me to buy her a baggy pair that she will probably hate. 'She came home in floods of tears on her first day, she was so distressed. I think it's disgusting that children lost a day's education because of this.' Ms Webster, 40, added: 'It's really expensive for parents. We have wasted money when we can't afford to. I have seven children, and this is not fair. 'I appreciate that there has to be a certain uniform standard, but trousers that are too tight? I think it's pathetic.' One woman, who did not wish to be named but whose granddaughter attends the school, said: 'I received a phone call from my granddaughter complaining that she was being put in isolation because her trousers didn't comply with school rules. 'She said that around 70 pupils were also there, both boys and girls, because they had the wrong style trousers or skirts, or the wrong fit. 'I understand that uniforms are important but the reasons seemed petty. I am up in arms that they were taken out of their lessons and that they wasted a day. 'Surely trousers that are too tight shouldn't warrant a child losing a day's education? 'My granddaughter is angry and upset. Those children learnt nothing yesterday; what a waste. 'We now apparently have until next Monday to get different trousers for her so we will go later to buy her new ones. 'I don't know why they couldn't just have sent a letter instead of excluding these children from lessons.' 
Ms Webster's daughter (right) was told she was not allowed to attend classes because her trousers were too tight and students at the school were offered trousers from lost property to wear instead (left) Simeon Molloy, headteacher at Denbigh High School, said: 'We believe that standards across the board, including uniform, contribute to a positive learning environment. 'At the end of the last school year we held several school assemblies that outlined our school uniform policy. 'We followed this up by telephoning parents and sending out letters with photographs and information on the style of trousers and length of skirts. 'On Monday, September 5, the first day of the school term, there were a number pupils whose uniforms did not meet the required standard. 'The main issue involved the wearing of inappropriate trousers, skirts and footwear. 'Those pupils with incorrect uniform had access to their learning through notebooks in our learning suite, separate from their normal lessons. Several pupils were able to get the correct uniform during course of the day and returned to their classes. Simeon Molloy, headteacher at Denbigh High School, said: 'We believe that standards across the board, including uniform, contribute to a positive learning environment' 'We have spoken with every parent of every student involved in this matter to explain individual issues with each pupil's uniform. We will continue to uphold our high expectations.' Amy Williams, 37, from Denbigh, said: 'Some of it is over the top but you're told what the uniform is. Children should go to school looking smart and presentable. 'But I do think that the staff should have a uniform too though, they should have to wear a t-shirt with the school badge on or something.' Many parents and relatives used social media to speak out against what had happened. 
Nikkileigh Evans, whose brother attends Denbigh High, said: 'I can't believe that children were put in the isolation room for wearing the wrong trousers/skirts. 'They are there to learn not to get judged on what they wear! 'A lot of the children that got sent to the isolation room were wearing the same sort of thing as they did last year. Isn't that contradicting themselves? 'And the new kids to the school must have been nervous enough without the school ruining their first day there.' Another woman who did not wish to be named, said: 'I have nieces at Denbigh High School. I think it's so cruel and unnecessary what the school have done.
/**
 * Invoked when the user presses the device back button or the toolbar
 * back affordance. If there are unsaved edits, asks for confirmation
 * before discarding them; otherwise navigates back immediately.
 */
private void onUserWantsToLeave() {
    if (!hasUserChangedSomething()) {
        // Nothing to lose — defer to the normal back navigation.
        super.onBackPressed();
        return;
    }
    showLeaveWithoutSaveDialog();
}
def address_to_latlng(address, logger=None):
    """Geocode a free-form address via the Google Geocoding API.

    Args:
        address: Address string to geocode (results localized to 'ja').
        logger: Optional logger; defaults to this module's logger.

    Returns:
        dict with ``latitude`` and ``longitude`` keys, or ``None`` when
        the API returns no results (the raw response is logged).
    """
    if logger is None:
        logger = logging.getLogger(__name__)

    base_url = 'https://maps.googleapis.com/maps/api/geocode/json?{}'
    query = urllib.parse.urlencode({
        'address': address,
        'key': settings.GOOGLE_GEOCODING_KEY,
        'language': 'ja'
    })
    req = urllib.request.Request(base_url.format(query))
    # Close the HTTP response deterministically; the original leaked the
    # connection by never closing the urlopen() handle.
    with urllib.request.urlopen(req) as resp:
        content = resp.read().decode('utf-8')
    result = json.loads(content)

    if not result['results']:
        logger.error('results empty result={}'.format(result))
        return None

    location = result['results'][0]['geometry']['location']
    return {
        'latitude': location['lat'],
        'longitude': location['lng']
    }
#define PROBLEM "https://judge.yosupo.jp/problem/dynamic_tree_vertex_add_path_sum"

#include "../../template/template.hpp"
//
using namespace Nyaan;

#include "../../modint/montgomery-modint.hpp"
using mint = LazyMontgomeryModInt<998244353>;
using vm = vector<mint>;

#include "../../math/affine-transformation.hpp"
#include "../../misc/fastio.hpp"
//
#include "../../lct/link-cut-tree.hpp"
//
// Monoid for path sums on the link/cut tree: plain addition, and the
// "reverse" operation ts() is identity because addition is commutative.
using T = long long;
T f(T a, T b) { return a + b; }
T ts(T a) { return a; }

using namespace Nyaan;

void Nyaan::solve() {
  int N, Q;
  rd(N, Q);
  using LCT = LinkCutTree<T, f, ts>;
  LCT lct;
  // One LCT node per vertex, keyed by its initial value.
  vector<LCT::Ptr> vs(N);
  rep(i, N) {
    int a;
    rd(a);
    vs[i] = lct.my_new(a);
  }
  // The input tree's N-1 edges become LCT links.
  for (int i = 1; i < N; i++) {
    int a, b;
    rd(a, b);
    lct.link(vs[a], vs[b]);
  }
  while (Q--) {
    int cmd;
    rd(cmd);
    if (cmd == 0) {
      // Replace edge (u, v) with edge (w, x).
      int u, v, w, x;
      rd(u, v, w, x);
      lct.cut(vs[u], vs[v]);
      lct.link(vs[w], vs[x]);
    } else if (cmd == 1) {
      // Add x to the value stored at vertex u.
      int u, x;
      rd(u, x);
      lct.set_key(vs[u], lct.get_key(vs[u]) + x);
    } else {
      // Query: sum of values on the path u .. v.
      int u, v;
      rd(u, v);
      T fold = lct.fold(vs[u], vs[v]);
      wtn(fold);
    }
  }
}
# cook your dish here
def min_cost(n, m, a, b):
    """Cheapest way to cover n items when one item costs `a` and a
    bundle of m items costs `b`.

    Considers: all singles; full bundles plus leftover singles; and,
    when there is a leftover, rounding up to one extra bundle
    (overshooting is allowed and can be cheaper).
    """
    leftover = n % m
    best = min(n * a, (n // m) * b + leftover * a)
    if leftover:
        best = min(best, (n // m + 1) * b)
    return best


if __name__ == "__main__":
    # Guarded so importing this module does not block on stdin.
    n, m, a, b = map(int, input().split())
    print(min_cost(n, m, a, b))
/**
 * Removes the mapping project including a Mapping Definition and Documents related to specified ID.
 * The operation is idempotent: a missing project folder still yields 200 OK.
 *
 * @param mappingDefinitionId mapping definition ID
 * @return empty response
 */
@DELETE
@Path("/project/{mappingDefinitionId}")
@Produces(MediaType.APPLICATION_JSON)
@Operation(summary = "Delete Mapping Project by ID", description = "Delete the mapping project including a Mapping Definition and Documents related to specified ID")
@ApiResponses({
    @ApiResponse(responseCode = "200", description = "Mapping project was removed successfully"),
    @ApiResponse(responseCode = "204", description = "Unable to remove a mapping project for the specified ID")})
public Response deleteMappingProjectById(
        @Parameter(description = "Mapping Definition ID") @PathParam("mappingDefinitionId") Integer mappingDefinitionId) {
    LOG.debug("deleteMappingProjectById {} ", mappingDefinitionId);
    java.nio.file.Path mappingFolderPath = Paths.get(getMappingSubDirectory(mappingDefinitionId));
    File mappingFolderFile = mappingFolderPath.toFile();
    // Path.toFile() never returns null (the previous null check was dead
    // code); only existence needs verifying. Nothing to delete is success.
    if (!mappingFolderFile.exists()) {
        return Response.ok().build();
    }
    if (!mappingFolderFile.isDirectory()) {
        LOG.warn("{} is not a directory - removing anyway", mappingFolderFile.getAbsolutePath());
    }
    AtlasUtil.deleteDirectory(mappingFolderFile);
    return Response.ok().build();
}
Originally posted at Themis Trading, Sometimes it’s nice to get a sanity check and hear other investors and market professionals views on how the stock market has changed over the past few years. We hear more and more from various market participants that the market seems to be one big correlated beast that doesn’t trade on supply and demand anymore. We have opined on this topic many times in the past, so today we would like to let you read what three other very well respected professionals recently had to say on the topic: Steve Wynn, Chairman and CEO of Wynn Resorts Steve Wynn knows a little something about gambling so it was only fitting that he made some comments on the way his stock has been trading: “The other day I was watching the stock open up, and it went up on share volumes of a few thousand shares. I mean, every trade was a tick up. That’s not the way it should operate in an honestly or intelligently run exchange. But that’s the thing, all those guys sold their dark pools and their order flow and the positioning on the floors of the servers to the high frequency traders. And it’s made a couple of guys that I’m friendly with very rich because they are high-frequency traders. But don’t respect the activity, and I’m severely critical of it. And I don’t mind saying so, either.” Wynn also was critical of regulators: “The activity in the stock markets is, in my view, poorly regulated and irresponsibly policed, especially with regard to short sales. ” Wynn said he has “very little respect for the integrity of the trading on the exchange of most stocks, and I have particular disdain for the fact that the SEC has failed to deal with high frequency traders.” Jim Cramer, CNBC Jim has been critical of market structure issues in the past and seems as frustrated as us when it comes to the regulatory changes that have occurred since the May 2010 Flash Crash. 
On last week’s 6th Anniversary of the Flash Crash, Cramer said: “Nothing has really changed to stop the market from once again losing its integrity in 15 minutes of insane, manipulated trading.” Cramer also commented on Steve Wynn’s comments: “I think Wynn’s dead right. I see this activity all of the time in his stock. It is a play thing for the shorts and the high frequency traders. He’s been able to take advantage of the shorts who he says helped drive it down, by getting great prices ahead of what turned out to be a bottom in Macau. But as for the day-to-day trading? It’s ridiculous. None of us would play cards at a table where the guy ahead of us knows our cards. Yet, that’s’ what’s going on. Wynn knows it. But most CEOs don’t. I guess it takes a gamer to know when the game’s rigged, and Wynn knows it better than anyone.” Leon Cooperman, Chairman and CEO of Omega Advisors In the past, Lee has been very critical of high frequency traders and the lack of regulatory oversight. You might recall that back in September 2012, we co-wrote a Financial Times op-ed with Lee which was titled “SEC Must Put A Stop To Casino Markets” . We’re glad to see that Lee is still speaking out and had this to say last week about the trading that is done in today’s market: “The market of today is not the one our fathers and grandfathers traded. Dodd frank. Demise of specialists. Demise of uptick rule. It’s a new game. The uptick rule worked for 70 years. In July 2007, they got rid of it for some reason. Now these momentum HFT’s are scaring people out of the market – including me! Whether the S&P is up or down 50 points in an hour – that’s not a real market!”
/*
 * Self-test for pvAccess RPC: starts an in-process RPC server on loopback,
 * registers two services ("sum" adds two numbers, "fail" always throws),
 * then exercises both through an RPC client.
 */
#include <pv/epicsException.h>
#include <pv/valueBuilder.h>
#include <pv/clientFactory.h>
#include <pv/rpcClient.h>
#include <pv/rpcServer.h>
#include <pv/rpcService.h>

#include <epicsUnitTest.h>
#include <testMain.h>

namespace pvd = epics::pvData;
namespace pva = epics::pvAccess;

namespace {

// Reply structure shared by SumService: a single double field "value".
pvd::StructureConstPtr reply_type(pvd::getFieldCreate()->createFieldBuilder()
                                  ->add("value", pvd::pvDouble)
                                  ->createStructure());

// RPC service computing query.lhs + query.rhs from an NTURI request.
struct SumService : public pva::RPCService
{
    virtual epics::pvData::PVStructure::shared_pointer request(
        epics::pvData::PVStructure::shared_pointer const & args
    ) OVERRIDE FINAL
    {
        testDiag("request()");
        pvd::PVScalarPtr lhs(args->getSubField<pvd::PVScalar>("query.lhs")),
                         rhs(args->getSubField<pvd::PVScalar>("query.rhs"));
        if(!lhs || !rhs)
            throw pva::RPCRequestException("Missing query.lhs and/or query.rhs");
        double a = lhs->getAs<double>(),
               b = rhs->getAs<double>();
        testDiag("Add %f + %f", a, b);
        pvd::PVStructure::shared_pointer reply(pvd::getPVDataCreate()->createPVStructure(reply_type));
        reply->getSubFieldT<pvd::PVDouble>("value")->put(a+b);
        return reply;
    }
};

// Round-trip 5 + 3 through the "sum" service and check the reply.
void testSum(const pva::ChannelProvider::shared_pointer& cli_prov)
{
    pva::RPCClient client("sum", pvd::createRequest("field()"), cli_prov);

    pvd::ValueBuilder args("epics:nt/NTURI:1.0");
    args.add<pvd::pvString>("scheme", "pva")
        .add<pvd::pvString>("path", "sum");

    pvd::PVStructurePtr reply;

    testDiag("Request");
    reply = client.request(args.addNested("query")
                               .add<pvd::pvDouble>("lhs", 5.0)
                               .add<pvd::pvDouble>("rhs", 3.0)
                           .endNested()
                           .buildPVStructure());

    pvd::int32 value = reply->getSubFieldT<pvd::PVScalar>("value")->getAs<pvd::int32>();
    // BUG FIX: the value was previously cast to unsigned while being printed
    // with the signed %d conversion; cast to int so format and argument match.
    testOk(value==8, "Reply value = %d", (int)value);

    testDiag("Wait for connect (already connected)");
    testOk1(client.waitConnect());
}

// RPC service which always throws; exercises error propagation to the client.
struct FailService : public pva::RPCService
{
    virtual epics::pvData::PVStructure::shared_pointer request(
        epics::pvData::PVStructure::shared_pointer const & args
    ) OVERRIDE FINAL
    {
        testDiag("failing()");
        throw std::runtime_error("oops");
    }
};

// The "fail" service must surface as an RPCRequestException on the client side.
void testRPCFail(const pva::ChannelProvider::shared_pointer& cli_prov)
{
    testDiag("Fail");
    pva::RPCClient client("fail", pvd::createRequest("field()"), cli_prov);

    pvd::ValueBuilder args("epics:nt/NTURI:1.0");
    args.add<pvd::pvString>("scheme", "pva")
        .add<pvd::pvString>("path", "fail");

    testDiag("Request");
    try{
        (void)client.request(args.addNested("query")
                                 .add<pvd::pvDouble>("lhs", 5.0)
                                 .add<pvd::pvDouble>("rhs", 3.0)
                             .endNested()
                             .buildPVStructure());
        testFail("Missing expected exception");
    }catch(pva::RPCRequestException& e){
        testPass("caught expected rpc exception: %s", e.what());
    }catch(std::exception& e){
        testFail("caught un-expected exception: %s", e.what());
    }
}

} // namespace

MAIN(testRPC)
{
    testPlan(3);
    try {
        // Constrain networking to loopback with dynamically assigned ports so
        // the test cannot collide with a real PVA installation on this host.
        pva::Configuration::shared_pointer conf(pva::ConfigurationBuilder()
                                                //.push_env()
                                                //.add("EPICS_PVA_DEBUG", "3")
                                                .add("EPICS_PVAS_INTF_ADDR_LIST", "127.0.0.1")
                                                .add("EPICS_PVA_ADDR_LIST", "127.0.0.1")
                                                .add("EPICS_PVA_AUTO_ADDR_LIST","0")
                                                .add("EPICS_PVA_SERVER_PORT", "0")
                                                .add("EPICS_PVA_BROADCAST_PORT", "0")
                                                .push_map()
                                                .build());

        testDiag("Server Setup");
        pva::RPCServer serv(conf);
        testDiag("TestServer on ports TCP=%u UDP=%u",
                 serv.getServer()->getServerPort(),
                 serv.getServer()->getBroadcastPort());
        {
            std::tr1::shared_ptr<pva::RPCService> service(new SumService);
            serv.registerService("sum", service);
        }
        {
            std::tr1::shared_ptr<pva::RPCService> service(new FailService);
            serv.registerService("fail", service);
        }

        testDiag("Client Setup");
        pva::ClientFactory::start();
        // Use the server's actual configuration so the client finds the
        // dynamically chosen ports.
        pva::ChannelProvider::shared_pointer cli_prov(pva::ChannelProviderRegistry::clients()->createProvider("pva", serv.getServer()->getCurrentConfig()));
        if(!cli_prov)
            testAbort("No pva provider");

        testDiag("Client Ready");

        testSum(cli_prov);
        testRPCFail(cli_prov);

    }catch(std::exception& e){
        PRINT_EXCEPTION(e);
        testAbort("Unexpected exception: %s", e.what());
    }
    return testDone();
}
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Analyze a list of words and search for complex terms.

Return a list of simple and complex terms.
"""

##########
# IMPORT #
##########
from collections import Counter


########
# MAIN #
########
def complexe(list_words, complex_size=2, threshold=2):
    """
    Analyze a list of words and search for complex terms.

    Take a list in entry, with the words in the text order.
    Return a dictionary, with complex terms and their occurrences.
    """
    # Build every run of `complex_size` consecutive words (n-grams) with the
    # standard library only: zip over `complex_size` shifted views of the
    # list.  This replaces the former third-party nltk.util.ngrams call and
    # produces the same tuples.
    association_list = zip(*(list_words[i:] for i in range(complex_size)))

    # The count of each combination will be stored in a dict
    asso_number = Counter(association_list)

    # Remove combinations with score lower than threshold
    return __filter_terms(asso_number, threshold)


def simple(list_words, threshold=1):
    """
    Analyze a list of words and search for simple terms.

    Take a list in entry, with the words in the text order.
    Return a dictionary, with simple terms and their occurrences.
    """
    # Count all simple terms and remove those below the threshold
    return __filter_terms(Counter(list_words), threshold)


#############
# FUNCTIONS #
#############
def __filter_terms(dic_combinaison, threshold):
    """
    Take a dictionary containing each combination of terms and return those
    which are found `threshold` or more times (default threshold=2).

    It returns a dict, containing each term as a key <str>, and value is the
    number of occurrences <int> in dic_combinaison.
    """
    # Remove all keys with a score lower than the threshold, and replace
    # tuple keys by a space-joined string
    return {__key2str(key): dic_combinaison[key]
            for key in dic_combinaison
            if dic_combinaison[key] >= threshold}


def __key2str(key):
    """
    Take a key and return it in string format: tuples are space-joined,
    anything else is returned unchanged.
    """
    if type(key) is tuple:
        return " ".join(key)
    else:
        return key


########
# TEST #
########
if __name__ == "__main__":
    list_of_terms = ["a", "la", "queue", "leu", "leu", "insuffisance",
                     "cardiaque", "queue", "supermarché", "insuffisance",
                     "cardiaque", "diabète", "queue", "leu"]
    print(complexe(list_of_terms))
    print(simple(list_of_terms))
module TheGreatZimbabwe.Text where

import qualified Data.Text as T

-- | Render any 'Show'able value as strict 'T.Text'.
tshow :: Show a => a -> T.Text
tshow x = T.pack (show x)
import * as mysql from 'mysql' const DB_CONFIG: mysql.ConnectionConfig = { host: process.env.MYSQL_HOST, port: parseInt(process.env.MYSQL_PORT || '3306'), user: process.env.MYSQL_USER, password: process.<PASSWORD>.MYSQL_PASSWORD, database: process.env.MYSQL_DATABASE, timezone: 'Z', } /** * Create a connection to a MySQL database defined through `config` * @param config Config object as defined in the `mysql` module * * @returns A database connection as a promise if connection is sucessful */ export function mysqlConnection (): Promise<mysql.Connection> { const connection = mysql.createConnection(DB_CONFIG) return new Promise<mysql.Connection>((resolve, reject) => { connection.connect((error) => { if (error) { console.log('Error connecting to MySQL database.') reject(Error(error.message)) } console.log(`Connected as ${connection.threadId}`) resolve(connection) }) }).catch(error => Promise.reject(new Error(`Could not connect to MySQL database: ${error}`))) } export function endMysqlConnection (connection: mysql.Connection) { connection.end((error) => { if (error) { console.log('Error ending MySQL database connection') console.log(error.message) } }) } // Promise-based query export function promiseQuery (connection: mysql.Connection, query: string): Promise<Array<any>> { return new Promise((resolve, reject) => { console.log(`Executing query ${query}`) connection.query( query, (error, results) => { if (error) { console.log(`Error executing query ${query}`) reject(error) } if (!results) return null // Convert RowDataPackets into JS objects const ret = [] for (const result of results) { ret.push(Object.assign({}, result)) } resolve(ret) } ) }) } /** * Get all trips with the corresponding user from the Trip database which users want to get notified * about in the time interval from now to in `interval` minutes * * @param db_config MySQL database configuration * @param interval Time interval in minutes * * @returns A list of trips with the user info flat in the object, as 
a promise */ export async function getTrips (interval: number): Promise<Array<Trip>|null> { const connection = await mysqlConnection().catch(error => Promise.reject(new Error(error))) const now = new Date() now.setUTCFullYear(1970, 0, 1) now.setSeconds(0, 0) const later = new Date() later.setUTCFullYear(1970, 0, 1) later.setMinutes(later.getMinutes() + interval, 0, 0) const from = now.toISOString() const to = later.toISOString() const trips = await promiseQuery( connection, 'SELECT Trip.*, User.email FROM `Trip` LEFT JOIN `User` ON Trip.user = User.id ' + `WHERE \`enabled\` = 1 AND \`notify_at\` >= "${from}" AND \`notify_at\` < "${to}"` ) endMysqlConnection(connection) return trips } export async function writeForecasts (forecasts: Array<HourlyForecast>) { const connection = await mysqlConnection().catch(error => Promise.reject(new Error(error))) for (const forecast of forecasts) { const tripId = forecast.trip delete forecast.trip if (forecast.date instanceof Date) forecast.date = forecast.date.toISOString().slice(0, -1) await promiseQuery( connection, `UPDATE \`Trip\` SET \`forecast\` = '${JSON.stringify(forecast)}' WHERE \`id\` = "${tripId}";` ).catch(error => { console.log(`Could not write forecast for trip ${tripId} to DB`) console.log(error.stack) }) } endMysqlConnection(connection) } /* // Async query -- Uses node.js util function "promisify" const async_query_promisified = util.promisify(connection.query).bind(connection) async function async_query(query: string) { return await async_query_promisified(query) } // Callback query function cquery(query: string, callback?: Function) { connection.query( query, (error, results, fields) => { if (error) throw error //console.log(results) if (!callback) return return callback(results) } ) } */
// Turn an OINLCALL into a statement.
func inlconv2stmt(n *Node) {
	// Rewrite the node in place as a block statement.
	n.Op = OBLOCK

	// The inlined call's body becomes the block's statement list.
	// This must happen before Nbody is cleared below.
	n.List.Set(n.Nbody.Slice())

	// Clear the now-transferred body and the result list so the node is a
	// plain block with no leftover references.
	n.Nbody.Set(nil)
	n.Rlist.Set(nil)
}
<filename>1929-concatenation_of_array/single_line.go package lc // Time: O(n) // Benchmark: 8ms 6.4mb | 89% 69% func getConcatenation(nums []int) []int { return append(nums, nums...) }
package github

import (
	"context"
	"fmt"
	"github.com/google/go-github/v28/github"
	"github.com/imjoseangel/cigame/league"
	"time"
)

// Service wraps a go-github client and converts repository commits into the
// league's SimpleCommit representation.
type Service struct {
	client *github.Client
}

// NewService builds a Service around an already-configured go-github client.
func NewService(client *github.Client) *Service {
	return &Service{client: client}
}

// GetCommits collects every commit made since `since` across the given repos
// of `owner`, following pagination, and returns them as SimpleCommits.
func (g *Service) GetCommits(ctx context.Context, since time.Time, owner string, repos ...string) ([]league.SimpleCommit, error) {
	var allCommits []league.SimpleCommit
	for _, repo := range repos {
		options := github.CommitsListOptions{
			Since:       since,
			ListOptions: github.ListOptions{},
		}

		// Walk the paginated commit listing until NextPage reports 0.
		for {
			commits, response, err := g.client.Repositories.ListCommits(ctx, owner, repo, &options)
			if err != nil {
				return nil, fmt.Errorf("couldn't get commits, %s", err)
			}
			for _, commit := range commits {
				simpleCommit, err := g.newSimpleCommit(ctx, owner, repo, commit)
				if err != nil {
					// NOTE: returns the commits collected so far alongside
					// the error (partial result).
					return allCommits, err
				}
				allCommits = append(allCommits, simpleCommit)
			}
			if response.NextPage == 0 {
				break
			}
			options.Page = response.NextPage
		}
	}
	return allCommits, nil
}

// newSimpleCommit flattens a repository commit plus its combined CI status
// into a league.SimpleCommit. Requires one extra API call per commit.
func (g *Service) newSimpleCommit(ctx context.Context, owner string, repo string, commit *github.RepositoryCommit) (league.SimpleCommit, error) {
	status, _, err := g.client.Repositories.GetCombinedStatus(ctx, owner, repo, commit.GetSHA(), nil)
	if err != nil {
		return league.SimpleCommit{}, fmt.Errorf("problem getting status %v", err)
	}

	simpleCommit := league.SimpleCommit{
		Email:     commit.GetCommit().GetAuthor().GetEmail(),
		AvatarURL: commit.GetAuthor().GetAvatarURL(),
		Message:   commit.GetCommit().GetMessage(),
		Status:    status.GetState(),
		CreatedAt: commit.GetCommit().GetAuthor().GetDate(),
	}
	return simpleCommit, nil
}
// Minimum king moves from square s to square t (e.g. "a1" -> "h8").
// Prints the move count, then one move per line combining a horizontal
// step (L/R) and/or a vertical step (U/D).
//
// BUG FIX: the original used fixed-size `string ans[10][2]`, which silently
// overflows for inputs more than 10 squares apart (only safe on an 8x8
// board). This version has no such bound and keeps the exact same output
// format: horizontal letter first, newline between moves but not after the
// last one.
#include <iostream>
#include <string>
#include <algorithm>
#include <cstdlib>
using namespace std;

int main()
{
    string s, t;
    cin >> s;
    cin >> t;

    // Signed deltas between the two squares; signs choose the letters.
    int dx = t[0] - s[0];
    int dy = t[1] - s[1];
    int ax = abs(dx);
    int ay = abs(dy);

    // A king moves diagonally while both deltas remain, so the move count
    // is the larger of the two absolute deltas.
    int n = max(ax, ay);

    cout << n << endl;
    for (int i = 0; i < n; i++) {
        string move;
        if (i < ax) {
            move += (dx > 0 ? 'R' : 'L');
        }
        if (i < ay) {
            move += (dy > 0 ? 'U' : 'D');
        }
        cout << move;
        if (i < n - 1) {
            cout << endl;
        }
    }
    return 0;
}
ST. PETERSBURG — It would be asking a lot of Rowdies fans to show up for Game No. 2 of the season the way they did for the opener, when an Al Lang Stadium record crowd of 7,710 created a veritable soundtrack for a 1-0 victory over Orlando City B a week ago today. Still, if supporters of the green and gold wouldn't terribly mind … "Right now teams look forward to coming here," Rowdies coach Stuart Campbell said. "The pitch is so nice, the surroundings. We want to turn that around. We want to make teams dread coming here." Toronto FC II, Campbell hopes, leaves with that feeling after tonight's game at Al Lang. Campbell isn't suggesting fans get nasty. He just wants a large showing of support that uplifts the home team. "It's absolutely priceless, just the energy, the encouragement, the lift it gives to the players," Campbell said. "(Last Saturday's game) was the best I've ever experienced at Al Lang." A notable observer would agree. United Soccer League president Jake Edwards was there. "You had this really loud, boisterous, tribal singing, chanting throughout the game. I thoroughly enjoyed it," said Edwards, leader of the league in which the Rowdies are spending their first season after six years in the NASL. "At that stadium, the roof keeps the noise in the venue, which is great. But the civility goes to the ethos of the club. They put on an event that's family focused, and community focused." Edwards is a pretty happy man after last weekend. Two other USL teams also set attendance records, with Richmond drawing 8,021 and Phoenix 6,890. Tonight's Rowdies opponent was the visitor in the latter, and that crowd didn't bother Toronto, which won 1-0. Ryan Telfer, just signed out of York University, got the lone goal. Toronto's wing backs had their way with Phoenix. "They were really impressive, and I've got to be honest, it wasn't against the odds they got the three points," Campbell said. 
Tampa Bay will be tested more than it was by Orlando City B, which got outshot 21-3. Georgi Hristov had the only score on a second-half penalty kick. The Rowdies substituted only twice, both in injury time. "We looked very good in the preseason, fitness wise," Campbell said. "I just felt, we looked fit, sharp, strong, and I didn't want to tinker too much with it." Five new Rowdies went the distance: goalkeeper Akira Fitzgerald, defenders Damion Lowe and Luke Boden, and midfielders Marcel Schafer and Leo Fernandes, whose run created the penalty kick. Now, to see if the fans can keep up their endurance with another strong turnout.
def load_schema_for_modelling():
    """Load the modelling schema CSV shipped next to this module.

    Returns a DataFrame indexed by the ``table_name`` column.
    """
    module_dir = os.path.abspath(os.path.dirname(__file__))
    schema_path = os.path.join(module_dir, "modelling_schema.csv")
    schema = pd.read_csv(schema_path)
    return schema.set_index('table_name')
def num_tasks(self) -> int:
    """Number of prediction tasks represented by this dataset.

    For multiclass data this is the largest class index observed in the
    targets plus one (classes are 0-indexed); otherwise it is delegated to
    the first datapoint, or ``None`` when the dataset is empty.
    """
    if self.args.dataset_type == 'multiclass':
        highest_class = max(target[0] for target in self.targets())
        return int(highest_class) + 1
    if len(self.data) > 0:
        return self.data[0].num_tasks()
    return None
/** Test general aspects of system actions.
 * Currently, just the icon.
 * @author Jesse Glick
 */
public class SystemActionTest extends NbTestCase {

    public SystemActionTest(String name) {
        super(name);
    }

    @Override
    protected Level logLevel() {
        // Silence the default logger; warnings are captured explicitly via
        // Log.enable in testIcons instead.
        return Level.OFF;
    }

    /** Test that iconResource really works.
     * Loads the reference icon directly from the classpath, then checks that
     * each flavor of iconResource path (absolute with slash, absolute without,
     * relative, nonexistent) resolves to the same pixels and/or logs the
     * expected warning.
     * @see "#26887"
     */
    public void testIcons() throws Exception {
        // Pixel hash of the icon loaded the canonical way; all actions below
        // must resolve to an identical image.
        Image i = Toolkit.getDefaultToolkit().getImage(SystemActionTest.class.getResource("data/someicon.gif"));
        int h = imageHash("Control icon", i, 16, 16);
        SystemAction a = SystemAction.get(SystemAction1.class);
        CharSequence log = Log.enable("org.openide.util", Level.WARNING);
        assertEquals("Absolute slash-initial iconResource works (though deprecated)", h, imageHash("icon1", icon2Image(a.getIcon()), 16, 16));
        assertTrue(log.toString(), log.toString().contains("Initial slashes in Utilities.loadImage deprecated"));
        a = SystemAction.get(SystemAction2.class);
        assertEquals("Absolute no-slash-initial iconResource works", h, imageHash("icon2", icon2Image(a.getIcon()), 16, 16));
        a = SystemAction.get(SystemAction3.class);
        assertEquals("Relative iconResource works (though deprecated)", h, imageHash("icon3", icon2Image(a.getIcon()), 16, 16));
        assertTrue(log.toString(), log.toString().contains("Deprecated relative path"));
        a = SystemAction.get(SystemAction4.class);
        a.getIcon();
        assertTrue(log.toString(), log.toString().contains("No such icon"));
    }

    /** Minimal concrete SystemAction base; subclasses only vary iconResource(). */
    private static abstract class TestSystemAction extends SystemAction {
        public void actionPerformed(ActionEvent e) {}
        public HelpCtx getHelpCtx() {
            return HelpCtx.DEFAULT_HELP;
        }
        public String getName() {
            return getClass().getName();
        }
    }

    /** Icon addressed by an absolute, slash-initial resource path (deprecated form). */
    public static final class SystemAction1 extends TestSystemAction {
        protected String iconResource() {
            return "/org/openide/util/actions/data/someicon.gif";
        }
    }

    /** Icon addressed by an absolute resource path without the leading slash. */
    public static final class SystemAction2 extends TestSystemAction {
        protected String iconResource() {
            return "org/openide/util/actions/data/someicon.gif";
        }
    }

    /** Icon addressed relative to the action's package (deprecated form). */
    public static final class SystemAction3 extends TestSystemAction {
        protected String iconResource() {
            return "data/someicon.gif";
        }
    }

    /** Icon path that does not exist; fetching it should log "No such icon". */
    public static final class SystemAction4 extends TestSystemAction {
        protected String iconResource() {
            return "no/such/icon.gif";
        }
    }

    /** Render an Icon into an ARGB BufferedImage so its pixels can be hashed. */
    private static Image icon2Image(Icon ico) {
        int w = ico.getIconWidth();
        int h = ico.getIconHeight();
        BufferedImage img = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
        ico.paintIcon(new JButton(), img.getGraphics(), 0, 0);
        return img;
    }

    // Copied from SystemFileSystemTest:
    /** Order-sensitive hash over the first w*h pixels of an image.
     * Fully transparent pixels are normalized to 0 so differing transparent
     * RGB garbage does not affect the hash.
     */
    private static int imageHash(String name, Image img, int w, int h) throws InterruptedException {
        int[] pixels = new int[w * h];
        PixelGrabber pix = new PixelGrabber(img, 0, 0, w, h, pixels, 0, w);
        pix.grabPixels();
        assertEquals(0, pix.getStatus() & ImageObserver.ABORT);
        if (false) {
            // Debugging.
            System.out.println("Pixels of " + name + ":");
            for (int y = 0; y < h; y++) {
                for (int x = 0; x < w; x++) {
                    if (x == 0) {
                        System.out.print('\t');
                    } else {
                        System.out.print(' ');
                    }
                    int p = pixels[y * w + x];
                    String hex = Integer.toHexString(p);
                    while (hex.length() < 8) {
                        hex = "0" + hex;
                    }
                    System.out.print(hex);
                    if (x == w - 1) {
                        System.out.print('\n');
                    }
                }
            }
        }
        int hash = 0;
        for (int i = 0; i < pixels.length; i++) {
            hash += 172881;
            int p = pixels[i];
            if ((p & 0xff000000) == 0) {
                // Transparent; normalize.
                p = 0;
            }
            hash ^= p;
        }
        return hash;
    }
}
#!/usr/bin/python
# coding=utf-8
# Originally Written By: <NAME>
# Source : Python2"
# Do not Recode It.

__author__ = "<NAME>"
__copyright = "Copyright (c) 2020-2025, <NAME>"
Description = """ This Tool Is Created To Hide Your Python Codes From The Eyes Of Copier.This Is Compatible With Python And Python2.7"""

import random, sys, logging

logging.basicConfig(level=logging.INFO)


def main(files, string):
    """Obfuscate the Python source file `files`.

    Each character of the source is encoded as a marker string repeated
    ord(char) times, so its *length* is the character code; the generated
    script rebuilds the source with chr(len(...)) and exec()s it.  The
    result is written next to the input as `<name>en1.py`.
    """
    # BUG FIX: file handles were previously opened without being closed;
    # context managers close them deterministically.
    with open(files) as handle:
        source = handle.read()

    # Strip surrounding quotes so shell-quoted markers like "'+'" work.
    marker = string.replace("'", "").replace('"', "")

    # One list entry per character: marker repeated ord(char) times.
    pea = [marker * ord(char) for char in source]

    payload = """
# coding=utf-8
# Encrypted By : <NAME>
# Github : https://github.com/Hamzahash
hop_programmer={};exec("".join([chr(len(i)) for i in hop_programmer]))
""".format(pea)

    out_name = files.replace(".py", "en1.py")
    with open(out_name, "w") as handle:
        handle.write(payload)
    logging.info(" saved as " + out_name)


try:
    logging.info("Encryting Please Wait " + sys.argv[1] + " ...")
    main(sys.argv[1], sys.argv[2])
# BUG FIX: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
# catching Exception keeps the usage-message fallback for real errors only.
except Exception:
    print("""
	[!] ussage: plusobf.py <filename> 'string'
	  Example: python plusobf.py myscript.py '+'
	  """)
def func():
    # Handle one test case: read n and print an n-digit answer.
    n = int(input())
    if (n==1):
        # No valid 1-digit answer exists for this case; -1 signals that.
        print(-1)
    else:
        # Build (n-1) threes followed by a single four, e.g. n=3 -> "334".
        # NOTE(review): presumably this construction satisfies the (unseen)
        # problem's divisibility/digit constraint — confirm against the
        # original problem statement.
        t = "3"*(n-1)
        t = t+"4"
        print(t)

# Number of test cases on the first line, then one call per case.
cas = int(input())
for i in range(cas):
    func()
package math

import (
	"testing"
	"math"
)

// TestLinearInterpolator checks that interpolating the identity mapping
// {0 -> 0, 1 -> 1} at x = 0.5 yields approximately 0.5.
func TestLinearInterpolator(t *testing.T) {
	li, err := LinearInterpolator([]float64{0, 1}, []float64{0, 1})
	if err != nil {
		t.Fatal(err)
	}

	v, err := li(0.5)
	if err != nil {
		t.Fatal(err)
	}

	// NOTE(review): the literal "000.1" parses as the float 0.1, so this
	// tolerance is a loose 0.1 — confirm whether 0.001 was intended.
	if math.Abs(v - 0.5) > 000.1 {
		t.Fatal("Expected near 0.5 but got ", v)
	}
}
package io.github.morichan.fescue.feature.direction;

/**
 * <p> Out (output) class </p>
 *
 * <p>
 * The output ({@code "out"}) variant of a {@link Direction}.
 * </p>
 */
public class Out implements Direction {

    /**
     * <p> Reports whether the output direction string is emitted. </p>
     *
     * <p>
     * This setting cannot be changed, so this always returns true.
     * </p>
     *
     * @return {@code true}
     */
    @Override
    public boolean isOuted() {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * <p>
     * @deprecated
     * Disabling the output of the direction string is not supported, so
     * passing {@code false} throws an exception.
     * </p>
     */
    @Deprecated
    @Override
    public void setOuted(boolean isOuted) {
        if (! isOuted) throw new IllegalCallerException();
    }

    /**
     * <p> Returns the output direction string. </p>
     *
     * @return {@code "out"}
     */
    @Override
    public String toString() {
        return "out";
    }
}
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import requests


class example:
    """Small exercise class around the NYC Airbnb listings dataset."""

    def __init__(self):
        # Local copy of the dataset; read() validates the file extension.
        self.data = self.read("e3\india\AB_NYC_2019.csv")
        self.url = "https://github.com/hka-mmv/dscb230-exercise/blob/main/e1/lecturer/AB_NYC_2019.csv.zip"

    # --------------- GET THE DATA ---------------
    def fetch(self, url):
        """Fetch a dataset from a given url and return it as a DataFrame.

        Returns None when the request times out or is redirected too often.
        """
        # NOTE: Fetching a HTTP request usually needs "try except"!
        # BUG FIX: `dataset` was previously unbound on the error paths,
        # raising NameError at the return below; it now defaults to None.
        dataset = None
        try:
            r = requests.get(url=url, timeout=10)
            # BUG FIX: pd.read_json() expects a JSON string or path, not the
            # already-parsed object returned by r.json(); build the frame
            # from the parsed payload instead.
            # NOTE(review): assumes the endpoint returns JSON records — the
            # stored self.url points at a zipped CSV page; confirm endpoint.
            dataset = pd.DataFrame(r.json())
        except requests.exceptions.Timeout:
            # Maybe set up for a retry, or continue in a retry loop
            print("Timeout of the request")
        except requests.exceptions.TooManyRedirects:
            # Tell the user their URL was bad and try a different one
            print("Your URL was bad. Please try a different one.")
        except requests.exceptions.RequestException as e:
            # catastrophic error. bail.
            raise SystemExit(e)
        return dataset

    def read(self, filename: str):
        """Read a given CSV file into a DataFrame.

        Raises AssertionError when the file is not a ``.csv``.
        """
        # BUG FIX: the assertion message now names the offending file
        # (it previously printed the literal "(unknown)" and the typo
        # "falid").
        assert filename.endswith('.csv'), f'{filename} has no valid file extension'
        return pd.read_csv(filename)

    # --------------- DATA UNDERSTANDING ---------------
    # This section contains methods for searching the dataset for specific
    # characteristics. The docstrings will help you to use them.
    # NOTE: Use "unittest" to check if the get methods return the right data

    def get_if(self, statement):
        """Return every row matching the boolean mask `statement`."""
        return self.data[statement]

    def get_head(self, limit=10):
        """Return the first `limit` rows of the data."""
        return self.data.head(limit)

    def get_tail(self, limit=10):
        """Return the last `limit` rows of the data."""
        return self.data.tail(limit)

    def get_info(self, verbose=True):
        """Print DataFrame info: index dtype, columns, non-null counts and
        memory usage."""
        # BUG FIX: `verbose` was accepted but silently ignored.
        return self.data.info(verbose=verbose)

    def get_shape(self):
        """Return a tuple representing the dimensionality of the DataFrame."""
        return self.data.shape

    def get_value(self, key):
        """Return the column stored under `key`."""
        return self.data[key]

    def get_type(self):
        """Return the dtypes of the DataFrame."""
        return self.data.dtypes

    def get_null(self):
        """Detect missing values (element-wise boolean DataFrame)."""
        return self.data.isnull()

    # --------------- DATA PREPARATION ---------------
    def dropnullvalues(self):
        """Drop rows which contain missing values.

        See also
        https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.dropna.html
        """
        # BUG FIX: axis=1 dropped whole *columns* with missing values,
        # contradicting the documented contract; axis=0 drops the
        # incomplete rows.
        return self.data.dropna(axis=0)

    # --------------- VISUALIZATION ---------------
    def draw_hist(self):
        """Draw a histogram for every numeric column."""
        self.data.hist()

    def draw_facetgrid(self):
        """Draw a facet grid of price vs. number of reviews per borough."""
        g = sns.FacetGrid(self.data, col='neighbourhood_group')
        return g.map(plt.scatter, 'price', 'number_of_reviews')

    def draw_scatter(self):
        """Draw a scatter plot of the listing coordinates."""
        # BUG FIX: DataFrame.plot.scatter() takes column *labels* (or
        # positions), not the column Series themselves.
        self.data.plot.scatter(x="longitude", y="latitude")


obj = example()
# Customized data set: drop the first row, keep every column, then plot a
# histogram over its numeric columns.
obj2 = obj.data.iloc[1:, :]
obj2.plot.hist()

# Show a histogram
obj.draw_hist()

plt.show()  # Very important, otherwise no plot will show up and stay
Oblique needle segmentation for 3D TRUS-guided robot-aided transperineal prostate brachytherapy 3D TRUS-guided robot-aided prostate brachytherapy provides tools for dynamic re-optimization of a dose plan by freeing needle insertions from parallel trajectory constraints, i.e., needle trajectories can be positioned with considerable flexibility including oblique. However, oblique insertion results in the needle intersecting the 2D TRUS image and appearing as a dot, leading to blind guidance. Here, we propose a method for oblique needle segmentation and tracking to be used in a 3D TRUS guided and robot aided prostate brachytherapy system. This algorithm applies a grey-level change detection technique to find the location and orientation of needles from 3D images. Three 2D images containing the needle (oblique sagittal, coronal and transverse planes) are extracted and displayed in near real-time. Testing showed that our algorithm can find 3D needle orientation within 0.54° for a chicken tissue phantom, and 0.58° for agar phantoms, over a ±15° insertion orientation. The execution time averaged 0.13 s on a 1.2 GHz computer.
from .rnn_gauss import RNN_GAUSS
from .vrnn_single import VRNN_SINGLE
from .vrnn_indep import VRNN_INDEP
from .vrnn_mixed import VRNN_MIXED
from .macro_vrnn import MACRO_VRNN
from .macro_shared_vrnn import MACRO_SHARED_VRNN
from .vrae_mi import VRAE_MI


def load_model(model_name, params, parser=None):
    """Instantiate the model class registered under ``model_name``.

    The lookup is case-insensitive; an unknown name raises
    NotImplementedError.
    """
    registry = {
        'rnn_gauss': RNN_GAUSS,
        'vrnn_single': VRNN_SINGLE,
        'vrnn_indep': VRNN_INDEP,
        'vrnn_mixed': VRNN_MIXED,
        'macro_vrnn': MACRO_VRNN,
        'macro_shared_vrnn': MACRO_SHARED_VRNN,
        'vrae_mi': VRAE_MI,
    }
    try:
        model_cls = registry[model_name.lower()]
    except KeyError:
        raise NotImplementedError
    return model_cls(params, parser)
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, TensorDataset
import matplotlib.pyplot as plt
from losses import PerceptualLoss
import os, h5py
from shaun import Shaun
from time import time
from crispy.utils import pt_vibrant


class Trainer:
    """
    The class used to set up the training and do the training and save the models for the network.

    Parameters
    ----------
    in_channels : int
        The number of channels of the images input to the network.
    out_channels : int
        The number of channels of the output images.
    nef : int
        The base number of feature maps to use at the first convolutional layer.
    data_pth : str
        The path to the training data.
    slic_pth : str
        The path to the Slic repository.
    slic_model_pth : str
        The path to the trained Slic model.
    save_dir : str
        The directory to save the models in.
    minibatches_per_epoch : int
        The number of minibatches to train on per epoch.
    max_val_batches : int
        The number of batches to do the validation on.
    layer : int
        The layer that the feature maps are compared at in Slic.
    """

    def __init__(self, in_channels, out_channels, nef, data_pth, slic_pth, slic_model_pth, save_dir, minibatches_per_epoch, max_val_batches, layer):
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        self.model = Shaun(in_channels=in_channels, out_channels=out_channels, nef=nef).to(self.device)
        print("=> Created Shaun.")
        self.data_pth = data_pth
        self.perceptual_loss = PerceptualLoss(loss=nn.MSELoss(), slic_pth=slic_pth, model_pth=slic_model_pth, layer=layer)
        print("=> Shaun has become perceptually aware.")
        self.mse_loss = nn.MSELoss()
        print("=> Shaun knows how to calculate distances.")
        # Returns the two losses as a (perceptual, MSE) pair so they can be
        # logged separately as well as summed for backprop.
        self.combined_loss = lambda *x: (self.perceptual_loss.get_loss(*x), self.mse_loss(*x))
        self.save_dir = save_dir
        self.current_epoch = 0
        self.minibatches_per_epoch = minibatches_per_epoch
        self.max_val_batches = max_val_batches

    def load_data(self):
        """
        This class method loads the training data and splits it into training and validation data 90/10%.
        """
        f = h5py.File(self.data_pth, "r")
        inp, out = np.array(f.get("input")), np.array(f.get("output"))
        indices = np.arange(inp.shape[0])
        # Fixed seed so the train/validation split is reproducible across runs.
        np.random.RandomState(seed=42).shuffle(indices)
        max_idx = int(0.9*indices.shape[0])
        self.train_in, self.train_out = inp[indices[:max_idx]], out[indices[:max_idx]]
        self.val_in, self.val_out = inp[indices[max_idx:]], out[indices[max_idx:]]

    def checkpoint(self):
        """
        This class method creates a checkpoint for the current epoch.
        """
        # The scheduler state is only present when a learning-rate scheduler
        # was created in arcade_star, hence the hasattr branch.
        if hasattr(self, "scheduler"):
            self.chkpt = {
                "epoch" : self.current_epoch,
                "model_state_dict" : self.model.state_dict(),
                "optimiser_state_dict" : self.optimiser.state_dict(),
                "scheduler_state_dict" : self.scheduler.state_dict(),
                "losses" : self.losses
            }
        else:
            self.chkpt = {
                "epoch" : self.current_epoch,
                "model_state_dict" : self.model.state_dict(),
                "optimiser_state_dict" : self.optimiser.state_dict(),
                "losses" : self.losses
            }

    def save_checkpoint(self):
        """
        This class method saves the current checkpoint.
        """
        # NOTE(review): save_dir is concatenated directly with the epoch
        # number, so it is expected to end with a path separator -- confirm.
        save_pth = f"{self.save_dir}{self.current_epoch}.pth"
        torch.save(self.chkpt, save_pth)

    def load_checkpoint(self, filename):
        """
        This class method loads a checkpoint.

        Parameters
        ----------
        filename : str
            The checkpoint to be loaded.
        """
        if os.path.isfile(filename):
            print(f"=> loading checkpoint (unknown)")
            ckp = torch.load(filename)
            self.current_epoch = ckp["epoch"]
            self.losses = ckp["losses"]
            self.model.load_state_dict(ckp["model_state_dict"])
            # Optimiser/scheduler may not exist yet when restoring outside of
            # arcade_star, so restore them only if present.
            if hasattr(self, "optimiser"):
                self.optimiser.load_state_dict(ckp["optimiser_state_dict"])
            if hasattr(self, "scheduler"):
                self.scheduler.load_state_dict(ckp["scheduler_state_dict"])
            train_p = self.losses["train_p"][-1]
            train_l = self.losses["train_l"][-1]
            train_c = self.losses["train_c"][-1]
            val_p = self.losses["val_p"][-1]
            val_l = self.losses["val_l"][-1]
            val_c = self.losses["val_c"][-1]
            print(f"=> loaded checkpoint (unknown) at epoch {self.current_epoch} with training perceptual {train_p}, training L2 {train_l}, training combined {train_c}, validation perceptual {val_p}, validation L2 {val_l}, validation combined {val_c}")
        else:
            print(f"=> no checkpoint found at (unknown)")

    def train(self, train_loader, scheduler=False):
        """
        This class method carries out the training for one epoch.

        Parameters
        ----------
        train_loader : torch.utils.data.DataLoader
            The training data.
        scheduler : bool, optional
            Whether or not there is a learning rate scheduler.
        """
        pt_losses, lt_losses, t_losses = [], [], []
        self.model.train()
        minibatch_idx = 0
        for jj, (blr, img) in enumerate(train_loader):
            # Cap the number of minibatches per epoch.
            minibatch_idx += 1
            if minibatch_idx > self.minibatches_per_epoch:
                break
            # unsqueeze(1) adds the channel dimension expected by the network.
            blr = blr.float().to(self.device).unsqueeze(1)
            img = img.float().to(self.device).unsqueeze(1)
            output = self.model(blr)
            self.optimiser.zero_grad()
            p_loss, mse_loss = self.combined_loss(output, img)
            loss = p_loss + mse_loss
            loss.backward()
            self.optimiser.step()
            pt_losses.append(p_loss.item())
            lt_losses.append(mse_loss.item())
            t_losses.append(loss.item())
            # Keep the first minibatch's (blurred, generated, target) images
            # for plotting at the end of the epoch.
            if jj == 0:
                plt_blr = blr[0,0].clone().detach().cpu().squeeze().numpy()
                plt_gen = output[0,0].clone().detach().cpu().squeeze().numpy()
                plt_img = img[0,0].clone().detach().cpu().squeeze().numpy()
        if scheduler:
            self.scheduler.step()
        return np.mean(np.array(pt_losses)), np.mean(np.array(lt_losses)), np.mean(np.array(t_losses)), plt_blr, plt_gen, plt_img

    def validation(self, val_loader):
        """
        This class method does the validation for one epoch.

        Parameters
        ----------
        val_loader : torch.utils.data.DataLoader
            The validation data.
        """
        pv_losses, lv_losses, v_losses = [], [], []
        self.model.eval()
        val_batches = 0
        with torch.no_grad():
            for jj, (blr, img) in enumerate(val_loader):
                val_batches += 1
                if val_batches > self.max_val_batches:
                    break
                blr = blr.float().to(self.device).unsqueeze(1)
                img = img.float().to(self.device).unsqueeze(1)
                output = self.model(blr)
                p_loss, mse_loss = self.combined_loss(output, img)
                loss = p_loss + mse_loss
                pv_losses.append(p_loss.item())
                lv_losses.append(mse_loss.item())
                v_losses.append(loss.item())
        return np.mean(np.array(pv_losses)), np.mean(np.array(lv_losses)), np.mean(np.array(v_losses))

    def arcade_star(self, num_epochs, lr, reg=1e-6, batch_size=64, load=False, load_pth=None, scheduler=False, n_oscillate=100, lr_min=1e-5):
        """
        This class method trains the network with the interactive plotting environment.

        Parameters
        ----------
        num_epochs : int
            The total number of epochs to train for.
        lr : float
            The learning rate of the system.
        reg : float, optional
            The regularisation parameter for the optimiser. Default is 1e-6.
        batch_size : int, optional
            The batch size of the data loaders. Default is 64.
        load : bool, optional
            Whether or not an earlier model is being restored. Default is False.
        load_pth : str, optional
            The path to the earlier model that is being restored. Default is None.
        scheduler : bool, optional
            Whether or not a learning rate scheduler is used. Default is False.
        n_oscillate : int, optional
            The number of epochs the learning rate is descreased before being reset to maximum. Default is 100.
        lr_min : float, optional
            The minimum learning rate that is scheduled. Default is 1e-5.
        """
        self.optimiser = optim.Adam(self.model.parameters(), lr=lr, weight_decay=reg)
        if scheduler:
            self.scheduler = optim.lr_scheduler.CosineAnnealingLR(self.optimiser, T_max=n_oscillate, eta_min=lr_min)
        if load:
            print("=> Shaun has been restored to an earlier save.")
            self.load_checkpoint(load_pth)
        #=====================================================================#
        # dataset and data loader creation
        # (requires load_data() to have been called first -- it populates
        # self.train_in/out and self.val_in/out)
        train_dataset = TensorDataset(torch.from_numpy(self.train_in), torch.from_numpy(self.train_out))
        val_dataset = TensorDataset(torch.from_numpy(self.val_in), torch.from_numpy(self.val_out))
        train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
        val_loader = DataLoader(val_dataset, batch_size=batch_size, shuffle=True)
        print("=> created data loaders")
        #=====================================================================#
        # initialisation of the plotting environment:
        # row 0 holds the three image panels, rows 1-3 hold the loss curves,
        # each with a twinned y-axis for the second quantity.
        fig = plt.figure(figsize=(9,9), constrained_layout=True)
        gs = fig.add_gridspec(nrows=4, ncols=3)
        pt_ax = fig.add_subplot(gs[1,:])
        lt_ax = pt_ax.twinx()
        t_ax = fig.add_subplot(gs[2,:])
        v_ax = t_ax.twinx()
        pv_ax = fig.add_subplot(gs[3,:])
        lv_ax = pv_ax.twinx()
        bi_ax = fig.add_subplot(gs[:1,0])
        ci_ax = fig.add_subplot(gs[:1,1])
        ti_ax = fig.add_subplot(gs[:1,2])
        pt_ax.set_ylabel("Perceptual loss", color=pt_vibrant["teal"])
        lt_ax.set_ylabel("L2 loss", color=pt_vibrant["red"])
        t_ax.set_ylabel("Training loss", color=pt_vibrant["cyan"])
        v_ax.set_ylabel("Validation loss", color=pt_vibrant["orange"])
        pv_ax.set_ylabel("Perceptual loss", color=pt_vibrant["teal"])
        lv_ax.set_ylabel("L2 loss", color=pt_vibrant["red"])
        bi_ax.set_xticks([])
        bi_ax.set_yticks([])
        ci_ax.set_xticks([])
        ci_ax.set_yticks([])
        ti_ax.set_xticks([])
        ti_ax.set_yticks([])
        pt_ax.grid(True)
        t_ax.grid(True)
        pv_ax.grid(True)
        print("=> the interactive plotting environment has been created")
        fig.show()
        fig.canvas.draw()
        #=====================================================================#
        # define lists to store the different losses in
        perct_losses, mset_losses, train_losses, percv_losses, msev_losses, val_losses = [], [], [], [], [], []
        #=====================================================================#
        # do the training and validation
        t_init = time()
        for j in range(num_epochs):
            # Epoch counter bookkeeping: always bump after the first loop
            # iteration; also bump on the first iteration when resuming so the
            # restored epoch is not repeated.
            if j != 0:
                self.current_epoch += 1
            if j == 0 and load:
                self.current_epoch += 1
            pt, lt, t, plt_blr, plt_gen, plt_img = self.train(train_loader, scheduler=scheduler)
            perct_losses.append(pt)
            mset_losses.append(lt)
            train_losses.append(t)
            pv, lv, v = self.validation(val_loader)
            percv_losses.append(pv)
            msev_losses.append(lv)
            val_losses.append(v)
            t_now = round(time() - t_init, 3)
            #======================================================================
            # save the model (a checkpoint is written after every epoch)
            self.losses = {
                "train_p" : perct_losses,
                "train_l" : mset_losses,
                "train_c" : train_losses,
                "val_p" : percv_losses,
                "val_l" : msev_losses,
                "val_c" : val_losses
            }
            self.checkpoint()
            self.save_checkpoint()
            #======================================================================
            # plot the results
            fig.suptitle(f"Time elapsed {t_now} s after epoch {self.current_epoch}")
            pt_ax.set_ylabel("Perceptual loss", color=pt_vibrant["teal"])
            lt_ax.set_ylabel("L2 loss", color=pt_vibrant["red"])
            pt_ax.semilogy(np.arange(j+1), perct_losses, color=pt_vibrant["teal"])
            lt_ax.semilogy(np.arange(j+1), mset_losses, color=pt_vibrant["red"])
            t_ax.set_ylabel("Training loss", color=pt_vibrant["cyan"])
            v_ax.set_ylabel("Validation loss", color=pt_vibrant["orange"])
            t_ax.semilogy(np.arange(j+1), train_losses, color=pt_vibrant["cyan"])
            v_ax.semilogy(np.arange(j+1), val_losses, color=pt_vibrant["orange"])
            pv_ax.set_ylabel("Perceptual loss", color=pt_vibrant["teal"])
            lv_ax.set_ylabel("L2 loss", color=pt_vibrant["red"])
            pv_ax.semilogy(np.arange(j+1), percv_losses, color=pt_vibrant["teal"])
            lv_ax.semilogy(np.arange(j+1), msev_losses, color=pt_vibrant["red"])
            bi_ax.imshow(plt_blr, cmap="Greys_r")
            ci_ax.imshow(plt_gen, cmap="Greys_r")
            ti_ax.imshow(plt_img, cmap="Greys_r")
            fig.canvas.draw()
/**
 * This method is called by SDL using JNI.
 *
 * Blocks until the entire buffer has been handed to the AudioTrack,
 * retrying partial writes and backing off briefly when the track's
 * internal buffer is full.
 */
public static void audioWriteShortBuffer(short[] buffer) {
    if (mAudioTrack == null) {
        Log.e(TAG, "Attempted to make audio call with uninitialized audio!");
        return;
    }

    int offset = 0;
    while (offset < buffer.length) {
        int written = mAudioTrack.write(buffer, offset, buffer.length - offset);
        if (written > 0) {
            offset += written;
        } else if (written == 0) {
            // Track buffer full: yield briefly, then retry the same range.
            try {
                Thread.sleep(1);
            } catch (InterruptedException e) {
            }
        } else {
            // Negative return is an error code from AudioTrack; give up.
            Log.w(TAG, "SDL audio: error return from write(short)");
            return;
        }
    }
}
// Barrel module: gathers the toolbar-related Vue single-file components and
// re-exports them as one default object, so consumers can register them all
// with a single import.
import Header from "./Header.vue";
import SizeSlider from "./SizeSlider.vue";
import SearchField from "./SearchField.vue";
import SortingField from "./SortingField.vue";
import FilterButton from "./FilterButton.vue";

export default { Header, SizeSlider, SearchField, SortingField, FilterButton };
def bytes(b):
    """Print `b` as a human-readable size in both base-1024 (B, KiB, ...)
    and base-1000 (B, KB, ...) units, with two decimal places.

    Note: intentionally shadows the built-in ``bytes`` -- kept for backward
    compatibility with existing callers.
    """
    def humanize(value, base, suffices=()):
        # Scale the value down one suffix at a time until it fits below
        # `base`, but never past the last suffix. (The previous version
        # divided once more on the final iteration, so values >=
        # base**len(suffices) were mis-scaled while still labelled with
        # the largest suffix.)
        bb = float(int(value))
        for i, suffix in enumerate(suffices):
            if bb < base or i == len(suffices) - 1:
                break
            bb /= float(base)
        return "%.2f %s" % (bb, suffix)

    print("Base 1024: ", humanize(
        b, 1024, ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB']))
    print("Base 1000: ", humanize(
        b, 1000, ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB']))
// Repository: ZmnSCPxj/cldcb
// Smoke tests for the Net/ wrappers: Fd/SocketFd ownership semantics,
// direct and SOCKS5-proxied TCP connections, a localhost listener
// round-trip, and a socketpair round-trip. Some sections depend on the
// host environment (network access, Tor, open firewall ports) and are
// gated by compile-time if(0)/if(1) switches.
#include<assert.h>
#include<cstdint>
#include<fcntl.h>
#include<future>
#include<sys/stat.h>
#include<sys/types.h>
#include<thread>
#include<unistd.h>
#include<vector>

#include"Net/DirectConnector.hpp"
#include"Net/Fd.hpp"
#include"Net/Listener.hpp"
#include"Net/ProxyConnector.hpp"
#include"Net/SocketFd.hpp"
#include"Net/socketpair.hpp"
#include"Secp256k1/Random.hpp"
#include"Util/Logger.hpp"
#include"Util/make_unique.hpp"

// Logger that discards everything; the listener test does not care
// about log output.
class NullLogger : public Util::Logger {
public:
	void log(LogLevel, std::string) override { }
};

int main() {
	// Invalid fd (-1) must convert to false both as Fd and as SocketFd.
	{
		auto fd = Net::Fd(-1);
		assert(!fd);
		assert(!((bool) fd));
		auto sfd = Net::SocketFd(std::move(fd));
		assert(!fd);
		assert(!sfd);
		assert(!((bool)sfd));
	}
	// Valid fd: truthiness, self-move-assignment safety, ownership
	// transfer into SocketFd, and release().
	/* Only works on nixlikes.  */
	{
		auto fd = Net::Fd(open("/dev/null", O_RDONLY));
		assert(fd);
		fd = std::move(fd);
		assert(fd);
		auto sfd = Net::SocketFd(std::move(fd));
		assert(!fd);
		assert(sfd);
		sfd = std::move(sfd);
		assert(sfd);
		close(sfd.release());
		assert(!sfd);
	}
	/* Only works if you have normal network access...  */
	if (1) /* Set to 0 if you do not have normal network access.  */
	{
		auto connector = Net::DirectConnector();
		auto sfd = connector.connect("www.google.com", 80);
		assert(sfd);
	}
	/* Only works if you have Tor installed and set its SOCKS5 proxy
	 * to 9050.
	 * Does not work in valgrind since we are too slow in valgrind
	 * mode.
	 */
	if (0) /* Set to 1 if you have Tor installed and running on 9050 and not on valgrind.  */
	{
		auto proxy_port = 9050;
		auto connector = Net::ProxyConnector
			( Util::make_unique<Net::DirectConnector>()
			, "127.0.0.1"
			, proxy_port
			);
		errno = 0;
		auto sfd = connector.connect("www.torproject.org", 443);
		assert(sfd);
	}
	/* Only works if the port given below is allowed by your
	 * firewall, and you are not already having a listener
	 * on that port.
	 */
	if (1) /* Set to 0 if your firewall disallows or otherwise cannot use the port.  */
	{
		auto port = 29736;
		auto len = 6;
		Secp256k1::Random rand;
		/* We need to wait for the server to establish
		 * the listener before we can connect to it.
		 * This promise synchronizes the server and the
		 * client threads.
		 */
		auto promise = std::promise<void>();
		// Random payload to echo through the connection.
		auto data = ([&rand, len]() {
			auto data = std::vector<std::uint8_t>();
			for (auto i = 0; i < len; ++i)
				data.push_back(rand.get());
			return data;
		})();
		// Server: accept one connection and echo `len` bytes back.
		auto server = std::thread([&promise, port, len]() {
			auto logger = NullLogger();
			auto listener = Net::Listener(port, logger);
			promise.set_value();
			auto sock = listener.accept();
			assert(sock);
			auto data = std::vector<std::uint8_t>(len);
			auto rres = ::read(sock.get(), &data[0], len);
			assert(rres == ssize_t(len));
			auto wres = ::write(sock.get(), &data[0], len);
			assert(wres == ssize_t(len));
		});
		// Client: connect, send the payload, and expect it echoed.
		auto client = std::thread([&promise, &data, port, len]() {
			auto connector = Net::DirectConnector();
			promise.get_future().get();
			auto sock = connector.connect("127.0.0.1", port);
			assert(sock);
			auto wres = ::write(sock.get(), &data[0], len);
			assert(wres == ssize_t(len));
			auto ret_data = std::vector<std::uint8_t>(len);
			auto rres = ::read(sock.get(), &ret_data[0], len);
			assert(rres == ssize_t(len));
			assert(data == ret_data);
		});
		server.join();
		client.join();
	}
	// Same echo round-trip, but over a socketpair -- no network needed.
	{
		auto len = 6;
		Secp256k1::Random rand;
		auto socks = Net::socketpair();
		auto& ssock = socks.first;
		auto& csock = socks.second;
		auto data = ([&rand, len]() {
			auto data = std::vector<std::uint8_t>();
			for (auto i = 0; i < len; ++i)
				data.push_back(rand.get());
			return data;
		})();
		auto server = std::thread([&ssock, len]() {
			auto& sock = ssock;
			assert(sock);
			auto data = std::vector<std::uint8_t>(len);
			auto rres = ::read(sock.get(), &data[0], len);
			assert(rres == ssize_t(len));
			auto wres = ::write(sock.get(), &data[0], len);
			assert(wres == ssize_t(len));
			/* Close socket in the thread, else would block.  */
			sock.reset();
		});
		auto client = std::thread([&csock, &data, len]() {
			auto& sock = csock;
			assert(sock);
			auto wres = ::write(sock.get(), &data[0], len);
			assert(wres == ssize_t(len));
			auto ret_data = std::vector<std::uint8_t>(len);
			auto rres = ::read(sock.get(), &ret_data[0], len);
			assert(rres == ssize_t(len));
			assert(data == ret_data);
			/* Close socket in the thread, else would block.  */
			sock.reset();
		});
		server.join();
		client.join();
	}
	return 0;
}
/**
 * This class provides the user interface to manage proposal links features ( manage, create, modify, remove )
 */
@Controller( controllerJsp = "ManageProposalLinks.jsp", controllerPath = "jsp/admin/plugins/participatoryideation/", right = "PARTICIPATORYIDEATION_LINKS_MANAGEMENT" )
public class ProposalLinksJspBean extends ManageProposalLinksJspBean
{
    // //////////////////////////////////////////////////////////////////////////
    // Constants
    private static final long serialVersionUID = 1L;

    // templates
    private static final String TEMPLATE_MANAGE_LINKS = "/admin/plugins/participatoryideation/manage_links.html";
    private static final String TEMPLATE_CREATE_LINK = "/admin/plugins/participatoryideation/create_link.html";
    private static final String TEMPLATE_CREATE_SEVERAL_LINKS = "/admin/plugins/participatoryideation/create_several_links.html";
    private static final String TEMPLATE_MODIFY_LINK = "/admin/plugins/participatoryideation/modify_link.html";

    // Parameters
    private static final String PARAMETER_ID_LINK = "id";
    private static final String PARAMETER_FILTER_CODE_CAMPAIGN = "filter_code_campaign";
    private static final String PARAMETER_FILTER_CODE_PROPOSAL = "filter_code_proposal";
    private static final String PARAMETER_FILTER_TITLE = "filter_title";

    // Properties for page titles
    private static final String PROPERTY_PAGE_TITLE_MANAGE_LINKS = "participatoryideation.manage_links.pageTitle";
    private static final String PROPERTY_PAGE_TITLE_MODIFY_LINK = "participatoryideation.modify_link.pageTitle";
    private static final String PROPERTY_PAGE_TITLE_CREATE_LINK = "participatoryideation.create_link.pageTitle";
    private static final String PROPERTY_PAGE_TITLE_CREATE_SEVERAL_LINKS = "participatoryideation.create_several_links.pageTitle";

    // Markers
    private static final String MARK_LINKS_LIST = "links_list";
    private static final String MARK_LINK = "link";
    private static final String MARK_SEVERAL_LINKS_PARENT_CODE_CAMPAIN = "severalLinksParentCodeCampain";
    private static final String MARK_SEVERAL_LINKS_PARENT_CODE_PROPOSAL = "severalLinksParentCodeProposal";
    private static final String MARK_SEVERAL_LINKS_CHILD_CODE_CAMPAIN = "severalLinksChildCodeCampain";
    private static final String MARK_SEVERAL_LINKS_CHILD_CODES_PROPOSALS = "severalLinksChildCodesProposals";
    // private static final String MARK_LIST_CAMPAIGNS = "listCampaigns";
    private static final String MARK_FILTER_CODE_CAMPAIGN = "filter_code_campaign";
    private static final String MARK_FILTER_CODE_PROPOSAL = "filter_code_proposal";
    private static final String MARK_FILTER_TITLE = "filter_title";

    private static final String JSP_MANAGE_LINKS = "jsp/admin/plugins/participatoryideation/ManageProposalLinks.jsp";

    private static final String VALIDATION_ATTRIBUTES_PREFIX = "participatoryideation.model.entity.link.attribute.";

    // Views
    private static final String VIEW_MANAGE_LINKS = "manageLinks";
    private static final String VIEW_CREATE_LINK = "createLink";
    private static final String VIEW_CREATE_SEVERAL_LINKS = "createSeveralLinks";
    private static final String VIEW_MODIFY_LINK = "modifyLink";

    // Actions
    private static final String ACTION_CREATE_LINK = "createLink";
    private static final String ACTION_CREATE_SEVERAL_LINKS = "createSeveralLinks";
    private static final String ACTION_MODIFY_LINK = "modifyLink";
    private static final String ACTION_REMOVE_LINK = "removeLink";
    private static final String ACTION_CONFIRM_REMOVE_LINK = "confirmRemoveLink";
    private static final String ACTION_SEARCH_LINK = "searchLink";
    private static final String ACTION_CANCEL_SEARCH = "cancelSearch";

    // Messages
    private static final String MESSAGE_CONFIRM_REMOVE_LINK = "participatoryideation.message.confirmRemoveLink";
    private static final String MESSAGE_ERROR_CHILD_CODES_PROPOSALS_MANDATORY = "participatoryideation.validation.link.parentId.childCodesProposalsMandatory";
    // private static final String MESSAGE_ERROR_NOT_AN_INTEGER = "participatoryideation.validation.link.parentId.notAnInteger";

    // Infos
    private static final String INFO_LINK_CREATED = "participatoryideation.info.link.created";
    private static final String INFO_SEVERAL_LINKS_CREATED = "participatoryideation.info.several.links.created";
    private static final String INFO_LINK_UPDATED = "participatoryideation.info.link.updated";
    private static final String INFO_LINK_REMOVED = "participatoryideation.info.link.removed";

    // Session variable to store working values
    private Link _link;
    private String _parentCodeCampaign;
    private int _parentCodeProposal;
    private String _childCodeCampaign;
    private String _childCodesProposals;
    private LinkSearcher _linkSearcher;

    // Shared fallback searcher used when no session-scoped search is active.
    private static LinkSearcher defaultSearcher;
    static
    {
        defaultSearcher = new LinkSearcher( );
    }

    /* *********************************************************************************** */
    /* * MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MA * */
    /* * MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MANAGE MA * */
    /* *********************************************************************************** */

    /**
     * Build the Manage View
     * 
     * @param request
     *            The HTTP request
     * @return The page
     */
    @View( value = VIEW_MANAGE_LINKS, defaultView = true )
    public String getManageLinks( HttpServletRequest request )
    {
        _link = null;

        // Use the session search criteria when present, else the default (empty) searcher.
        LinkSearcher currentSearcher = _linkSearcher != null ? _linkSearcher : defaultSearcher;
        List<Link> listLinks = (List<Link>) LinkHome.getLinksListSearch( currentSearcher );
        Map<String, Object> model = getPaginatedListModel( request, MARK_LINKS_LIST, listLinks, JSP_MANAGE_LINKS );
        // Collection<Campaign> listCampaigns = CampaignHome.getCampaignsList( );

        // Re-populate the filter fields so the search form keeps its values.
        if ( _linkSearcher != null )
        {
            if ( StringUtils.isNotBlank( _linkSearcher.getCodeCampaign( ) ) )
            {
                model.put( MARK_FILTER_CODE_CAMPAIGN, _linkSearcher.getCodeCampaign( ) );
            }
            if ( _linkSearcher.getCodeProposal( ) != null )
            {
                model.put( MARK_FILTER_CODE_PROPOSAL, _linkSearcher.getCodeProposal( ) );
            }
            if ( StringUtils.isNotBlank( _linkSearcher.getTitle( ) ) )
            {
                model.put( MARK_FILTER_TITLE, _linkSearcher.getTitle( ) );
            }
        }

        IdeationStaticService.getInstance( ).fillAllStaticContent( model );

        return getPage( PROPERTY_PAGE_TITLE_MANAGE_LINKS, TEMPLATE_MANAGE_LINKS, model );
    }

    /* *********************************************************************************** */
    /* * CREATE_LINK CREATE_LINK CREATE_LINK CREATE_LINK CREATE_LINK CREATE_LINK C * */
    /* * CREATE_LINK CREATE_LINK CREATE_LINK CREATE_LINK CREATE_LINK CREATE_LINK C * */
    /* *********************************************************************************** */

    /**
     * Returns the form to create a link
     * 
     * @param request
     *            The Http request
     * @return the html code of the link form
     */
    @View( VIEW_CREATE_LINK )
    public String getCreateLink( HttpServletRequest request )
    {
        _link = ( _link != null ) ? _link : new Link( );

        Map<String, Object> model = getModel( );
        model.put( MARK_LINK, _link );

        IdeationStaticService.getInstance( ).fillAllStaticContent( model );

        return getPage( PROPERTY_PAGE_TITLE_CREATE_LINK, TEMPLATE_CREATE_LINK, model );
    }

    /**
     * Process the data capture form of a new link
     * 
     * @param request
     *            The Http Request
     * @return The Jsp URL of the process result
     */
    @Action( ACTION_CREATE_LINK )
    public String doCreateLink( HttpServletRequest request )
    {
        populate( _link, request );

        // Check constraints
        // NOTE(review): on validation failure this redirects to the MODIFY view
        // rather than back to the create form -- confirm this is intended.
        if ( !validateBean( _link, VALIDATION_ATTRIBUTES_PREFIX ) )
        {
            return redirect( request, VIEW_MODIFY_LINK, PARAMETER_ID_LINK, _link.getId( ) );
        }

        if ( determineProposalsIdFromCodes( _link, request ) )
        {
            return redirectView( request, ACTION_CREATE_LINK );
        }

        LinkHome.create( _link );
        addInfo( INFO_LINK_CREATED, getLocale( ) );

        return redirectView( request, VIEW_MANAGE_LINKS );
    }

    /* *********************************************************************************** */
    /* * CREATE_SEVERAL_LINK CREATE_SEVERAL_LINK CREATE_SEVERAL_LINK CREATE_SEVERAL_L * */
    /* * CREATE_SEVERAL_LINK CREATE_SEVERAL_LINK CREATE_SEVERAL_LINK CREATE_SEVERAL_L * */
    /* *********************************************************************************** */

    /**
     * Returns the form to create several links
     * 
     * @param request
     *            The Http request
     * @return the html code of the form
     */
    @View( VIEW_CREATE_SEVERAL_LINKS )
    public String getCreateSeveralLinks( HttpServletRequest request )
    {
        Map<String, Object> model = getModel( );

        model.put( MARK_SEVERAL_LINKS_PARENT_CODE_CAMPAIN, _parentCodeCampaign );
        model.put( MARK_SEVERAL_LINKS_PARENT_CODE_PROPOSAL, _parentCodeProposal ); // If -1, put ""
        model.put( MARK_SEVERAL_LINKS_CHILD_CODE_CAMPAIN, _childCodeCampaign );
        model.put( MARK_SEVERAL_LINKS_CHILD_CODES_PROPOSALS, _childCodesProposals );

        IdeationStaticService.getInstance( ).fillAllStaticContent( model );

        return getPage( PROPERTY_PAGE_TITLE_CREATE_SEVERAL_LINKS, TEMPLATE_CREATE_SEVERAL_LINKS, model );
    }

    /**
     * Process the data capture form of several new links
     * 
     * @param request
     *            The Http Request
     * @return The Jsp URL of the process result
     */
    @Action( ACTION_CREATE_SEVERAL_LINKS )
    public String doCreateSeveralLinks( HttpServletRequest request )
    {
        _parentCodeCampaign = request.getParameter( "parentCodeCampaign" );
        _parentCodeProposal = parseLinkProposalCode( request.getParameter( "parentCodeProposal" ) );
        _childCodeCampaign = request.getParameter( "childCodeCampaign" );
        _childCodesProposals = request.getParameter( "childCodesProposals" );

        List<Link> linksToCreate = new ArrayList<Link>( );
        boolean error = false;

        // Split children codes, then process the add for each of one
        if ( StringUtils.isEmpty( _childCodesProposals ) )
        {
            addError( MESSAGE_ERROR_CHILD_CODES_PROPOSALS_MANDATORY, request.getLocale( ) );
            return redirectView( request, ACTION_CREATE_SEVERAL_LINKS );
        }

        String [ ] severaLlinkCodes = _childCodesProposals.split( ";" );

        // Testing all number before creating in SGBD.
        for ( String childCodeProposalStr : severaLlinkCodes )
        {
            int childCodeProposal = parseLinkProposalCode( childCodeProposalStr );

            if ( childCodeProposal == -1 )
            {
                error = true;
            }
            else
            {
                Link link = new Link( );

                link.setParentCodeCampaign( _parentCodeCampaign );
                link.setParentCodeProposal( _parentCodeProposal );
                link.setChildCodeCampaign( _childCodeCampaign );
                link.setChildCodeProposal( childCodeProposal );

                if ( determineProposalsIdFromCodes( link, request ) )
                {
                    error = true;
                }
                else
                {
                    linksToCreate.add( link );
                }
            }
        }

        // If no error, creating links in SGBD.
        if ( error )
        {
            return redirectView( request, ACTION_CREATE_SEVERAL_LINKS );
        }

        for ( Link linkToCreate : linksToCreate )
        {
            LinkHome.create( linkToCreate );
        }

        addInfo( INFO_SEVERAL_LINKS_CREATED, getLocale( ) );

        return redirectView( request, VIEW_MANAGE_LINKS );
    }

    /* *********************************************************************************** */
    /* * REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE RE * */
    /* * REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE REMOVE RE * */
    /* *********************************************************************************** */

    /**
     * Manages the removal form of a link whose identifier is in the http request
     * 
     * @param request
     *            The Http request
     * @return the html code to confirm
     */
    @Action( ACTION_CONFIRM_REMOVE_LINK )
    public String getConfirmRemoveLink( HttpServletRequest request )
    {
        int nId = Integer.parseInt( request.getParameter( PARAMETER_ID_LINK ) );
        UrlItem url = new UrlItem( getActionUrl( ACTION_REMOVE_LINK ) );
        url.addParameter( PARAMETER_ID_LINK, nId );

        String strMessageUrl = AdminMessageService.getMessageUrl( request, MESSAGE_CONFIRM_REMOVE_LINK, url.getUrl( ), AdminMessage.TYPE_CONFIRMATION );

        return redirect( request, strMessageUrl );
    }

    /**
     * Handles the removal form of a link
     * 
     * @param request
     *            The Http request
     * @return the jsp URL to display the form to manage links
     */
    @Action( ACTION_REMOVE_LINK )
    public String doRemoveLink( HttpServletRequest request )
    {
        int nId = Integer.parseInt( request.getParameter( PARAMETER_ID_LINK ) );
        LinkHome.remove( nId );

        addInfo( INFO_LINK_REMOVED, getLocale( ) );

        return redirectView( request, VIEW_MANAGE_LINKS );
    }

    /* *********************************************************************************** */
    /* * MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MO * */
    /* * MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MODIFY MO * */
    /* *********************************************************************************** */

    /**
     * Returns the form to update info about a link
     * 
     * @param request
     *            The Http request
     * @return The HTML form to update info
     */
    @View( VIEW_MODIFY_LINK )
    public String getModifyLink( HttpServletRequest request )
    {
        int nId = Integer.parseInt( request.getParameter( PARAMETER_ID_LINK ) );

        // Lazily (re)load the link only when the session copy is stale.
        if ( _link == null || ( _link.getId( ) != nId ) )
        {
            _link = LinkHome.findByPrimaryKey( nId );
        }

        Map<String, Object> model = getModel( );
        model.put( MARK_LINK, _link );

        IdeationStaticService.getInstance( ).fillAllStaticContent( model );

        return getPage( PROPERTY_PAGE_TITLE_MODIFY_LINK, TEMPLATE_MODIFY_LINK, model );
    }

    /**
     * Process the change form of a link
     * 
     * @param request
     *            The Http request
     * @return The Jsp URL of the process result
     */
    @Action( ACTION_MODIFY_LINK )
    public String doModifyLink( HttpServletRequest request )
    {
        populate( _link, request );

        // Check constraints
        if ( !validateBean( _link, VALIDATION_ATTRIBUTES_PREFIX ) )
        {
            return redirect( request, VIEW_MODIFY_LINK, PARAMETER_ID_LINK, _link.getId( ) );
        }

        if ( determineProposalsIdFromCodes( _link, request ) )
        {
            return redirectView( request, ACTION_MODIFY_LINK );
        }

        LinkHome.update( _link );
        addInfo( INFO_LINK_UPDATED, getLocale( ) );

        return redirectView( request, VIEW_MANAGE_LINKS );
    }

    /* *********************************************************************************** */
    /* * SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SE * */
    /* * SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SEARCH SE * */
    /* *********************************************************************************** */

    /**
     * Process to search a link
     * 
     * @param request
     *            The Http request
     * @return The Jsp URL of the process result
     */
    @Action( value = ACTION_SEARCH_LINK )
    public String doSearchLink( HttpServletRequest request )
    {
        if ( _linkSearcher == null )
        {
            _linkSearcher = new LinkSearcher( );
        }

        // For each filter: absent parameter leaves the criterion untouched,
        // blank clears it, anything else sets it.
        String strCodeCampaign = request.getParameter( PARAMETER_FILTER_CODE_CAMPAIGN );
        if ( strCodeCampaign != null )
        {
            if ( StringUtils.isBlank( strCodeCampaign ) )
            {
                _linkSearcher.setCodeCampaign( null );
            }
            else
            {
                _linkSearcher.setCodeCampaign( strCodeCampaign );
            }
        }

        String strCodeProposal = request.getParameter( PARAMETER_FILTER_CODE_PROPOSAL );
        if ( strCodeProposal != null )
        {
            if ( StringUtils.isBlank( strCodeProposal ) )
            {
                _linkSearcher.setCodeProposal( null );
            }
            else
            {
                // NOTE(review): unvalidated parseInt -- a non-numeric filter
                // value will throw NumberFormatException; confirm upstream
                // validation.
                _linkSearcher.setCodeProposal( Integer.parseInt( strCodeProposal ) );
            }
        }

        String strTitle = request.getParameter( PARAMETER_FILTER_TITLE );
        if ( strTitle != null )
        {
            if ( StringUtils.isBlank( strTitle ) )
            {
                _linkSearcher.setTitle( null );
            }
            else
            {
                _linkSearcher.setTitle( strTitle );
            }
        }

        return redirectView( request, VIEW_MANAGE_LINKS );
    }

    /**
     * Reset the search
     * 
     * @param request
     *            The HTTP request
     * @return The page
     */
    @Action( value = ACTION_CANCEL_SEARCH )
    public String doCancelSearch( HttpServletRequest request )
    {
        _linkSearcher = null;
        return redirectView( request, VIEW_MANAGE_LINKS );
    }

    /* *********************************************************************************** */
    /* * UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS U * */
    /* * UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS UTILS U * */
    /* *********************************************************************************** */

    /**
     * Parse the string and return the result as an int. If not parsable, returns -1.
     */
    private int parseLinkProposalCode( String _str )
    {
        try
        {
            return Integer.parseInt( _str );
        }
        catch( NumberFormatException ex )
        {
            addError( "Can not parse this string as an integer : '" + _str + "'." );
            return -1;
        }
    }

    /**
     * Populate parentId and childId of link, from codeCampaigns and codeProposals
     * 
     * @param _link
     *            The link
     * @param request
     *            The HTTP request
     * @return True if an error occured
     */
    private boolean determineProposalsIdFromCodes( Link _link, HttpServletRequest request )
    {
        boolean isError = false;

        // Calculating technical ids from campaign and idea codes
        Proposal parentProposal = ProposalHome.findByCodes( _link.getParentCodeCampaign( ), _link.getParentCodeProposal( ) );
        if ( parentProposal == null )
        {
            // addError( MESSAGE_ERROR_NO_SUCH_PARENT, request.getLocale());
            addError( "Can not find parent proposal : " + _link.getParentCodeCampaign( ) + "-" + _link.getParentCodeProposal( ) + "." );
            isError = true;
        }
        else
        {
            _link.setParentId( parentProposal.getId( ) );
        }

        Proposal childProposal = ProposalHome.findByCodes( _link.getChildCodeCampaign( ), _link.getChildCodeProposal( ) );
        if ( childProposal == null )
        {
            // addError( MESSAGE_ERROR_NO_SUCH_CHILD, request.getLocale());
            addError( "Can not find child proposal : " + _link.getChildCodeCampaign( ) + "-" + _link.getChildCodeProposal( ) + "." );
            isError = true;
        }
        else
        {
            _link.setChildId( childProposal.getId( ) );
        }

        return isError;
    }
}
def pivToStart(card):
    """Snap the card's pivot onto the world-space pivot of its start joint.

    Queries the pivot of ``card.start()`` in world space and applies the
    translation portion (first three components) back onto the card.
    """
    start_pivot = xform(card.start(), q=True, ws=True, piv=True)
    xform(card, ws=True, piv=start_pivot[:3])
/** Explicitly calls layout on the current root {@link LithoView} */
public LithoViewRule layout() {
  final LithoView view = getLithoView();
  final int measuredWidth = view.getMeasuredWidth();
  final int measuredHeight = view.getMeasuredHeight();
  view.layout(0, 0, measuredWidth, measuredHeight);
  return this;
}
// using default import import cv from '@u4/opencv4nodejs'; import { getResource } from './utils'; const file = getResource('Lenna.png'); console.log('loading ', file); const image = cv.imread(file); console.log('Lenna.png loaded'); const processedImage = cv.applyColorMap(image, cv.COLORMAP_AUTUMN); const windowName = "applyColorMap"; cv.imshow(windowName, processedImage); cv.setWindowProperty(windowName, cv.WND_PROP_FULLSCREEN, cv.WINDOW_FULLSCREEN) // console.log('FULLSCREEN:', cv.getWindowProperty(windowName, cv.WND_PROP_FULLSCREEN)); // console.log('AUTOSIZE:', cv.getWindowProperty(windowName, cv.WND_PROP_AUTOSIZE)); // console.log('VISIBLE:', cv.getWindowProperty(windowName, cv.WND_PROP_VISIBLE)); // cv.setWindowProperty(windowName, cv.WND_PROP_FULLSCREEN, cv.WINDOW_NORMAL) cv.waitKey();
<filename>src/third_party/swfparser/DoABCContext.cpp #include "DoABCContext.h" #include <cstdlib> namespace SWF { DoABCContext::DoABCContext() { total_len_ = 0; } DoABCContext::~DoABCContext() { for (int i = 0; i < context_.size(); ++i) { delete context_[i]; } } std::vector<unsigned char *>DoABCContext::getContext() { return context_; } std::vector<int> DoABCContext::getLen() { return len_vec_; } std::vector<unsigned char *> DoABCContext::getContext() const { return context_; } std::vector<int> DoABCContext::getLen() const { return len_vec_; } void DoABCContext::setContext(unsigned char *buf, int length) { if (NULL == buf || 0 == length) { return; } context_.push_back(buf); len_vec_.push_back(length); total_len_ += length; } int DoABCContext::getTotalLen() { return total_len_; } }
def create_modis_command(cli: Group) -> Command:
    """Registers the `modis` command group (create-catalog, create-item,
    cogify) on the given click Group and returns it."""
    @cli.group("modis", short_help=("Commands for working with MODIS."))
    def modis() -> None:
        pass

    @modis.command("create-catalog",
                   short_help="Creates a STAC Catalog with contents "
                   "defined by urls in the INFILE")
    @click.argument("INFILE")
    @click.argument("OUTDIR")
    @click.option("-i", "--id", help="The ID of the output catalog", default="modis")
    @click.option("-t", "--title", help="The title of the output catalog", default="MODIS")
    @click.option(
        "-d",
        "--description",
        help="The description of the output catalog",
        default="MODIS STAC Catalog containg a subset of MODIS assets")
    @click.option("--cogify/--no-cogify", help="Create COGs for each file", default=False)
    def create_collection_command(
        infile: str,
        outdir: str,
        id: str,
        title: str,
        description: str,
        cogify: bool,
    ) -> None:
        # INFILE is a newline-separated list of MODIS metadata hrefs.
        with open(infile) as f:
            hrefs = [line.strip() for line in f.readlines()]
        # version -> collection id -> items
        item_dict: defaultdict[str, defaultdict[
            str, List[Item]]] = defaultdict(lambda: defaultdict(list))
        collection_id_set = set()
        for href in hrefs:
            file = File(href)
            directory = os.path.dirname(href)
            # NOTE(review): splitext returns a (stem, ext) tuple, and
            # str.startswith(tuple) matches EITHER element — this was
            # probably meant to be prefix[0] (the stem). Confirm intent.
            prefix = os.path.splitext(os.path.basename(file.hdf_href))
            has_hdf = os.path.exists(file.hdf_href)
            has_tiffs = any(
                os.path.splitext(file_name)[1] == ".tif"
                and file_name.startswith(prefix)
                for file_name in os.listdir(directory))
            if has_tiffs:
                # COGs already exist next to the metadata; just point at them.
                cog_directory = os.path.abspath(directory)
            elif cogify:
                if has_hdf:
                    cog_directory = os.path.abspath(directory)
                else:
                    print(
                        f"WARNING: not cogifying {file.xml_href} because HDF file does not exist"
                    )
                    # NOTE(review): this disables cogify for ALL remaining
                    # hrefs in the loop, not just this one — confirm intended.
                    cogify = False
                    cog_directory = None
            else:
                cog_directory = None
            item = stac.create_item(href,
                                    cog_directory=cog_directory,
                                    create_cogs=cogify)
            item.set_self_href(href)
            item_dict[file.version][file.collection_id()].append(item)
            collection_id_set.add(file.collection_id())
        collection_ids = list(collection_id_set)
        collection_ids.sort()
        catalog = Catalog(id=id,
                          description=description,
                          title=title,
                          catalog_type=CatalogType.SELF_CONTAINED)
        # One sub-catalog per MODIS version, one collection per product.
        for version, collections in item_dict.items():
            version_catalog = Catalog(
                id=f"{id}-{version}",
                description=f"{description}, version {version}",
                title=f"{title}, version {version}",
                catalog_type=CatalogType.SELF_CONTAINED)
            for collection_id in collection_ids:
                if collection_id not in collections:
                    continue
                items = collections[collection_id]
                file = File(items[0].get_self_href())
                collection = stac.create_collection(str(file.product), version)
                platform_set = set()
                for item in items:
                    item.set_self_href(None)
                    collection.add_item(item)
                    for platform in item.common_metadata.platform.split(","):
                        platform_set.add(platform)
                platforms = list(platform_set)
                platforms.sort()
                # Summarize the union of platforms across the collection's items.
                collection.summaries.update(Summaries({"platform": platforms}))
                version_catalog.add_child(collection)
            catalog.add_child(version_catalog)
        catalog.normalize_hrefs(outdir)
        catalog.validate_all()
        catalog.make_all_asset_hrefs_relative()
        catalog.save()

    @modis.command("create-item",
                   short_help="Create a STAC Item from a MODIS metadata file")
    @click.argument("INFILE")
    @click.argument("OUTDIR")
    @click.option("-c",
                  "--cogify",
                  is_flag=True,
                  help="Convert the hdf into COGs.",
                  default=False)
    def create_item_command(infile: str, outdir: str, cogify: bool) -> None:
        # Build a single item, optionally attaching COG assets derived from
        # the source HDF, and write it to OUTDIR as <id>.json.
        item = stac.create_item(infile)
        item_path = os.path.join(outdir, "{}.json".format(item.id))
        item.set_self_href(item_path)
        if cogify:
            cog.add_cogs(item, outdir, create=True)
        item.validate()
        item.save_object()

    @modis.command("cogify")
    @click.argument("INFILE")
    @click.argument("OUTDIR")
    def cogify_command(infile: str, outdir: str) -> None:
        # Convert a single HDF file into COGs under OUTDIR.
        cog.cogify(infile, outdir)

    return modis
import sys
input=sys.stdin.readline
from collections import defaultdict as dc
from collections import Counter
from bisect import bisect_right, bisect_left
import math
from operator import itemgetter
from heapq import heapify, heappop, heappush
from queue import PriorityQueue as pq

# Probability that the first event (chance a/b per round) occurs before the
# second event (chance c/d), repeating rounds until one of them happens:
# P = x * sum_{k>=0} z^k = x / (1 - z) with z = probability both miss.
a, b, c, d = map(int, input().split())
p_first = a / b
p_second = c / d
both_miss = (1 - p_first) * (1 - p_second)
series_sum = 1 / (1 - both_miss)
print(p_first * series_sum)
package main

import (
	"database/sql"
	// "flag"
	"fmt"
	"github.com/cgrates/fsock"
	"github.com/garyburd/redigo/redis"
	_ "github.com/go-sql-driver/mysql"
	"log"
	"log/syslog"
	"time"
)

// redis connection / shared globals
var (
	err         error
	db          *sql.DB
	pool        *redis.Pool
	redisServer = "192.168.36.3:6379"
	// redisPassword = flag.String("", "", "")
)

// type DB struct {
// 	db *sql.DB
// }
// func (dbt *DB) fail(method, query string, err error) {
// 	if len(query) > 300 {
// 		query = "[query too large to print]"
// 	}
// 	log.Fatalf("Error on %s %s: %s", method, query, err.Error())
// }
// func (dbt *DB) myExec(query string, args ...interface{}) (res sql.Result) {
// 	res, err := dbt.db.Exec(query, args...)
// 	if err != nil {
// 		dbt.fail("Exec", query, err)
// 	}
// 	return res
// }
// func (dbt *DB) myQuery(query string, args ...interface{}) (rows *sql.Rows) {
// 	rows, err := dbt.db.Query(query, args...)
// 	if err != nil {
// 		dbt.fail("Query", query, err)
// 	}
// 	return rows
// }
// dbt := &DB{db}
// res := dbt.myExec("INSERT INTO haosoo_call(caller_staffId,destination_number,start_stamp,uuid) VALUES(?,?,?,?)", cname, destination, start, uuid)
// func init() {
// 	db, err = sql.Open("mysql", "root:<PASSWORD>@tcp(127.0.0.1:3306)/call?charset=utf8")
// 	if err != nil {
// 		log.Fatal(err)
// 	}
// 	db.SetMaxOpenConns(2000)
// 	db.SetMaxIdleConns(1000)
// }

// newPool builds a redis connection pool for the given server address,
// pinging connections on borrow to drop stale ones.
func newPool(server string) *redis.Pool {
	return &redis.Pool{
		MaxIdle:     3,
		IdleTimeout: 240 * time.Second,
		Dial: func() (redis.Conn, error) {
			c, err := redis.Dial("tcp", server)
			if err != nil {
				return nil, err
			}
			return c, err
		},
		TestOnBorrow: func(c redis.Conn, t time.Time) error {
			_, err := c.Do("PING")
			return err
		},
	}
}

// Formats the event as map and prints it out.
// CHANNEL_CREATE handler: records the call start in MySQL and publishes a
// "ring" notification on the caller's redis channel.
func printChannelCreate(eventStr string) {
	// Format the event from string into Go's map type
	eventMap := fsock.FSEventStrToMap(eventStr, []string{})
	uuid := eventMap["Channel-Call-UUID"]
	caller := eventMap["Caller-Caller-ID-Number"] // caller number; internal landline (office_number)
	cname := eventMap["Caller-Caller-ID-Name"]
	destination := eventMap["Caller-Destination-Number"]
	// user := eventMap["Caller-Username"]
	direction := eventMap["variable_direction"]
	// callee := eventMap["Caller-Callee-ID-Number"]
	// Call-origination scenarios. (Inbound: forwarding, switchboard, extension,
	// second-dial. Outbound: sales (prompted/unprompted customer), customer
	// service (customer rating), other departments.)
	// NOTE(review): a 4-char caller name with direction "inbound" is treated
	// as an outbound call here — confirm this convention against the dialplan.
	if len(cname) == 4 && direction == "inbound" { // outbound call
		fmt.Println("Channel-Call-UUID:" + uuid)
		fmt.Println(cname + " / " + caller + " --> " + destination)
		// fmt.Println("Caller-Caller-ID-Number:" + caller)
		fmt.Println("created ")
		start := time.Now().Format("2006-01-02 15:04:05")
		stmt, err := db.Prepare("INSERT INTO haosoo_call(subnum,caller_id_number,destination_number,start_stamp,uuid) VALUES(?,?,?,?,?)")
		if err != nil {
			log.Fatal(err)
		}
		res, err := stmt.Exec(cname, caller, destination, start, uuid)
		if err != nil {
			log.Fatal(err)
		}
		lastId, err := res.LastInsertId()
		if err != nil {
			log.Fatal(err)
		}
		rowCnt, err := res.RowsAffected()
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("ID = %d, affected = %d\n", lastId, rowCnt)
		c := pool.Get()
		// Close() returns the connection to the pool (a queue) for reuse;
		// it is not actually closed.
		defer c.Close()
		ring := fmt.Sprintf("ring:out,%s,%s,%s", caller, destination, uuid)
		redis.Bool(c.Do("PUBLISH", fmt.Sprintf("channel:%s", cname), ring))
	}
}

// CHANNEL_ANSWER handler: stamps answer time on the call row and publishes
// an "anwsered" (sic) notification.
func printChannelAnswer(eventStr string) {
	// Format the event from string into Go's map type
	// scenarios at answer time
	eventMap := fsock.FSEventStrToMap(eventStr, []string{})
	uuid := eventMap["Channel-Call-UUID"]
	caller := eventMap["Caller-Caller-ID-Number"]
	cname := eventMap["Caller-Caller-ID-Name"]
	destination := eventMap["Caller-Destination-Number"]
	direction := eventMap["variable_direction"]
	// callee := eventMap["Caller-Callee-ID-Number"]
	if len(cname) == 4 && len(destination) > 7 && direction == "inbound" {
		// if callee == "" {
		// 	callee = destination
		// }
		fmt.Println("Channel-Call-UUID:" + uuid)
		fmt.Println(caller + " --> " + destination + " answered")
		fmt.Println("answered")
		// fmt.Printf("%v", eventMap)
		anwser := time.Now().Format("2006-01-02 15:04:05") // (sic: variable name misspelled)
		// var cuuid int
		// db.QueryRow("select count(id) from haosoo_call where uuid=? ", uuid).Scan(&cuuid)
		stmt, err := db.Prepare("update haosoo_call set answer_stamp=? where uuid=?")
		if err != nil {
			log.Fatal(err)
		}
		res, err := stmt.Exec(anwser, uuid)
		if err != nil {
			log.Fatal(err)
		}
		lastId, err := res.LastInsertId()
		if err != nil {
			log.Fatal(err)
		}
		rowCnt, err := res.RowsAffected()
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("ID = %d, affected = %d\n", lastId, rowCnt)
		c := pool.Get()
		// Close() returns the connection to the pool for reuse.
		defer c.Close()
		redis.Bool(c.Do("PUBLISH", fmt.Sprintf("channel:%s", cname), "anwsered"))
	}
}

// CHANNEL_HANGUP_COMPLETE handler: stamps end time, computes billsec and the
// recording URL on the call row, then publishes a "hangup" notification.
func printChannelHungup(eventStr string) {
	// Format the event from string into Go's map type
	eventMap := fsock.FSEventStrToMap(eventStr, []string{})
	caller := eventMap["Caller-Caller-ID-Number"]
	uuid := eventMap["Channel-Call-UUID"]
	cname := eventMap["Caller-Caller-ID-Name"]
	// destination := eventMap["Caller-Destination-Number"]
	callee := eventMap["Caller-Callee-ID-Number"]
	direction := eventMap["variable_direction"]
	if len(cname) == 4 && direction == "inbound" {
		fmt.Println("Channel-Call-UUID:" + uuid)
		fmt.Println(caller + " --> " + callee + " hangup")
		fmt.Println(" hungup ")
		end := time.Now().Format("2006-01-02 15:04:05")
		// Date components build the recording URL path (.../YYYY/MM/DD/...).
		year := time.Now().Format("2006")
		month := time.Now().Format("01")
		day := time.Now().Format("02")
		stmt, err := db.Prepare("update haosoo_call set end_stamp=?, billsec=TIMESTAMPDIFF(SECOND,answer_stamp,end_stamp), url= CONCAT('http://rec.haosoo.cn/static/rec2/',?,'/',?,'/',?,'/',?,'_',destination_number,'_',uuid,'.wav' ) where answer_stamp is not null and uuid=?")
		if err != nil {
			log.Fatal(err)
		}
		res, err := stmt.Exec(end, year, month, day, caller, uuid)
		if err != nil {
			log.Fatal(err)
		}
		lastId, err := res.LastInsertId()
		if err != nil {
			log.Fatal(err)
		}
		rowCnt, err := res.RowsAffected()
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("ID = %d, affected = %d\n", lastId, rowCnt)
		c := pool.Get()
		// Close() returns the connection to the pool for reuse.
		defer c.Close()
		redis.Bool(c.Do("PUBLISH", fmt.Sprintf("channel:%s", cname), "hangup"))
	}
	// fmt.Printf("%v", eventMap)
}

// main wires up MySQL, the redis pool, syslog, and the FreeSWITCH event
// socket, then blocks reading events.
func main() {
	db, err = sql.Open("mysql", "root:haosoo8888@tcp(127.0.0.1:3306)/call?charset=utf8")
	if err != nil {
		log.Fatal(err)
	}
	db.SetMaxOpenConns(2000)
	db.SetMaxIdleConns(1000)
	defer db.Close()
	err = db.Ping()
	if err != nil {
		fmt.Println("数据库连接错误!")
	}
	pool = newPool(redisServer)
	// Init a syslog writter for our test
	l, errLog := syslog.New(syslog.LOG_INFO, "TestFSock")
	if errLog != nil {
		// NOTE(review): Sprintf has no verb for errLog (go vet flags this);
		// the output will contain a "%!(EXTRA ...)" suffix.
		l.Crit(fmt.Sprintf("Cannot connect to syslog:", errLog))
		return
	}
	// No filters
	evFilters := map[string]string{}
	// We are interested in heartbeats, define handler for them
	evHandlers := map[string][]func(string){"CHANNEL_CREATE": []func(string){printChannelCreate}, "CHANNEL_ANSWER": []func(string){printChannelAnswer}, "CHANNEL_HANGUP_COMPLETE": []func(string){printChannelHungup}}
	// evHandlers := map[string][]func(string){"CHANNEL_CREATE": []func(string){printChannelCreate}}
	fs, err := fsock.NewFSock("127.0.0.1:8021", "ClueCon", 10, evHandlers, evFilters, l)
	if err != nil {
		// NOTE(review): same missing-verb Sprintf issue as above.
		l.Crit(fmt.Sprintf("FreeSWITCH error:", err))
		return
	}
	fs.ReadEvents()
}
/**
 * Validate TourDto
 *
 * Static utility: each public method runs a series of check* helpers that
 * throw a field-specific exception on failure; the catch blocks log the
 * problem and flag it on the request via a request attribute (the attribute
 * constants are presumably static imports — verify in the full file).
 *
 * @author Oleh Chui
 */
public class TourValidator {

    private static final Logger logger = LogManager.getLogger(TourValidator.class);

    // Utility class: not instantiable.
    private TourValidator() {}

    /**
     * Validate all fields of TourDto except 'burning'
     * and process exceptions
     *
     * On the first invalid field, logs a warning, sets the matching request
     * attribute, and returns false (remaining fields are NOT checked).
     *
     * @param tourDto An instance of TourDto class that should be validated
     * @param request An instance of HttpServletRequest class
     * @return A boolean representing is TourDto valid or not
     */
    public static boolean validate(TourDto tourDto, HttpServletRequest request) {
        try {
            checkForNameIsNotEmpty(tourDto.getName());
            checkForValidPrice(tourDto.getPrice());
            checkForCountryIsNotEmpty(tourDto.getCountry());
            checkForCityIsNotEmpty(tourDto.getCity());
            checkForDescriptionIsNotEmpty(tourDto.getDescription());
            checkForValidMaxDiscount(tourDto.getMaxDiscount());
            checkForValidDiscountStep(tourDto.getDiscountStep());
            checkForTourTypeisNotEmpty(tourDto.getTourType());
            checkForHotelTypeIsNotEmpty(tourDto.getHotelType());
            checkForValidPersonNumber(tourDto.getPersonNumber());
            checkForValidStartDate(tourDto.getStartDate());
            checkForValidEndDate(tourDto.getStartDate(), tourDto.getEndDate());
            return true;
        } catch (NameIsEmptyException e) {
            logger.warn("<tour creating> name is empty");
            request.setAttribute(NAME_IS_EMPTY, true);
        } catch (PriceIsNotValidException e) {
            logger.warn("<tour creating> price is not valid ({})", tourDto.getPrice());
            request.setAttribute(PRICE_IS_NOT_VALID, true);
        } catch (CountryIsEmptyException e) {
            logger.warn("<tour creating> country is empty");
            request.setAttribute(COUNTRY_IS_EMPTY, true);
        } catch (CityIsEmptyException e) {
            logger.warn("<tour creating> city is empty");
            request.setAttribute(CITY_IS_EMPTY, true);
        } catch (DescriptionIsEmptyException e) {
            logger.warn("<tour creating> description is empty");
            request.setAttribute(DESCRIPTION_IS_EMPTY, true);
        } catch (MaxDiscountIsNotValidException e) {
            logger.warn("<tour creating> max discount value is not valid ({})", tourDto.getMaxDiscount());
            request.setAttribute(MAX_DISCOUNT_IS_NOT_VALID, true);
        } catch (DiscountStepIsNotValidException e) {
            logger.warn("<tour creating> discount step is not valid ({})", tourDto.getDiscountStep());
            request.setAttribute(DISCOUNT_STEP_IS_NOT_VALID, true);
        } catch (TourTypeIsEmptyException e) {
            logger.warn("<tour creating> tour type is empty");
            request.setAttribute(TOUR_TYPE_IS_EMPTY, true);
        } catch (HotelTypeIsEmptyException e) {
            logger.warn("<tour creating> hotel type is empty");
            request.setAttribute(HOTEL_TYPE_IS_EMPTY, true);
        } catch (PersonNumberIsNotValidException e) {
            logger.warn("<tour creating> person number is not valid ({})", tourDto.getPersonNumber());
            request.setAttribute(PERSON_NUMBER_IS_NOT_VALID, true);
        } catch (StartDateIsNotValidException e) {
            logger.warn("<tour creating> start date is not valid ({})", tourDto.getStartDate());
            request.setAttribute(START_DATE_IS_NOT_VALID, true);
        } catch (EndDateIsNotValidException e) {
            logger.warn("<tour creating> end date is not valid ({})", tourDto.getEndDate());
            request.setAttribute(END_DATE_IS_NOT_VALID, true);
        }
        return false;
    }

    /**
     * Validate only discount information [maxDiscount and discountStep]
     * of the instance of the TourDto class
     *
     * @param tourDto An instance of TourDto class that should be validated
     * @param req An instance of HttpServletRequest class
     * @return A boolean representing is discount information valid or not
     */
    public static boolean validateDiscountInfo(TourDto tourDto, HttpServletRequest req) {
        try {
            checkForValidMaxDiscount(tourDto.getMaxDiscount());
            checkForValidDiscountStep(tourDto.getDiscountStep());
            return true;
        } catch (MaxDiscountIsNotValidException e) {
            logger.warn("<discount updating> max discount value is not valid ({})", tourDto.getMaxDiscount());
            req.setAttribute(MAX_DISCOUNT_IS_NOT_VALID, true);
        } catch (DiscountStepIsNotValidException e) {
            logger.warn("<discount updating> discount step is not valid ({})", tourDto.getDiscountStep());
            req.setAttribute(DISCOUNT_STEP_IS_NOT_VALID, true);
        }
        return false;
    }

    // Throws when the name field is blank/empty.
    private static void checkForNameIsNotEmpty(String name) throws NameIsEmptyException {
        if (FieldValidator.fieldIsEmpty(name)) throw new NameIsEmptyException();
    }

    // Price must be a parsable BigDecimal within [PRICE_MIN, PRICE_MAX].
    private static void checkForValidPrice(String priceString) throws PriceIsNotValidException {
        if (FieldValidator.fieldIsEmpty(priceString) || FieldValidator.fieldIsNotValidBigDecimal(priceString)) {
            throw new PriceIsNotValidException();
        }

        BigDecimal price = new BigDecimal(priceString);

        if (price.compareTo(PRICE_MIN) < 0 || price.compareTo(PRICE_MAX) > 0) {
            throw new PriceIsNotValidException();
        }
    }

    private static void checkForCountryIsNotEmpty(String country) throws CountryIsEmptyException {
        if (FieldValidator.fieldIsEmpty(country)) throw new CountryIsEmptyException();
    }

    private static void checkForCityIsNotEmpty(String city) throws CityIsEmptyException {
        if (FieldValidator.fieldIsEmpty(city)) throw new CityIsEmptyException();
    }

    private static void checkForDescriptionIsNotEmpty(String description) throws DescriptionIsEmptyException {
        if (FieldValidator.fieldIsEmpty(description)) throw new DescriptionIsEmptyException();
    }

    // Max discount must be an integer within [MAX_DISCOUNT_MIN, MAX_DISCOUNT_MAX].
    private static void checkForValidMaxDiscount(String maxDiscountString) throws MaxDiscountIsNotValidException {
        if (FieldValidator.fieldIsEmpty(maxDiscountString) || FieldValidator.fieldIsNotValidInteger(maxDiscountString)) {
            throw new MaxDiscountIsNotValidException();
        }

        int maxDiscount = Integer.parseInt(maxDiscountString);

        if (maxDiscount < MAX_DISCOUNT_MIN || maxDiscount > MAX_DISCOUNT_MAX) {
            throw new MaxDiscountIsNotValidException();
        }
    }

    // Discount step must be a double within [DISCOUNT_STEP_MIN, DISCOUNT_STEP_MAX].
    private static void checkForValidDiscountStep(String discountStepString) throws DiscountStepIsNotValidException {
        if (FieldValidator.fieldIsEmpty(discountStepString) || !FieldValidator.fieldIsValidDouble(discountStepString)) {
            throw new DiscountStepIsNotValidException();
        }

        double discountStep = Double.parseDouble(discountStepString);

        if (discountStep < DISCOUNT_STEP_MIN || discountStep > DISCOUNT_STEP_MAX) {
            throw new DiscountStepIsNotValidException();
        }
    }

    private static void checkForTourTypeisNotEmpty(String tourType) throws TourTypeIsEmptyException {
        if (FieldValidator.fieldIsEmpty(tourType)) {
            throw new TourTypeIsEmptyException();
        }
    }

    private static void checkForHotelTypeIsNotEmpty(String hotelType) throws HotelTypeIsEmptyException {
        if (FieldValidator.fieldIsEmpty(hotelType)) {
            throw new HotelTypeIsEmptyException();
        }
    }

    // Person number must be an integer within [PERSON_NUMBER_MIN, PERSON_NUMBER_MAX].
    private static void checkForValidPersonNumber(String personNumberString) throws PersonNumberIsNotValidException {
        if (FieldValidator.fieldIsEmpty(personNumberString) || FieldValidator.fieldIsNotValidInteger(personNumberString)) {
            throw new PersonNumberIsNotValidException();
        }

        int personNumber = Integer.parseInt(personNumberString);

        if (personNumber < PERSON_NUMBER_MIN || personNumber > PERSON_NUMBER_MAX) {
            throw new PersonNumberIsNotValidException();
        }
    }

    // Start date must parse as ISO LocalDate and be strictly in the future.
    private static void checkForValidStartDate(String startDateString) throws StartDateIsNotValidException {
        if (FieldValidator.fieldIsEmpty(startDateString)) {
            throw new StartDateIsNotValidException();
        }

        LocalDate startDate = LocalDate.parse(startDateString);

        if (!startDate.isAfter(LocalDate.now())) {
            throw new StartDateIsNotValidException();
        }
    }

    // End date must be present and strictly after the start date (when given).
    private static void checkForValidEndDate(String startDateString, String endDateString) throws EndDateIsNotValidException {
        if (FieldValidator.fieldIsEmpty(endDateString)) {
            throw new EndDateIsNotValidException();
        }

        if (!FieldValidator.fieldIsEmpty(startDateString)) {
            LocalDate startDate = LocalDate.parse(startDateString);
            LocalDate endDate = LocalDate.parse(endDateString);

            if (!endDate.isAfter(startDate)) {
                throw new EndDateIsNotValidException();
            }
        }
    }
}
import { GraphQLError } from "graphql";
import { ExecutionContext } from "graphql/execution/execute";
/**
 * A function applied to an execution result: given the raw `data` and the
 * collected `errors`, returns the (presumably null-trimmed) data — TODO
 * confirm exact trimming semantics against the implementation.
 */
export declare type NullTrimmer = (data: any, errors: GraphQLError[]) => any;
/**
 * Builds a NullTrimmer for the given GraphQL execution context.
 *
 * @param {ExecutionContext} exeContext
 * @returns {(data: any, errors: GraphQLError[]) => {data: any; errors: GraphQLError[]}}
 */
export declare function createNullTrimmer(exeContext: ExecutionContext): NullTrimmer;
// IsSetshouldDestroySnap method check if the shouldDestroySnap field of volumeRollback object is set. func IsshouldDestroySnapSet() PredicateFunc { return func(v *volumeRollback) bool { return v.shouldDestroySnap == true } }
/**
 * Appends the
 * <a href="https://tools.ietf.org/html/rfc5849">The OAuth 1.0 Protocol</a>
 * signature to the HTTP request.
 * This request signer computes the signature components of the
 * "OAuth" auth-scheme and adds them as the Authorization header value.
 *
 * <p>
 * See also
 * <a href="http://www.iana.org/assignments/http-authschemes/http-authschemes.xhtml#authschemes">
 * HTTP Authentication Scheme Registry</a> for a list of authschemes.
 *
 * @author kmccrack
 */
public class OAuth1Signer implements HttpProvider.HttpRequestAuthorizer {

    /**
     * HERE Account recommends 6-character nonces.
     */
    private static final int NONCE_LENGTH = 6;

    // Clock abstraction so tests can control the oauth_timestamp value.
    private final Clock clock;

    /**
     * HERE client accessKeyId. Becomes the value of oauth_consumer_key in the
     * Authorization: OAuth header.
     */
    private final String consumerKey;

    /**
     * HERE client accessKeySecret. Used to calculate the oauth_signature in the
     * Authorization: OAuth header.
     */
    private final String consumerSecret;

    // Signing algorithm (e.g. HMAC-SHA256) used for oauth_signature.
    private final SignatureMethod signatureMethod;

    /**
     * Construct the OAuth signer based on accessKeyId and accessKeySecret.
     *
     * @param accessKeyId the HERE client accessKeyId. Becomes the value of oauth_consumer_key in
     *      the Authorization: OAuth header.
     * @param accessKeySecret the HERE client accessKeySecret. Used to calculate the oauth_signature
     *      in the Authorization: OAuth header.
     */
    public OAuth1Signer(String accessKeyId, String accessKeySecret) {
        this(new SettableSystemClock(), accessKeyId, accessKeySecret);
    }

    /**
     * Construct the OAuth signer based on clock, accessKeyId, and accessKeySecret.
     * Use this if you want to inject your own clock, such as during unit tests.
     *
     * @param clock the implementation of a clock you want to use
     * @param accessKeyId the HERE clientId. Becomes the value of oauth_consumer_key in
     *      the Authorization: OAuth header.
     * @param accessKeySecret the HERE clientSecret. Used to calculate the oauth_signature
     *      in the Authorization: OAuth header.
     */
    public OAuth1Signer(Clock clock, String accessKeyId, String accessKeySecret) {
        // Defaults to HMAC-SHA256 signing.
        this(clock, accessKeyId, accessKeySecret, SignatureMethod.HMACSHA256);
    }

    /**
     * Construct the OAuth signer with an explicit signature method.
     *
     * @param consumerKey the identity of the caller, sent in plaintext.
     *      Becomes the value of oauth_consumer_key in
     *      the Authorization: OAuth header.
     * @param consumerSecret secret of the caller, or private key of the caller.
     *      Used to calculate the oauth_signature
     *      in the Authorization: OAuth header.
     * @param signatureMethod the choice of signature algorithm to use.
     */
    public OAuth1Signer(String consumerKey, String consumerSecret, SignatureMethod signatureMethod) {
        this(new SettableSystemClock(), consumerKey, consumerSecret, signatureMethod);
    }

    /**
     * Construct the OAuth signer based on clock, consumerKey, consumerSecret,
     * and signatureMethod.
     *
     * @param clock the implementation of a clock you want to use
     * @param consumerKey the identity of the caller, sent in plaintext.
     *      Becomes the value of oauth_consumer_key in
     *      the Authorization: OAuth header.
     * @param consumerSecret secret of the caller, or private key of the caller.
     *      Used to calculate the oauth_signature
     *      in the Authorization: OAuth header.
     * @param signatureMethod the choice of signature algorithm to use.
     */
    public OAuth1Signer(Clock clock, String consumerKey, String consumerSecret,
                        SignatureMethod signatureMethod) {
        this.clock = clock;
        this.consumerKey = consumerKey;
        this.consumerSecret = consumerSecret;
        this.signatureMethod = signatureMethod;
    }

    /**
     * Fills {@code bytes} with entropy for OAuth1.0 nonce values.
     * Note the OAuth1.0 spec specifically tells us we do not need to use a SecureRandom
     * number generator.
     *
     * @param bytes the byte array in which to stick the nonce value
     */
    protected void nextBytes(byte[] bytes) {
        // NOTE(review): stray empty statement (double semicolon) left as-is.
        ThreadLocalRandom.current().nextBytes(bytes);;
    }

    /**
     * For cases where there is no Content-Type: application/x-www-form-urlencoded,
     * and no request token, call this method to get the Authorization Header Value
     * for a single request.
     *
     * <p>
     * Computes the OAuth1 Authorization header value including all required components of the
     * OAuth type.
     * See also the OAuth 1.0
     * <a href="https://tools.ietf.org/html/rfc5849#section-3.5.1">Authorization Header</a>
     * Section.
     *
     * <p>
     * Note that the client accessKeySecret, once configured on this object, does not leave this method,
     * as signatures are used in its place on the wire.
     *
     * @param method the HTTP method of the request being signed
     * @param url the full request URL being signed
     * @param formParams form parameters to include in the signature base string (may be null)
     * @return the value to use for the Authorization header
     */
    private String getAuthorizationHeaderValue(String method, String url,
                                               Map<String, List<String>> formParams) {
        SignatureCalculator calculator = getSignatureCalculator();

        // <a href="https://tools.ietf.org/html/rfc5849#section-3.3">timestamp</a>:
        // the number of seconds since January 1, 1970 00:00:00 GMT
        long timestamp = clock.currentTimeMillis() / 1000L;
        // choose the first 6 chars from base64 alphabet
        byte[] bytes = new byte[NONCE_LENGTH];
        nextBytes(bytes);
        String nonce = Base64.getUrlEncoder().withoutPadding().encodeToString(bytes).substring(0, NONCE_LENGTH);
        String computedSignature = calculator.calculateSignature(method, url, timestamp, nonce,
                signatureMethod, formParams, null);
        return calculator.constructAuthHeader(computedSignature, nonce, timestamp, signatureMethod);
    }

    /**
     * Gets the signature calculator, given that we don't use a user auth, and we do use
     * the configured client accessKeyId, client accessKeySecret pair.
     *
     * @return a SignatureCalculator bound to this signer's consumer credentials
     */
    SignatureCalculator getSignatureCalculator() {
        // client accessKeyId is "Client Identifier" a.k.a. "oauth_consumer_key" in the OAuth1.0 spec
        // client accessKeySecret is "Client Shared-Secret" , which becomes the client shared-secret component
        // of the HMAC-SHA1 key per http://tools.ietf.org/html/rfc5849#section-3.4.2.
        SignatureCalculator calculator = new SignatureCalculator(consumerKey, consumerSecret);
        return calculator;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void authorize(HttpRequest httpRequest, String method, String url,
                          Map<String, List<String>> formParams) {
        String authorizationHeaderValue = getAuthorizationHeaderValue(method, url, formParams);
        httpRequest.addAuthorizationHeader(authorizationHeaderValue);
    }
}
def test():
    """Smoke-test Transform wrapping a pyproj UTM zone 11 projection.

    Uses math.isclose instead of exact list equality: the projected
    coordinates are computed floats, and exact `==` comparison is brittle
    across pyproj/PROJ versions and platforms.
    """
    import math
    import pyproj
    proj = pyproj.Proj(proj='utm', zone=11, ellps='WGS84')
    proj = Transform(proj, rotate=40.0, origin=(-121.0, 34.5))
    # Forward transform of a point near the origin.
    x, y = proj(-120.0, 35.0)
    assert math.isclose(x, 38031.1000251, abs_tol=1e-6)
    assert math.isclose(y, 100171.63485189, abs_tol=1e-6)
    # Inverse of (0, 0) must round-trip back to the configured origin.
    x, y = proj(0, 0, inverse=True)
    assert math.isclose(x, -121.0, abs_tol=1e-9)
    assert math.isclose(y, 34.5, abs_tol=1e-9)
    return
We know two things for sure about Justin Anderson after one season in the NBA: 1) He’s a world-class athlete, 2) with a world-class appetite. Dinner with Justin Anderson at Texas Land and Cattle Company View photos of Justin Anderson’s dinner at Texas Land & Cattle. View Gallery View More Galleries The next thing we are beginning to learn about the 22-year-old wing is he has a deep appreciation for his teammates. Monday night Anderson hosted most of the new Mavericks at Texas Land & Cattle on Lemmon, also inviting some 80-plus fans and media for a dinner benefiting the C5 Youth Foundation of Texas. Those of us who either spend a lot of time in the kitchen or watch a lot of cooking shows know that by eating prepared food, you can gather something about the creator. In Anderson’s case, he worked with executive chef John Imbriolo to prepare a six-course meal, each selected by Anderson to combine elements of his Virginia childhood and professional career in the big city. “I’m just giving an opportunity for Dallas to get to know a country kid from Virginia, and just see how we live, and I guess to bring a taste of Montross to Dallas,” Anderson said. The second-year pro picked up a penchant for cooking early in his childhood. Montross is a small town of just 384, per the 2010 Census, and Anderson said once the streetlights came on outside, there wasn’t much to do. Life moves at a different pace in the country. But he, along with his siblings, would gather around the kitchen table and watch their parents cook dinner, and Justin and his brother E.J. would occasionally sneak in some seasoning or an extra ingredient to see how it would change the taste. His experience would prove valuable in college, when he’d often cook for his roommates or teammates, most of whom — as is the case with just about every college student out there, myself included once upon a time — didn’t know which way was up when it came to operating in the kitchen. 
Anderson said one of his teammates would cook frozen chicken nuggets in the microwave and crinkle-cut french fries in the oven. “You’re doing it all wrong,” he’d bemoan. “People ask sometimes, ‘What do you do outside of playing basketball?’ My first answer is always cooking,” Anderson said. “I just love it. It’s something that takes the mind away from everything. If you’ve been having a long day, and you make a dish that you love, you almost forget about it.” Familiarity with Anderson’s roots helps to understand his menu, which began with a crab bisque with “just the right amount of spice,” as the Anderson-autographed menu stated, along with a grilled king crab leg. Imbriolo said crab was the first thing Anderson mentioned as he imagined the courses. Then came a Buffalo chicken dip and a tomato and cucumber salad before the meats came out. And, my, the meat was good. To say Anderson has been impressed by Texas beef would be an understatement: There simply aren’t enough sources of local beef in his hometown, so his first real introduction to fresh beef, such as the salt block prime tenderloin or chipotle sugar bacon-wrapped ribeye courses, came when he moved to Dallas. That’s one more reason he’s happy to be a Maverick. The highlight of the meal was “Justin’s ABJ,” his spin on the traditional peanut butter and jelly sandwich, in which he substitutes almond butter in for peanut butter “for health purposes,” served on pan-seared Texas toast, which he has said has always been his preferred choice of bread with any sandwich. But there’s more on it which he won’t divulge. “I’m not gonna say what I do from there, because hopefully one day I can make that mine,” he said, visions of future Executive Chef Justin Anderson in his head. “But it’s ridiculous. It’s so good.” (It is.) After the meal, head coach Rick Carlisle introduced each player to those in attendance as this was the first opportunity for many of them to speak to the community. 
This is a much younger roster than the Mavericks have put together in recent seasons, so it’s important for the young guys to get to know each other, their coach, and their community as soon as possible to ease their transition to the NBA. Assistant coaches Melvin Hunt and Kaleb Canales also attended. The mission of the C5 Youth Foundation of Texas is to change the odds for high-potential youth from risk-filled environments, inspiring them to pursue personal success, and preparing them for leadership roles in college, work, and their communities. Of more than 200 kids to complete the five-year program, each of them has graduated high school and all have been accepted into colleges, including one at Stanford and one at Dartmouth. Former NHL goalie and Dallas Stars legend Marty Turco is among those spearheading the efforts of the foundation.
def minimal_payload(self):
    """Return this cluster's payload reduced to its minimal wire form.

    The instance's attribute dictionary is wrapped under a ``"cluster"``
    key before being handed to ``_make_minimal``.
    """
    # NOTE(review): _make_minimal presumably prunes empty/None entries from
    # the payload -- confirm against its definition.
    return _make_minimal({"cluster": self.__dict__})
def main(input_file=sys.stdin, output_file=sys.stdout):
    """Read a protoc CodeGeneratorRequest and write a CodeGeneratorResponse.

    Collects the documented message structures per proto file, converts all
    of their docstrings from reStructuredText to Markdown in one batched
    ``convert_text`` call, and emits one insertion-point file per message
    plus empty placeholder files from ``_init_files``.
    """
    # Prefer the underlying binary streams when given text-mode wrappers.
    if hasattr(input_file, 'buffer'):
        input_file = input_file.buffer
    if hasattr(output_file, 'buffer'):
        output_file = output_file.buffer

    parser = CodeGeneratorParser.from_input_file(input_file)

    # Group documented message structures by the proto file they came from.
    comment_data = {}
    for filename, message_structure in parser.find_docs():
        comment_data.setdefault(filename, set()).add(message_structure)

    # Concatenate every docstring into one document separated by a sentinel
    # token so the rst -> md conversion happens in a single call.
    _BATCH_TOKEN = "CD985272F78311"
    meta_structs = []
    meta_docstrings = []
    for fn, structs in comment_data.items():
        for struct in structs:
            if meta_docstrings:
                meta_docstrings.append("\n%s" % _BATCH_TOKEN)
            meta_docstrings.append(struct.get_meta_docstring())
            meta_structs.append((fn, struct))

    converted = convert_text("".join(meta_docstrings), 'rst', format='md')
    converted_parts = converted.split("%s" % _BATCH_TOKEN)

    # Pair each structure with its converted docstring. zip() stops at the
    # shorter sequence, matching the original explicit bounds check.
    answer = []
    for (fn, struct), docstring in zip(meta_structs, converted_parts):
        answer.append(CodeGeneratorResponse.File(
            name=fn.replace('.proto', '_pb2.py'),
            insertion_point='class_scope:%s' % struct.name,
            content=',\n\'__doc__\': """{docstring}""",'.format(
                docstring=struct.get_python_docstring(docstring),
            ),
        ))

    # Empty placeholder files (e.g. package __init__ modules).
    for fn in _init_files(comment_data.keys()):
        answer.append(CodeGeneratorResponse.File(
            name=fn,
            content='',
        ))

    output_file.write(CodeGeneratorResponse(file=answer).SerializeToString())
Mark Morgan resigned from his position today as chief of the Border Patrol after being asked to leave by the Trump administration. It came as President Donald Trump had announced an executive order to go ahead with building the promised border wall. Morgan spoke to senior Border Patrol agents in a video conference today and said that he had been asked to leave, according to a source quoted by the Associated Press. Morgan’s tenure was quite brief, having been named in June 2016. A former FBI agent, Morgan had once led the internal affairs department for Customs and Border Protection. He was the first to lead the Border Patrol without having ever served in the Border Patrol. Acting Commissioner for the Border Patrol Kevin McAleenan, in a statement, praised Morgan for "his unwavering dedication to our border security mission" and "lifelong career in service to the nation." Morgan had clashed with the Border Patrol's union, which endorsed Trump during the presidential campaign. The National Border Patrol Council had wanted a Border Patrol veteran to lead, and criticized Morgan consistently. During his speech at the Department of Homeland Security today, Trump pointedly did not mention Morgan. Instead the president pointed out union president Brandon Judd, who had served on the transition team. On December 1, Morgan responded to a question from Sen. Tom Carper (D-DE) that he was supportive of comprehensive immigration overhaul, which the Border Patrol union interpreted to include a path to citizenship for illegal aliens. Morgan later clarified that blanket amnesty “could not be further from my position.”
<filename>sdk/go/ovh/publicCloudUser.go
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

package ovh

import (
	"context"
	"reflect"

	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

// > __DEPRECATED:__ Use `CloudUser` instead.
//
// Creates a user in a public cloud project.
//
// ## Example Usage
//
// ```go
// package main
//
// import (
// 	"github.com/pulumi/pulumi-ovh/sdk/go/ovh"
// 	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
// )
//
// func main() {
// 	pulumi.Run(func(ctx *pulumi.Context) error {
// 		_, err := ovh.NewPublicCloudUser(ctx, "user1", &ovh.PublicCloudUserArgs{
// 			ProjectId: pulumi.String("67890"),
// 		})
// 		if err != nil {
// 			return err
// 		}
// 		return nil
// 	})
// }
// ```
type PublicCloudUser struct {
	pulumi.CustomResourceState

	// the date the user was created.
	CreationDate pulumi.StringOutput `pulumi:"creationDate"`
	// A description associated with the user.
	Description pulumi.StringPtrOutput `pulumi:"description"`
	// a convenient map representing an openstackRc file.
	// Note: no password nor sensitive token is set in this map.
	OpenstackRc pulumi.MapOutput `pulumi:"openstackRc"`
	// (Sensitive) the password generated for the user. The password can
	// be used with the Openstack API. This attribute is sensitive and will only be
	// retrieve once during creation.
	Password pulumi.StringOutput `pulumi:"password"`
	// The id of the public cloud project. If omitted,
	// the `OVH_PROJECT_ID` environment variable is used.
	ProjectId pulumi.StringPtrOutput `pulumi:"projectId"`
	// NOTE(review): RoleName/RoleNames/Roles are undocumented in the upstream
	// schema; presumably the Openstack role(s) granted to the user -- confirm
	// against the OVH cloud API documentation.
	RoleName  pulumi.StringPtrOutput         `pulumi:"roleName"`
	RoleNames pulumi.StringArrayOutput       `pulumi:"roleNames"`
	Roles     PublicCloudUserRoleArrayOutput `pulumi:"roles"`
	// Service name of the resource representing the id of the cloud project.
	ServiceName pulumi.StringPtrOutput `pulumi:"serviceName"`
	// the status of the user. should be normally set to 'ok'.
	Status pulumi.StringOutput `pulumi:"status"`
	// the username generated for the user. This username can be used with
	// the Openstack API.
	Username pulumi.StringOutput `pulumi:"username"`
}

// NewPublicCloudUser registers a new resource with the given unique name, arguments, and options.
func NewPublicCloudUser(ctx *pulumi.Context, name string, args *PublicCloudUserArgs, opts ...pulumi.ResourceOption) (*PublicCloudUser, error) {
	// All arguments are optional for this resource, so a nil args is allowed.
	if args == nil {
		args = &PublicCloudUserArgs{}
	}
	var resource PublicCloudUser
	err := ctx.RegisterResource("ovh:index/publicCloudUser:PublicCloudUser", name, args, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}

// GetPublicCloudUser gets an existing PublicCloudUser resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetPublicCloudUser(ctx *pulumi.Context, name string, id pulumi.IDInput, state *PublicCloudUserState, opts ...pulumi.ResourceOption) (*PublicCloudUser, error) {
	var resource PublicCloudUser
	err := ctx.ReadResource("ovh:index/publicCloudUser:PublicCloudUser", name, id, state, &resource, opts...)
	if err != nil {
		return nil, err
	}
	return &resource, nil
}

// Input properties used for looking up and filtering PublicCloudUser resources.
type publicCloudUserState struct {
	// the date the user was created.
	CreationDate *string `pulumi:"creationDate"`
	// A description associated with the user.
	Description *string `pulumi:"description"`
	// a convenient map representing an openstackRc file.
	// Note: no password nor sensitive token is set in this map.
	OpenstackRc map[string]interface{} `pulumi:"openstackRc"`
	// (Sensitive) the password generated for the user. The password can
	// be used with the Openstack API. This attribute is sensitive and will only be
	// retrieve once during creation.
	Password *string `pulumi:"password"`
	// The id of the public cloud project. If omitted,
	// the `OVH_PROJECT_ID` environment variable is used.
	ProjectId *string                `pulumi:"projectId"`
	RoleName  *string                `pulumi:"roleName"`
	RoleNames []string               `pulumi:"roleNames"`
	Roles     []PublicCloudUserRole  `pulumi:"roles"`
	// Service name of the resource representing the id of the cloud project.
	ServiceName *string `pulumi:"serviceName"`
	// the status of the user. should be normally set to 'ok'.
	Status *string `pulumi:"status"`
	// the username generated for the user. This username can be used with
	// the Openstack API.
	Username *string `pulumi:"username"`
}

type PublicCloudUserState struct {
	// the date the user was created.
	CreationDate pulumi.StringPtrInput
	// A description associated with the user.
	Description pulumi.StringPtrInput
	// a convenient map representing an openstackRc file.
	// Note: no password nor sensitive token is set in this map.
	OpenstackRc pulumi.MapInput
	// (Sensitive) the password generated for the user. The password can
	// be used with the Openstack API. This attribute is sensitive and will only be
	// retrieve once during creation.
	Password pulumi.StringPtrInput
	// The id of the public cloud project. If omitted,
	// the `OVH_PROJECT_ID` environment variable is used.
	ProjectId pulumi.StringPtrInput
	RoleName  pulumi.StringPtrInput
	RoleNames pulumi.StringArrayInput
	Roles     PublicCloudUserRoleArrayInput
	// Service name of the resource representing the id of the cloud project.
	ServiceName pulumi.StringPtrInput
	// the status of the user. should be normally set to 'ok'.
	Status pulumi.StringPtrInput
	// the username generated for the user. This username can be used with
	// the Openstack API.
	Username pulumi.StringPtrInput
}

func (PublicCloudUserState) ElementType() reflect.Type {
	return reflect.TypeOf((*publicCloudUserState)(nil)).Elem()
}

type publicCloudUserArgs struct {
	// A description associated with the user.
	Description *string `pulumi:"description"`
	// a convenient map representing an openstackRc file.
	// Note: no password nor sensitive token is set in this map.
	OpenstackRc map[string]interface{} `pulumi:"openstackRc"`
	// The id of the public cloud project. If omitted,
	// the `OVH_PROJECT_ID` environment variable is used.
	ProjectId *string  `pulumi:"projectId"`
	RoleName  *string  `pulumi:"roleName"`
	RoleNames []string `pulumi:"roleNames"`
	// Service name of the resource representing the id of the cloud project.
	ServiceName *string `pulumi:"serviceName"`
}

// The set of arguments for constructing a PublicCloudUser resource.
type PublicCloudUserArgs struct {
	// A description associated with the user.
	Description pulumi.StringPtrInput
	// a convenient map representing an openstackRc file.
	// Note: no password nor sensitive token is set in this map.
	OpenstackRc pulumi.MapInput
	// The id of the public cloud project. If omitted,
	// the `OVH_PROJECT_ID` environment variable is used.
	ProjectId pulumi.StringPtrInput
	RoleName  pulumi.StringPtrInput
	RoleNames pulumi.StringArrayInput
	// Service name of the resource representing the id of the cloud project.
	ServiceName pulumi.StringPtrInput
}

func (PublicCloudUserArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*publicCloudUserArgs)(nil)).Elem()
}

type PublicCloudUserInput interface {
	pulumi.Input

	ToPublicCloudUserOutput() PublicCloudUserOutput
	ToPublicCloudUserOutputWithContext(ctx context.Context) PublicCloudUserOutput
}

func (*PublicCloudUser) ElementType() reflect.Type {
	return reflect.TypeOf((**PublicCloudUser)(nil)).Elem()
}

func (i *PublicCloudUser) ToPublicCloudUserOutput() PublicCloudUserOutput {
	return i.ToPublicCloudUserOutputWithContext(context.Background())
}

func (i *PublicCloudUser) ToPublicCloudUserOutputWithContext(ctx context.Context) PublicCloudUserOutput {
	return pulumi.ToOutputWithContext(ctx, i).(PublicCloudUserOutput)
}

// PublicCloudUserArrayInput is an input type that accepts PublicCloudUserArray and PublicCloudUserArrayOutput values.
// You can construct a concrete instance of `PublicCloudUserArrayInput` via:
//
//	PublicCloudUserArray{ PublicCloudUserArgs{...} }
type PublicCloudUserArrayInput interface {
	pulumi.Input

	ToPublicCloudUserArrayOutput() PublicCloudUserArrayOutput
	ToPublicCloudUserArrayOutputWithContext(context.Context) PublicCloudUserArrayOutput
}

type PublicCloudUserArray []PublicCloudUserInput

func (PublicCloudUserArray) ElementType() reflect.Type {
	return reflect.TypeOf((*[]*PublicCloudUser)(nil)).Elem()
}

func (i PublicCloudUserArray) ToPublicCloudUserArrayOutput() PublicCloudUserArrayOutput {
	return i.ToPublicCloudUserArrayOutputWithContext(context.Background())
}

func (i PublicCloudUserArray) ToPublicCloudUserArrayOutputWithContext(ctx context.Context) PublicCloudUserArrayOutput {
	return pulumi.ToOutputWithContext(ctx, i).(PublicCloudUserArrayOutput)
}

// PublicCloudUserMapInput is an input type that accepts PublicCloudUserMap and PublicCloudUserMapOutput values.
// You can construct a concrete instance of `PublicCloudUserMapInput` via:
//
//	PublicCloudUserMap{ "key": PublicCloudUserArgs{...} }
type PublicCloudUserMapInput interface {
	pulumi.Input

	ToPublicCloudUserMapOutput() PublicCloudUserMapOutput
	ToPublicCloudUserMapOutputWithContext(context.Context) PublicCloudUserMapOutput
}

type PublicCloudUserMap map[string]PublicCloudUserInput

func (PublicCloudUserMap) ElementType() reflect.Type {
	return reflect.TypeOf((*map[string]*PublicCloudUser)(nil)).Elem()
}

func (i PublicCloudUserMap) ToPublicCloudUserMapOutput() PublicCloudUserMapOutput {
	return i.ToPublicCloudUserMapOutputWithContext(context.Background())
}

func (i PublicCloudUserMap) ToPublicCloudUserMapOutputWithContext(ctx context.Context) PublicCloudUserMapOutput {
	return pulumi.ToOutputWithContext(ctx, i).(PublicCloudUserMapOutput)
}

type PublicCloudUserOutput struct{ *pulumi.OutputState }

func (PublicCloudUserOutput) ElementType() reflect.Type {
	return reflect.TypeOf((**PublicCloudUser)(nil)).Elem()
}

func (o PublicCloudUserOutput) ToPublicCloudUserOutput() PublicCloudUserOutput {
	return o
}

func (o PublicCloudUserOutput) ToPublicCloudUserOutputWithContext(ctx context.Context) PublicCloudUserOutput {
	return o
}

type PublicCloudUserArrayOutput struct{ *pulumi.OutputState }

func (PublicCloudUserArrayOutput) ElementType() reflect.Type {
	return reflect.TypeOf((*[]*PublicCloudUser)(nil)).Elem()
}

func (o PublicCloudUserArrayOutput) ToPublicCloudUserArrayOutput() PublicCloudUserArrayOutput {
	return o
}

func (o PublicCloudUserArrayOutput) ToPublicCloudUserArrayOutputWithContext(ctx context.Context) PublicCloudUserArrayOutput {
	return o
}

func (o PublicCloudUserArrayOutput) Index(i pulumi.IntInput) PublicCloudUserOutput {
	return pulumi.All(o, i).ApplyT(func(vs []interface{}) *PublicCloudUser {
		return vs[0].([]*PublicCloudUser)[vs[1].(int)]
	}).(PublicCloudUserOutput)
}

type PublicCloudUserMapOutput struct{ *pulumi.OutputState }

func (PublicCloudUserMapOutput) ElementType() reflect.Type {
	return reflect.TypeOf((*map[string]*PublicCloudUser)(nil)).Elem()
}

func (o PublicCloudUserMapOutput) ToPublicCloudUserMapOutput() PublicCloudUserMapOutput {
	return o
}

func (o PublicCloudUserMapOutput) ToPublicCloudUserMapOutputWithContext(ctx context.Context) PublicCloudUserMapOutput {
	return o
}

func (o PublicCloudUserMapOutput) MapIndex(k pulumi.StringInput) PublicCloudUserOutput {
	return pulumi.All(o, k).ApplyT(func(vs []interface{}) *PublicCloudUser {
		return vs[0].(map[string]*PublicCloudUser)[vs[1].(string)]
	}).(PublicCloudUserOutput)
}

// init registers the input and output plumbing types with the Pulumi runtime.
func init() {
	pulumi.RegisterInputType(reflect.TypeOf((*PublicCloudUserInput)(nil)).Elem(), &PublicCloudUser{})
	pulumi.RegisterInputType(reflect.TypeOf((*PublicCloudUserArrayInput)(nil)).Elem(), PublicCloudUserArray{})
	pulumi.RegisterInputType(reflect.TypeOf((*PublicCloudUserMapInput)(nil)).Elem(), PublicCloudUserMap{})
	pulumi.RegisterOutputType(PublicCloudUserOutput{})
	pulumi.RegisterOutputType(PublicCloudUserArrayOutput{})
	pulumi.RegisterOutputType(PublicCloudUserMapOutput{})
}
It may or may not be a coincidence, but these were precisely the months in which recovery from the 9.0 earthquake and 15 meter tsunami that struck Japan took place. Much infrastructure was destroyed, including most famously, several nuclear reactors, although unremarked in public consciousness, roads, buildings, dams, harbors, refineries and other infrastructure was also destroyed. Twenty thousand people, roughly, died in this event, and in spite of what you've heard, not one of them has yet to die from the effects of radiation sickness. This month, by the way, Japan shut down all of its nuclear reactors to make sure that they are "safe." Oddly enough, they didn't all decide to live in tents this month until they could figure out if buildings are "safe," even though lots of people died in buildings. The first time in the last half a century that the year to year monthly increase in dangerous fossil fuel waste levels in the planetary atmosphere exceeded 3.00 ppm - which it has done 8 times since - was in June of 1997 to June of 1998, when it rose by more than 3.00 ppm. In fact in 1998 there were six months in that year where the year to year (1997-1998) monthly increases equaled or exceeded 3.00 ppm. The anti-nuke gas bag Joe Romm was running the climate office in 1998 by the way, but apparently his wonderful efforts to build wind and solar plants all around the world and stop climate change dead in its tracks stopped that stuff dead in its tracks, well, sort of. The other year-to-year monthly increase that exceeded 3.00 ppm was between July of 2004 and July of 2005, when it jumped by 3.17 ppm, possibly because of noxious gases released by that famous gas bag Dick Cheney, or maybe it was something else. Who am I to say? By the way, since the gas bag Joe Romm ran the climate office in 1998 the average year to year monthly increase, including 1998 has been 1.99 ppm, higher, obviously than the 50 year average.
Happily though, global temperatures are increasing, and the upside of all that is that we need to burn less dangerous natural gas and less dangerous oil to heat our swell "green" homes. Aren't we lucky? Nevertheless, the April figures from April 2011 to April 2012 don't look all that good to me. It may be premature to declare "victory." Pay no attention to me. Don't worry. Be happy. Have a wonderful Memorial Day weekend, and try not to think too much about anything too memorable.
<reponame>zmij/wire
/*
 * wire_traits.hpp
 *
 *  Created on: Dec 11, 2015
 *      Author: zmij
 *
 * Compile-time classification of C++ types into wire-protocol categories.
 * The primary entry point is the wire_type<T> trait, which maps a type to
 * one of the wire_types enumerators below; the encoder/decoder dispatch on
 * that constant.
 */

#ifndef WIRE_DETAIL_WIRE_TRAITS_HPP_
#define WIRE_DETAIL_WIRE_TRAITS_HPP_

#include <wire/encoding/types.hpp>
#include <type_traits>
#include <string>
#include <vector>
#include <list>
#include <array>
#include <set>
#include <unordered_set>
#include <queue>
#include <deque>
#include <map>
#include <unordered_map>
#include <memory>

namespace wire {
namespace encoding {
namespace detail {

// Wire-level encoding categories a C++ type can map to.
enum wire_types {
    SCALAR_FIXED,      // fixed byte width on the wire (bool, char, floats, fixed_size<T>)
    SCALAR_VARINT,     // variable-length integer encoding (wider ints, enums)
    SCALAR_WITH_SIZE,  // length-prefixed scalar (std::string)
    VARIANT,
    ARRAY_FIXED,       // fixed element count (std::array)
    ARRAY_VARLEN,      // length-prefixed sequence (vector, list, sets, ...)
    DICTIONARY,        // key-value containers (map family)
    STRUCT,            // non-polymorphic aggregate
    CLASS,             // polymorphic class
    EXCEPTION,         // user-defined exception (see is_user_exception)
    PROXY              // remote-object proxy (see is_proxy)
};

template < wire_types V >
using wire_type_constant = ::std::integral_constant< wire_types, V >;

// Value-level analogue of std::conditional: selects one of two constants
// of the same type depending on a boolean condition.
template < typename ConstantType, bool Condition, ConstantType ifTrue, ConstantType ifFalse >
struct conditional_constant : ::std::integral_constant<ConstantType, ifTrue> {};
template < typename ConstantType, ConstantType ifTrue, ConstantType ifFalse >
struct conditional_constant< ConstantType, false, ifTrue, ifFalse >
    : ::std::integral_constant<ConstantType, ifFalse> {};

// Customization points: specialize to true_type for generated exception and
// proxy types elsewhere in the library.
template < typename T >
struct is_user_exception : ::std::false_type{};
template < typename T >
struct is_proxy : ::std::false_type{};

// Polymorphic types split into EXCEPTION / PROXY / CLASS; everything else
// that reaches this trait is a plain STRUCT.
template < typename T, bool >
struct wire_polymorphic_type;
template < typename T >
struct wire_polymorphic_type< T, true > :
    conditional_constant<wire_types, is_user_exception<T>::value, EXCEPTION,
        conditional_constant< wire_types, is_proxy<T>::value, PROXY, CLASS>::value > {};
template < typename T >
struct wire_polymorphic_type< T, false > :
    ::std::integral_constant< wire_types, STRUCT > {};

// Enums encode as varints; non-enums fall through to the polymorphism check.
template < typename T, bool >
struct wire_enum_type;
template < typename T >
struct wire_enum_type< T, true > :
    ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template < typename T >
struct wire_enum_type< T, false > :
    wire_polymorphic_type< T, ::std::is_polymorphic<T>::value >{};

// Primary trait: enum -> varint; polymorphic -> exception/proxy/class;
// otherwise struct. Explicit specializations below override this default.
template < typename T >
struct wire_type : wire_enum_type< T, ::std::is_enum<T>::value > {};

//@{
/** @name Fixed wire size types */
template <>
struct wire_type<bool> : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template <>
struct wire_type<char> : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template <>
struct wire_type<int8_t> : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template <>
struct wire_type<uint8_t> : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template <>
struct wire_type< float > : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template <>
struct wire_type< double > : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template <>
struct wire_type< long double > : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
template < typename T >
struct wire_type< fixed_size< T > > : ::std::integral_constant< wire_types, SCALAR_FIXED > {};
//@}

//@{
/** @name Varint-encoded integers.
 * NOTE(review): long / long long and their unsigned counterparts are
 * specialized separately (instead of int64_t/uint64_t) so the trait covers
 * both 64-bit typedef choices without a duplicate-specialization clash. */
template <>
struct wire_type< int16_t > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< int32_t > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< long > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< long long > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< uint16_t > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< uint32_t > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< unsigned long > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
template <>
struct wire_type< unsigned long long > : ::std::integral_constant< wire_types, SCALAR_VARINT > {};
//@}

template <>
struct wire_type< ::std::string > : ::std::integral_constant< wire_types, SCALAR_WITH_SIZE > {};

//@{
/** @name Sequence containers */
template < typename T, ::std::size_t N >
struct wire_type< ::std::array< T, N > > : ::std::integral_constant< wire_types, ARRAY_FIXED > {};
template < typename T >
struct wire_type< ::std::vector< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
template < typename T >
struct wire_type< ::std::list< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
template < typename T >
struct wire_type< ::std::deque< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
template < typename T >
struct wire_type< ::std::set< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
template < typename T >
struct wire_type< ::std::multiset< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
template < typename T >
struct wire_type< ::std::unordered_set< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
template < typename T >
struct wire_type< ::std::unordered_multiset< T > > : ::std::integral_constant< wire_types, ARRAY_VARLEN > {};
//@}

//@{
/** @name Associative (key-value) containers */
template < typename K, typename V >
struct wire_type< ::std::map< K, V > > : ::std::integral_constant< wire_types, DICTIONARY > {};
template < typename K, typename V >
struct wire_type< ::std::multimap< K, V > > : ::std::integral_constant< wire_types, DICTIONARY > {};
template < typename K, typename V >
struct wire_type< ::std::unordered_map< K, V > > : ::std::integral_constant< wire_types, DICTIONARY > {};
template < typename K, typename V >
struct wire_type< ::std::unordered_multimap< K, V > > : ::std::integral_constant< wire_types, DICTIONARY > {};
//@}

// Strips smart-pointer wrappers (recursively) and decays to the pointee
// type, so smart pointers classify as the type they point to.
template < typename T >
struct polymorphic_type {
    using type = typename ::std::decay<T>::type;
};
template < typename T >
struct polymorphic_type< ::std::shared_ptr< T > > : polymorphic_type< T >{};
template < typename T >
struct polymorphic_type< ::std::weak_ptr< T > > : polymorphic_type< T >{};
template < typename T >
struct polymorphic_type< ::std::unique_ptr< T > > : polymorphic_type< T >{};

template < typename T >
struct wire_type< ::std::shared_ptr<T> > : wire_type< typename polymorphic_type< T >::type > {};
template < typename T >
struct wire_type< ::std::weak_ptr<T> > : wire_type< typename polymorphic_type< T >::type > {};
template < typename T >
struct wire_type< ::std::unique_ptr<T> > : wire_type< typename polymorphic_type< T >::type > {};

}  // namespace detail
}  // namespace encoding
}  // namespace wire

#endif /* WIRE_DETAIL_WIRE_TRAITS_HPP_ */
Manchester United are confident of wrapping up a stunning £75 million deal for Everton striker Romelu Lukaku before the club leave for their US tour on Sunday, despite Chelsea refusing to throw in the towel. Chelsea believed they were favourites to sign Lukaku but have been left blindsided by United’s dramatic hijacking as Jose Mourinho prepares to be reunited with a player whom he allowed to leave Stamford Bridge three years ago. United have been happy for talk of Lukaku going to Chelsea to gather pace in recent weeks while they worked quietly behind the scenes to secure Mourinho’s first-choice target to replace Zlatan Ibrahimovic. Chelsea, though, are exploring whether there is any scope to launch an 11th-hour bid for Lukaku after being convinced the 24-year-old wanted to move to West London and with United still to formally complete a deal. But having agreed a £75m fee with Everton, United are optimistic that Lukaku will be their player by the time the squad fly to Los Angeles on Sunday for the start of their two-and-a-half-week pre-season tour of America.
package ast func concatBytes(a, b, c []byte) []byte { aLength := len(a) bLength := len(b) cLength := len(c) result := make([]byte, aLength+bLength+cLength+2) for i, byt := range a { result[i] = byt } result[aLength] = byte(' ') for i, byt := range b { result[i+bLength+1] = byt } result[aLength+bLength+1] = byte(' ') for i, byt := range b { result[i+aLength+bLength+2] = byt } return result }
April 7, 2016 FOCUSSING ON her newly released Women and Socialism: Class, Race, and Capitalism, author Sharon Smith brought her national speaking tour to Chicago on the eve of a strike and day of action led by the Chicago Teachers Union. Smith connected the struggle to tax the rich in order to fund the city that working people deserve--an immediate battle that brought at least 15,000 Chicagoans into the streets the following day--with a theoretical perspective that we need just as urgently in order to understand and build the fight for gender justice. The tour couldn't come at a more important time. Abortion access is under attack all over the country, as state governments race to pass cynical legislation that shutters clinics in the name of "protecting women." Chicago's neighbors in Indiana face one of the most sweeping measures yet--its restrictive provisions single out abortion providers for medically unnecessary regulations, including publishing the names and hospital admitting privileges of all abortion providers, while also requiring patients to undergo ultrasounds and banning abortions of a fetus diagnosed with Downs syndrome or other disabilities. Sharon Smith speaks at the Socialism conference (WeAreMany.org) Attacks on transgender people are on the rise, too. As local politicians all over the country pass measures either criminalizing or protecting trans access to public facilities, state lawmakers in North Carolina just struck down local anti-discrimination laws protecting the rights of LGBT people and mandated that trans people must use restrooms that don't conform to their gender identity. SMITH BEGAN by pointing out some of the many ways that daily life is marred by gender, racial and sexual oppression, from low wages and limited access to child care to vicious discrimination. But rampant sexual assault, homophobia and racism don't arise on their own, she argued--they serve a very profitable purpose. 
Even apparently contradictory stereotypes--like expectations that women be both sexually available and most fulfilled when scrubbing toilets for happy husbands and children--underpin a crucial process of social reproduction, Smith argued. Sexual objectification and the mythical bliss of motherhood are two sides of the same coin. In both cases, women's role is cleanly carved out in service of a presumed heterosexual, subservient destiny, ultimately best fulfilled in a nuclear family. What you can do Catch Sharon Smith's ongoing book tour as she discusses Women and Socialism: Class, Race, and Capitalism: April 12, Chicago, Illinois | April 14, Burlington, Vermont | April 20, Austin, Texas | April 30, Denver, Colorado Smith deftly described how the rise of the family structure we are encouraged to believe is natural and eternal actually emerged quite recently in human history--alongside systematic slavery. Women's unpaid labor inside the nuclear family provided and continues to provide crucial stability as class society has developed. Not only does "women's work" reproduce labor power from day to day and generation to generation, but it provides a basis for the continued inequality between owners and exploiters to be passed on over time, too. The centrality of that process to modern capitalism is quantifiable in the trillions of dollars of yearly, unpaid labor that women provide in the home. Propping up the lie that this work doesn't need to be paid for because it is in women's "nature" means punishing all other gender and sexual identities. Smith credited those who have led decades of struggles against homophobia and transphobia for illuminating the common roots of these oppressions in the role of women in the traditional family. 
Smith also addressed past and present ways in which racism has painfully shaped Black, Latina and other women of color's experiences of oppression, from widespread sterilization abuse to the use of sexual violence to both inflict and justify white-supremacist terror. It's an additional crime, Smith noted, that some of the sharpest voices and organized efforts to contest these conditions have been written out of conventional histories of feminist struggle. Barbara Smith, a luminary of Black feminist thought active since the 1960s, deserves to be at least as much of a household name as Betty Friedan, Smith argued. Since the struggles of the 1960s and '70s, capitalism has bolstered its unjust order by attacking and distorting the gains of social movements. Today, we face the greatest income inequality the United States has seen in 100 years, made possible in part by attacking women's rights and living standards. But Smith painted an inspiring picture of what the alternative could look like, recounting how the Russian Revolution of 1917 granted legal freedoms to women and took steps to collectivize the drudgery most women face in private households. Such measures were unheard of in their time--and are still unmatched in the most developed capitalist countries today. THE PRESENTATION opened up a wide-ranging discussion among teachers, midwives, students and activists who attended the forum. People spoke about how Smith's framework brought to light the limits of Hillary Clinton's supposed feminism, which is dedicated to promoting the racist, sexist and war-hungry needs of the wealthy elite. Others questioned how to bring up feminist perspectives and demands in rising movements today, many of which are being initiated and led by women--from Idle No More, to Black Lives Matter and the Fight for 15. 
At the same time, grassroots organizing focused primarily on feminist politics remains small and scattered, hemmed in on one side by liberal organizations tied to Democrats like Clinton and on the other by pessimism about the ability to unite in multiracial, multigender and intersectional solidarity. We were lucky to have an example to look forward to of exactly what solidarity can look like: The following day's citywide actions were anchored by Chicago teachers, a majority of whom are women, and whose union is led by President Karen Lewis. The Chicago Tribune resorted to slander, referring to the strike as "tantrum day," desperately hoping to shame teachers into crossing picket lines. But the CTU had already built critical momentum in collaboration with parents, public-sector workers, fast-food workers, and public universities facing closure due to Gov. Bruce Rauner's insistence on a budget that will gut services across the board. Women and women of color make up disproportionate numbers of affected workers, and the day's agenda explicitly linked the fight for funding with the fight for justice against racism and police brutality. United action like what unfolded in Chicago on April 1 needs to grow and multiply in cities all over the country. And while the fight to stem the tide of attacks on women's rights remains much smaller, we need the clarity and insight that Smith's book and speaking tour offer to help keep our eyes on the prize.
Closing the gastrin loop in pancreatic carcinoma Alpha‐amidated gastrin promotes the growth of nontransfected pancreatic cell lines expressing the gastrin/cholecystokinin (CCK)‐B receptor. Gastrin/CCK‐B and CCK‐A receptors recently were demonstrated in human pancreatic adenocarcinomas, but to the authors' knowledge expression of their ligands to date has not been adequately investigated. As a prerequisite for making suggestions regarding local growth stimulation, the authors examined whether gastrin and the homologous CCK peptides as well as their specific receptors were expressed in consecutively collected solid human pancreatic adenocarcinomas.
/**
 * Create an SVG text element at the given position.
 * @param doc the document that acts as the element factory
 * @param x   the horizontal start position
 * @param y   the vertical start position
 * @param str the character data the element will contain
 * @return a new, unattached text element wrapping {@code str}
 */
public static Element createText(Document doc, float x, float y, String str) {
    final Element textElement = doc.createElementNS(SVG_NS, "text");
    textElement.setAttributeNS(null, "x", String.valueOf(x));
    textElement.setAttributeNS(null, "y", String.valueOf(y));
    textElement.appendChild(doc.createTextNode(str));
    return textElement;
}
/*
 * AnmUnitNodeList.cpp
 *
 *  Created on: 28/07/2014
 *      Author: jestrada
 *
 * Holds the list of coarse-grain Unit nodes used by the internal-coordinates
 * ANM modes calculator, and owns the Unit objects it stores (they are freed
 * in the destructor).
 */
#include "AnmUnitNodeList.h"
#include <string>
#include "../PELE/PeleTasks/Output/LogUtils.h"
#include "ModesCalculator/Internals/CoarseGrainModel/UnitsBuilder.h"
#include "../Tools/Utils.h"
#include "ModesCalculator/Internals/CoarseGrainModel/Unit.h"

using namespace std;

AnmUnitNodeList::AnmUnitNodeList(){
}

// Destroys the stored Unit objects; this list has owning semantics.
AnmUnitNodeList::~AnmUnitNodeList(){
	Utils::clearVector(this->nodeList);
}

// Returns a copy of the node list (pointers are shared, the vector is not).
// NOTE(review): returning by value copies the whole vector on every call --
// callers on hot paths may want a const reference instead.
std::vector<Unit*> AnmUnitNodeList::getNodeList() const {
	return this->nodeList;
}

// Placeholder: should translate the node set so its center of mass is at
// the origin. Currently only prints a warning.
void AnmUnitNodeList::centerAtCOM(){
	//TODO: IMPLEMENT!
	cout<<"***ATENTION****"<<endl<<"AnmUnitNodeList::centerAtCOM must be implemented"<<endl<<"*********"<<endl;
}

// Replaces the stored node list. Ownership of the Unit pointers is taken
// over by this list (they are deleted in the destructor).
void AnmUnitNodeList::setNodeList(vector<Unit*>& units) {
	this->nodeList = units;
}

// Refreshes every Unit of the coarse-grain model (e.g. after coordinates
// have changed).
void AnmUnitNodeList::updateUnitList(){
	cout<<"DBG: Updating CG Model"<<endl;
	for(unsigned int i = 0; i< nodeList.size();++i){
		nodeList[i]->update();
	}
}

// Human-readable listing of the unit labels, for logging.
string AnmUnitNodeList::showNodeList(){
	return LogUtils::showUnitLabels(this->nodeList);
}

// Number of units currently stored.
unsigned int AnmUnitNodeList::size(){
	return this->nodeList.size();
}
Operation Supply Drop’s third annual fundraising event, the “8-Bit Salute”, will take place May 17 – 18, 2014 and gamers can play all day and night to help raise funds to send video game equipment and titles to US troops deployed to combat zones or recovering in military hospitals. Naughty Dog, Wargaming America, Rooster Teeth and ASTRO Gaming will be part of the charity event. The developers referenced have pledged significant support and will field fundraising teams via the Operation Supply Drop website. Those who wish to participate can log into the Operation Supply Drop website to either join the current teams or create teams of their own to raise funds for the troops, while having a chance to win a variety of prizes. More than a dozen additional video game developers and publishers will be fielding fund-raising teams, supplying donations and video games. Founded in 2010 by Army Airborne Ranger, Captain Stephen “Shanghai Six” Machuga, Operation Supply Drop is a public charity that sends video game themed care packages to US military troops both deployed to combat zones and recovering in military hospitals. To date, Operation Supply Drop has raised more than $700,000 in games and gear and has helped approximately 2,500 servicemen and women worldwide.
#include<bits/stdc++.h> #define REP(i,a,b) for(int i=a;i<=b;i++) #define MS0(a) memset(a,0,sizeof(a)) #define PII pair<int,int> using namespace std; typedef long long ll; const int maxn=3100; const int INF=1e9+10; int n,m; int u,v; vector<int> G[maxn]; int d[maxn][maxn]; int from[maxn][3],to[maxn][3]; bool vis[maxn]; void bfs(int s) { queue<int> q; MS0(vis); q.push(s);vis[s]=1; d[s][s]=0; while(!q.empty()){ int u=q.front();q.pop(); for(int i=0;i<G[u].size();i++){ int v=G[u][i]; if(vis[v]) continue; q.push(v);vis[v]=1; d[s][v]=d[s][u]+1; } } } void Init() { REP(u,1,n) bfs(u); REP(u,1,n){ vector<PII> dt; REP(v,1,n) dt.push_back({d[u][v],v}); sort(dt.begin(),dt.end()); int sz=dt.size(); REP(i,0,2) from[u][i]=dt[sz-1-i].second; } REP(v,1,n){ vector<PII> dt; REP(u,1,n) dt.push_back({d[u][v],u}); sort(dt.begin(),dt.end()); int sz=dt.size(); REP(i,0,2) to[v][i]=dt[sz-1-i].second; } } void solve() { int res=0,A=0,D=0,B=0,C=0; REP(b,1,n){ REP(c,1,n){ if(b==c||d[b][c]==0) continue; REP(i,0,2){ int a=to[b][i]; if(a==c||a==b||d[a][b]==0) continue; REP(j,0,2){ int e=from[c][j]; if(e==a||e==b||e==c||d[c][e]==0) continue; int tmp=d[a][b]+d[b][c]+d[c][e]; if(tmp>res){ A=a;B=b;C=c;D=e; res=tmp; } } } } } printf("%d %d %d %d\n",A,B,C,D); //cout<<"res="<<res<<endl; } int main() { //freopen("in.txt","r",stdin); while(cin>>n>>m){ REP(i,1,n) REP(j,1,n) d[i][j]=0; REP(i,1,n) G[i].clear(); REP(i,1,m){ scanf("%d%d",&u,&v); if(d[u][v]) continue; d[u][v]=1; G[u].push_back(v); } Init(); solve(); } return 0; }
JALAWLA, Iraq (Reuters) - It was a tip-off about a weapons cache that drew the U.S. soldiers of Charlie Troop away from their Stryker armored vehicles in the densely populated Iraqi town of Jalawla one Friday morning last month. A U.S. Army soldier looks through the scope of his rifle during a patrol in the town of Jalawla, northeast of Baghdad, June 28, 2010. REUTERS/Saad Shalash That was when the suicide bomber struck, detonating a car bomb so “catastrophic” that details of the attack that killed Sergeant Israel O’Bryan and Specialist William Yauch are still hazy, their commanding officer said. One thing was clear: the insurgency in Jalawla won’t lie down. Like other towns across Iraq’s restive northern provinces of Diyala, Kirkuk and Nineveh, Jalawla defies the U.S. narrative of an end to combat operations next month under a plan to pull out of Iraq completely by the end of 2011. “I would say we’re pretty far from rolling up the insurgency in Jalawla,” said Charlie Troop commander Captain Mark Adams of the 1st Squadron, 14th U.S. Cavalry. “I don’t feel we’ve made a whole lot of progress there.” For the ethnically and religiously-mixed arc running from Jalawla near Iraq’s eastern border with Iran to the western frontier with Syria, the transition on August 31 is less a milestone than a matter of semantics. Operations that to outsiders will look pretty much like combat will continue in areas where a stubborn Sunni Islamist insurgency remains entrenched, despite a sharp fall in overall violence since the height of the sectarian slaughter in 2006/07. They will, however, be called “stability operations,” loosely defined as advising, assisting, training and equipping Iraqi forces — a role U.S. forces have had for some time. U.S. troops will “continue to conduct partnered counter terrorism operations to maintain pressure on extremist terrorist networks,” said chief spokesman Major General Stephen Lanza. U.S. 
troop numbers will fall to 50,000 on September 1 from around 77,000 now. Bases are closing, hardware going to Afghanistan and units flying home without replacement. In disputed territories adjacent to Iraq’s semi-autonomous Kurdistan region, where Arabs and Kurds wrestle over land and power, insurgent cells have regrouped after being driven out of much of Iraq’s Sunni heartland. Here, U.S. soldiers will still occasionally shoot, and be shot at after September 1. Al Qaeda “is down but not out,” said U.S. forces Division North commander Major General Tony Cucolo. “We take down a cell, but on a smaller, less capable level it re-forms.” The threat “can’t be handled” by Iraqi Security Forces “as they are,” he said on a Blackhawk helicopter flight over Diyala. PLAYING SECOND FIDDLE The response to the Jalawla attack on June 11 provides a snapshot of the challenges and frustrations that confront U.S. forces often playing second fiddle to their Iraqi counterparts. While U.S. special forces successfully hunted down at least one suspected insurgent, Iraqi police failed to turn up for a 6 a.m. (0300 GMT) roadside rendezvous on the last day of a two-week search operation across Jalawla. They began without U.S. support and found nothing. “We’re supposed to clear the whole town, but they never find anything,” said Lieutenant Jan Dudzinski, 26, seeking shade in the desert as his platoon provided a “cordon” for the operation named Jalawla Peacemaker. Trust between the two forces is low. “The planning, the way they do it, doesn’t work,” said Sergeant Jeremy Hare, a 32-year-old veteran of four Iraq tours. “They get bored of it and don’t clear as well.” As other bases close, Forward Operating Base Cobra in Diyala will remain at the same strength beyond Sept 1. U.S. soldiers will continue to man checkpoints with Iraqi army and Kurdish Peshmerga forces, an exercise in cooperation which some observers say might not survive a U.S. departure. 
Slideshow (3 Images) A recent spike in violence, with mortar rounds lobbed at FOB Cobra and nearby Checkpoint Three, had reinforced the need for a robust U.S. presence, said Major Robert Halvorson, who drafted the military’s report into the Jalawla attack. The insurgents were perhaps trying to exploit a political paralysis in the capital, where Sunni, Shi’ite and Kurdish political factions have yet to form a government almost four months after an election, officers said. “By all their activity here they’re actually drawing us here,” said Halvorson, “and this is where we’re going to fight them so people don’t have to fight them in Baghdad.”
# https://leetcode.com/problems/find-positive-integer-solution-for-a-given-equation/
# Easy (64.08%)
# Total Accepted: 4,134
# Total Submissions: 6,451
# beats 100.0 % of python submissions

"""
This is the custom function interface.
You should not implement it, or speculate about its implementation
class CustomFunction:
    # Returns f(x, y) for any given positive integers x and y.
    # Note that f(x, y) is increasing with respect to both x and y.
    # i.e. f(x, y) < f(x + 1, y), f(x, y) < f(x, y + 1)
    def f(self, x, y):
"""


class Solution(object):
    def findSolution(self, customfunction, z):
        """Return every pair [x, y] with 1 <= x, y <= 1000 and f(x, y) == z.

        Staircase walk: x only ever moves right, y only ever moves down.
        Because f is strictly increasing in both arguments, once
        f(x, y) > z no larger y can work for this x, and once
        f(x, y) < z no larger x can work for this y — so each pointer
        moves at most 1000 steps and every solution is visited once,
        in ascending x order.

        :type customfunction: CustomFunction
        :type z: int
        :rtype: List[List[int]]
        """
        pairs = []
        x, y = 1, 1000
        while x <= 1000 and y >= 1:
            val = customfunction.f(x, y)
            if val > z:
                y -= 1
            elif val < z:
                x += 1
            else:
                pairs.append([x, y])
                x += 1
        return pairs
The Knicks' overall 3-point percentage as a team this season wouldn't indicate they have the best long-range gunner in the game. Currently, the team's 33.1 percent accuracy from downtown is the eighth-worst in the NBA. But Steve Novak has basically single-handedly put the Knicks in a tie for fifth place for most 3-pointers made (488). Novak is 125-for-268, and those makes equal 25.6 percent of the total team amount. Carmelo Anthony is second on the team with 65 (33.2 percent accuracy), and J.R. Smith follows him with 62 (34.8 percent accuracy). As for Novak? Currently 46.6 percent -- the best in the league. In fact, he's within striking distance of the best 3-point shooting season ever (with a minimum of 250 3-pointers attempted). Former Knick Hubert Davis is third on the all-time list. Here's a breakdown: Highest 3-Point Percentage In A Single Season All-Time* 47.8 (162-for-339) -- Dale Ellis (SuperSonics; 1988-89) 47.8 (177-for-370) -- Joe Johnson (Hawks; 2004-05) 47.6 (127-for-267) -- Hubert Davis (Knicks; 1995-96) 47.0 (179-for-381) -- Steve Nash (Suns; 2007-08) 47.0 (207-for-440) -- Glen Rice (Hornets; 1996-97) *Minimum of 250 3-point attempts Since Novak's breakout game on February 6, when he was 5-for-8 from beyond the arc in a 99-88 win over the Jazz, he's averaged 2.9 makes out of 6.2 3-point attempts in only 21.4 minutes per game. That means per 48 minutes, Novak is on pace to finish with the most 3-pointers made in a single season ever (with a minimum of 100 made). Smith is currently first on the all-time list. Here's a breakdown: Most 3-Pointers Per 48 Minutes In A Single Season* 5.3 -- J.R. Smith (Nuggets; 2007-08) 4.9 -- Eddie House (Celtics; 2008-09) 4.9 -- Steve Novak (Clippers; 2008-09) 4.9 -- J.R. Smith (Nuggets; 2006-07) 4.8 -- Dennis Scott (Magic; 1994-95) *Minimum of 100 3-pointers made What's most impressive about Novak is that he's money from all across the perimeter.
The one area where he's slightly higher is from the right wing to the baseline corner. To illustrate that, here's his ESPN "Zone Chart" (entering Friday's play): ESPN Stats & Info Against the Cavaliers, he was 5-for-12 from 3-point territory. So on the season up until this point, he's 61-for-120 from the right side, 23-for-55 from the top of the key and 41-for-94 from the left side. Whereas most players say the shortened season has posed challenges, interestingly Novak told ESPNNewYork.com recently that the condensed schedule has benefited him as a shooter. "I think, to be honest, and it may seem funny, but the amount of games that we're playing I feel like helps," Novak said. "Because you never go that long of a stretch without getting up or down about a great night or a down night. In like two hours, it's kind of like so what? You've got to do it again tomorrow. "It's a what-have-you-done-for-me lately kind of thing. It's just kind of nice to know that if you miss a couple or it doesn't go your way one night, you get to play right away. So I feel like it's kind of easier to keep that groove because mentally it's easy too." Of course, Novak's accuracy has a lot to do with his preparation, which is basically the same every practice and game. After practices, he works his way around the perimeter several times with an assistant coach, getting extra shots in. Then, about an hour before tip-off, he practices different shooting routines -- some standstill, some off of screens, some running into his set-up and others from five feet behind the 3-point line. Novak always looks balanced upon lift-off and release, which is a credit to his footwork, and during games he utilizes a timely pump fake to create space when the defender is closing in. In addition to his technique, Novak said his confidence is higher than it's ever been in his career. 
Every day, Novak estimates that he puts up between 500 and 1,000 shots -- something he's been doing ever since he was a kid growing up in Wisconsin. Novak didn't stop then and he hasn't stopped now. You can follow Jared Zwerling on Twitter.
package com.kavanj.marioai.nodes;

/**
 * A leaf {@link Node} that ignores all game state and always evaluates to the
 * same fixed boolean.
 *
 * Created by joe on 2015-05-12.
 */
public class StaticNode implements Node {

    /** Shared instance that always evaluates to {@code true}. */
    public static final StaticNode TRUE_NODE = new StaticNode(true);

    /** Shared instance that always evaluates to {@code false}. */
    public static final StaticNode FALSE_NODE = new StaticNode(false);

    /** The constant result reported by {@link #run()}. */
    private final boolean result;

    /**
     * @param returnValue the fixed value this node will always return
     */
    public StaticNode(boolean returnValue) {
        result = returnValue;
    }

    @Override
    public boolean run() {
        return result;
    }
}
// Copyright 2016 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. //! Scheduler which schedules the execution of `storage::Command`s. //! //! There is one scheduler for each store. It receives commands from clients, executes them against //! the MVCC layer storage engine. //! //! Logically, the data organization hierarchy from bottom to top is row -> region -> store -> //! database. But each region is replicated onto N stores for reliability, the replicas form a Raft //! group, one of which acts as the leader. When the client read or write a row, the command is //! sent to the scheduler which is on the region leader's store. //! //! Scheduler runs in a single-thread event loop, but command executions are delegated to a pool of //! worker thread. //! //! Scheduler keeps track of all the running commands and uses latches to ensure serialized access //! to the overlapping rows involved in concurrent commands. But note that scheduler only ensures //! serialized access to the overlapping rows at command level, but a transaction may consist of //! multiple commands, therefore conflicts may happen at transaction level. Transaction semantics //! is ensured by the transaction protocol implemented in the client library, which is transparent //! to the scheduler. 
use std::boxed::Box;
use std::fmt::{self, Formatter, Debug};
use threadpool::ThreadPool;
use prometheus::HistogramTimer;
use storage::{Engine, Command, Snapshot, StorageCb, Result as StorageResult,
              Error as StorageError, ScanMode};
use kvproto::kvrpcpb::{Context, LockInfo};
use storage::mvcc::{MvccTxn, MvccReader, Error as MvccError, MAX_TXN_WRITE_SIZE};
use storage::{Key, Value, KvPair};
use storage::engine::CbContext;
use std::collections::HashMap;
use mio::{self, EventLoop};
use util::transport::SendCh;
use storage::engine::{Result as EngineResult, Callback as EngineCallback, Modify};
use super::Result;
use super::Error;
use super::store::SnapshotStore;
use super::latch::{Latches, Lock};
use super::super::metrics::*;

// TODO: make it configurable.
// Batch sizes cap how many keys one Gc / ResolveLock step touches; longer jobs
// are continued via `ProcessResult::NextCommand` instead of running unbounded.
pub const GC_BATCH_SIZE: usize = 512;
pub const RESOLVE_LOCK_BATCH_SIZE: usize = 512;

/// Process result of a command.
pub enum ProcessResult {
    /// Plain success, no payload.
    Res,
    /// Per-key outcomes (e.g. one result per prewrite mutation).
    MultiRes { results: Vec<StorageResult<()>> },
    /// Key-value pairs produced by batch-get / scan.
    MultiKvpairs { pairs: Vec<StorageResult<KvPair>> },
    /// A single optional value (get / raw-get).
    Value { value: Option<Value> },
    /// Locks discovered by a lock scan.
    Locks { locks: Vec<LockInfo> },
    /// The command finished one batch; `cmd` is its continuation.
    NextCommand { cmd: Command },
    /// The command failed with `err`.
    Failed { err: StorageError },
}

/// Message types for the scheduler event loop.
pub enum Msg {
    Quit,
    /// A fresh command submitted by a client.
    RawCmd { cmd: Command, cb: StorageCb },
    /// Engine finished (or failed) producing a snapshot for command `cid`.
    SnapshotFinished {
        cid: u64,
        cb_ctx: CbContext,
        snapshot: EngineResult<Box<Snapshot>>,
    },
    /// A worker thread finished a read-only command.
    ReadFinished { cid: u64, pr: ProcessResult },
    /// A worker thread prepared a write; `to_be_write` is what must be persisted.
    WritePrepareFinished {
        cid: u64,
        cmd: Command,
        pr: ProcessResult,
        to_be_write: Vec<Modify>,
    },
    /// Write preparation failed (typically a transaction conflict).
    WritePrepareFailed { cid: u64, err: Error },
    /// The engine finished (or failed) persisting a prepared write.
    WriteFinished {
        cid: u64,
        pr: ProcessResult,
        cb_ctx: CbContext,
        result: EngineResult<()>,
    },
}

/// Debug for messages.
impl Debug for Msg {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match *self {
            Msg::Quit => write!(f, "Quit"),
            Msg::RawCmd { ref cmd, .. } => write!(f, "RawCmd {:?}", cmd),
            Msg::SnapshotFinished { cid, .. } => write!(f, "SnapshotFinished [cid={}]", cid),
            Msg::ReadFinished { cid, .. } => write!(f, "ReadFinished [cid={}]", cid),
            Msg::WritePrepareFinished { cid, ref cmd, .. } => {
                write!(f, "WritePrepareFinished [cid={}, cmd={:?}]", cid, cmd)
            }
            Msg::WritePrepareFailed { cid, ref err } => {
                write!(f, "WritePrepareFailed [cid={}, err={:?}]", cid, err)
            }
            Msg::WriteFinished { cid, .. } => write!(f, "WriteFinished [cid={}]", cid),
        }
    }
}

/// Delivers the process result of a command to the storage callback.
///
/// Each callback flavor accepts exactly one success-shaped `ProcessResult`
/// plus `Failed`; any other pairing is a scheduler logic bug, hence panic.
fn execute_callback(callback: StorageCb, pr: ProcessResult) {
    match callback {
        StorageCb::Boolean(cb) => {
            match pr {
                ProcessResult::Res => cb(Ok(())),
                ProcessResult::Failed { err } => cb(Err(err)),
                _ => panic!("process result mismatch"),
            }
        }
        StorageCb::Booleans(cb) => {
            match pr {
                ProcessResult::MultiRes { results } => cb(Ok(results)),
                ProcessResult::Failed { err } => cb(Err(err)),
                _ => panic!("process result mismatch"),
            }
        }
        StorageCb::SingleValue(cb) => {
            match pr {
                ProcessResult::Value { value } => cb(Ok(value)),
                ProcessResult::Failed { err } => cb(Err(err)),
                _ => panic!("process result mismatch"),
            }
        }
        StorageCb::KvPairs(cb) => {
            match pr {
                ProcessResult::MultiKvpairs { pairs } => cb(Ok(pairs)),
                ProcessResult::Failed { err } => cb(Err(err)),
                _ => panic!("process result mismatch"),
            }
        }
        StorageCb::Locks(cb) => {
            match pr {
                ProcessResult::Locks { locks } => cb(Ok(locks)),
                ProcessResult::Failed { err } => cb(Err(err)),
                _ => panic!("process result mismatch"),
            }
        }
    }
}

/// Context for a running command.
pub struct RunningCtx {
    cid: u64,
    // Taken (set to None) when the command is handed to a worker thread.
    cmd: Option<Command>,
    lock: Lock,
    // Taken when the command finishes and the callback is invoked.
    callback: Option<StorageCb>,
    tag: &'static str,
    // Measures latch-wait time; dropped (stopping the timer) once latches are acquired.
    latch_timer: Option<HistogramTimer>,
    // Measures total command lifetime; stops when the ctx is dropped.
    _timer: HistogramTimer,
}

impl RunningCtx {
    /// Creates a context for a running command.
    pub fn new(cid: u64, cmd: Command, lock: Lock, cb: StorageCb) -> RunningCtx {
        let tag = cmd.tag();
        RunningCtx {
            cid: cid,
            cmd: Some(cmd),
            lock: lock,
            callback: Some(cb),
            tag: tag,
            latch_timer: Some(SCHED_LATCH_HISTOGRAM_VEC.with_label_values(&[tag]).start_timer()),
            _timer: SCHED_HISTOGRAM_VEC.with_label_values(&[tag]).start_timer(),
        }
    }
}

/// Creates a callback to receive async results of write prepare from the storage engine.
fn make_engine_cb(cid: u64, pr: ProcessResult, ch: SendCh<Msg>) -> EngineCallback<()> {
    Box::new(move |(cb_ctx, result)| {
        if let Err(e) = ch.send(Msg::WriteFinished {
            cid: cid,
            pr: pr,
            cb_ctx: cb_ctx,
            result: result,
        }) {
            // Losing this message would leak the command context and its latches.
            panic!("send write finished to scheduler failed cid={}, err:{:?}", cid, e);
        }
    })
}

/// Scheduler which schedules the execution of `storage::Command`s.
pub struct Scheduler {
    engine: Box<Engine>,
    // cid -> context
    cmd_ctxs: HashMap<u64, RunningCtx>,
    schedch: SendCh<Msg>,
    // cmd id generator
    id_alloc: u64,
    // write concurrency control
    latches: Latches,
    sched_too_busy_threshold: usize,
    // worker pool
    worker_pool: ThreadPool,
}

impl Scheduler {
    /// Creates a scheduler.
    pub fn new(engine: Box<Engine>,
               schedch: SendCh<Msg>,
               concurrency: usize,
               worker_pool_size: usize,
               sched_too_busy_threshold: usize)
               -> Scheduler {
        Scheduler {
            engine: engine,
            cmd_ctxs: HashMap::new(),
            schedch: schedch,
            id_alloc: 0,
            latches: Latches::new(concurrency),
            sched_too_busy_threshold: sched_too_busy_threshold,
            worker_pool: ThreadPool::new_with_name(thd_name!("sched-worker-pool"),
                                                   worker_pool_size),
        }
    }
}

/// Processes a read command within a worker thread, then posts `ReadFinished` message back to the
/// event loop.
fn process_read(cid: u64, mut cmd: Command, ch: SendCh<Msg>, snapshot: Box<Snapshot>) {
    debug!("process read cmd(cid={}) in worker pool.", cid);
    SCHED_WORKER_COUNTER_VEC.with_label_values(&[cmd.tag(), "read"]).inc();
    let pr = match cmd {
        // Gets from the snapshot.
        Command::Get { ref key, start_ts, .. } => {
            let snap_store = SnapshotStore::new(snapshot.as_ref(), start_ts);
            let res = snap_store.get(key);
            match res {
                Ok(val) => ProcessResult::Value { value: val },
                Err(e) => ProcessResult::Failed { err: StorageError::from(e) },
            }
        }
        // Batch gets from the snapshot.
        Command::BatchGet { ref keys, start_ts, .. } => {
            let snap_store = SnapshotStore::new(snapshot.as_ref(), start_ts);
            match snap_store.batch_get(keys) {
                Ok(results) => {
                    let mut res = vec![];
                    // Missing keys (Ok(None)) are silently dropped from the reply.
                    for (k, v) in keys.into_iter().zip(results) {
                        match v {
                            Ok(Some(x)) => res.push(Ok((k.raw().unwrap(), x))),
                            Ok(None) => {}
                            Err(e) => res.push(Err(StorageError::from(e))),
                        }
                    }
                    ProcessResult::MultiKvpairs { pairs: res }
                }
                Err(e) => ProcessResult::Failed { err: StorageError::from(e) },
            }
        }
        // Scans a range starting with `start_key` up to `limit` rows from the snapshot.
        Command::Scan { ref start_key, limit, start_ts, ref options, .. } => {
            let snap_store = SnapshotStore::new(snapshot.as_ref(), start_ts);
            let res = snap_store.scanner(ScanMode::Forward, options.key_only, None)
                .and_then(|mut scanner| scanner.scan(start_key.clone(), limit))
                .and_then(|mut results| {
                    Ok(results.drain(..).map(|x| x.map_err(StorageError::from)).collect())
                });
            match res {
                Ok(pairs) => ProcessResult::MultiKvpairs { pairs: pairs },
                Err(e) => ProcessResult::Failed { err: e.into() },
            }
        }
        // Scans locks with timestamp <= `max_ts`
        Command::ScanLock { max_ts, .. } => {
            let mut reader =
                MvccReader::new(snapshot.as_ref(), Some(ScanMode::Forward), true, None);
            let res = reader.scan_lock(None, |lock| lock.ts <= max_ts, None)
                .map_err(Error::from)
                .and_then(|(v, _)| {
                    let mut locks = vec![];
                    for (key, lock) in v {
                        let mut lock_info = LockInfo::new();
                        lock_info.set_primary_lock(lock.primary);
                        lock_info.set_lock_version(lock.ts);
                        lock_info.set_key(try!(key.raw()));
                        locks.push(lock_info);
                    }
                    Ok(locks)
                });
            match res {
                Ok(locks) => ProcessResult::Locks { locks: locks },
                Err(e) => ProcessResult::Failed { err: e.into() },
            }
        }
        // Scan the locks with timestamp `start_ts`, then either commit them if the command has
        // commit timestamp populated or rollback otherwise.
        Command::ResolveLock { ref ctx, start_ts, commit_ts, ref mut scan_key, .. } => {
            let mut reader =
                MvccReader::new(snapshot.as_ref(), Some(ScanMode::Forward), true, None);
            // The read phase only collects one batch of matching locks; the
            // actual commit/rollback happens in the follow-up write command.
            let res = reader.scan_lock(scan_key.take(),
                           |lock| lock.ts == start_ts,
                           Some(RESOLVE_LOCK_BATCH_SIZE))
                .map_err(Error::from)
                .and_then(|(v, next_scan_key)| {
                    let keys: Vec<Key> = v.into_iter().map(|x| x.0).collect();
                    if keys.is_empty() {
                        Ok(None)
                    } else {
                        Ok(Some(Command::ResolveLock {
                            ctx: ctx.clone(),
                            start_ts: start_ts,
                            commit_ts: commit_ts,
                            scan_key: next_scan_key,
                            keys: keys,
                        }))
                    }
                });
            match res {
                Ok(Some(cmd)) => ProcessResult::NextCommand { cmd: cmd },
                Ok(None) => ProcessResult::Res,
                Err(e) => ProcessResult::Failed { err: e.into() },
            }
        }
        // Collects garbage.
        Command::Gc { ref ctx, safe_point, ref mut scan_key, .. } => {
            let mut reader =
                MvccReader::new(snapshot.as_ref(), Some(ScanMode::Forward), true, None);
            // Like ResolveLock: gather one batch of keys here, do the actual
            // GC writes in the continuation command.
            let res = reader.scan_keys(scan_key.take(), GC_BATCH_SIZE)
                .map_err(Error::from)
                .and_then(|(keys, next_start)| {
                    if keys.is_empty() {
                        Ok(None)
                    } else {
                        Ok(Some(Command::Gc {
                            ctx: ctx.clone(),
                            safe_point: safe_point,
                            scan_key: next_start,
                            keys: keys,
                        }))
                    }
                });
            match res {
                Ok(Some(cmd)) => ProcessResult::NextCommand { cmd: cmd },
                Ok(None) => ProcessResult::Res,
                Err(e) => ProcessResult::Failed { err: e.into() },
            }
        }
        Command::RawGet { ref key, .. } => {
            match snapshot.get(key) {
                Ok(val) => ProcessResult::Value { value: val },
                Err(e) => ProcessResult::Failed { err: StorageError::from(e) },
            }
        }
        _ => panic!("unsupported read command"),
    };
    if let Err(e) = ch.send(Msg::ReadFinished { cid: cid, pr: pr }) {
        // Todo: if this happens we need to clean up command's context
        panic!("send read finished failed, cid={}, err={:?}", cid, e);
    }
}

/// Processes a write command within a worker thread, then posts either a `WritePrepareFinished`
/// message if successful or a `WritePrepareFailed` message back to the event loop.
fn process_write(cid: u64, cmd: Command, ch: SendCh<Msg>, snapshot: Box<Snapshot>) {
    SCHED_WORKER_COUNTER_VEC.with_label_values(&[cmd.tag(), "write"]).inc();
    if let Err(e) = process_write_impl(cid, cmd, ch.clone(), snapshot.as_ref()) {
        if let Err(err) = ch.send(Msg::WritePrepareFailed { cid: cid, err: e }) {
            // Todo: if this happens, lock will hold for ever
            panic!("send WritePrepareFailed message to channel failed. cid={}, err={:?}",
                   cid,
                   err);
        }
    }
}

// Runs the MVCC transaction logic for a write command against `snapshot` and
// posts the resulting modifications back to the event loop for persistence.
fn process_write_impl(cid: u64,
                      mut cmd: Command,
                      ch: SendCh<Msg>,
                      snapshot: &Snapshot)
                      -> Result<()> {
    let (pr, modifies) = match cmd {
        Command::Prewrite { ref mutations, ref primary, start_ts, ref options, ..
        } => {
            let mut txn = MvccTxn::new(snapshot, start_ts, None);
            let mut results = vec![];
            for m in mutations {
                match txn.prewrite(m.clone(), primary, options) {
                    Ok(_) => results.push(Ok(())),
                    // Key conflicts are reported per-mutation so the client can
                    // resolve them; any other error aborts the whole prewrite.
                    e @ Err(MvccError::KeyIsLocked { .. }) => results.push(e.map_err(Error::from)),
                    Err(e) => return Err(Error::from(e)),
                }
            }
            let res = results.drain(..).map(|x| x.map_err(StorageError::from)).collect();
            let pr = ProcessResult::MultiRes { results: res };
            (pr, txn.modifies())
        }
        Command::Commit { ref keys, lock_ts, commit_ts, .. } => {
            let mut txn = MvccTxn::new(snapshot, lock_ts, None);
            for k in keys {
                try!(txn.commit(&k, commit_ts));
            }
            let pr = ProcessResult::Res;
            (pr, txn.modifies())
        }
        Command::Cleanup { ref key, start_ts, .. } => {
            let mut txn = MvccTxn::new(snapshot, start_ts, None);
            try!(txn.rollback(&key));
            let pr = ProcessResult::Res;
            (pr, txn.modifies())
        }
        Command::Rollback { ref keys, start_ts, .. } => {
            let mut txn = MvccTxn::new(snapshot, start_ts, None);
            for k in keys {
                try!(txn.rollback(&k));
            }
            let pr = ProcessResult::Res;
            (pr, txn.modifies())
        }
        Command::ResolveLock { ref ctx, start_ts, commit_ts, ref mut scan_key, ref keys } => {
            let mut scan_key = scan_key.take();
            let mut txn = MvccTxn::new(snapshot, start_ts, None);
            for k in keys {
                // commit_ts present => the original txn committed; otherwise roll back.
                match commit_ts {
                    Some(ts) => try!(txn.commit(&k, ts)),
                    None => try!(txn.rollback(&k)),
                }
                // Cap the write batch; resume from this key in a follow-up command.
                if txn.write_size() >= MAX_TXN_WRITE_SIZE {
                    scan_key = Some(k.to_owned());
                    break;
                }
            }
            if scan_key.is_none() {
                (ProcessResult::Res, txn.modifies())
            } else {
                let pr = ProcessResult::NextCommand {
                    cmd: Command::ResolveLock {
                        ctx: ctx.clone(),
                        start_ts: start_ts,
                        commit_ts: commit_ts,
                        scan_key: scan_key.take(),
                        keys: vec![],
                    },
                };
                (pr, txn.modifies())
            }
        }
        Command::Gc { ref ctx, safe_point, ref mut scan_key, ref keys } => {
            let mut scan_key = scan_key.take();
            let mut txn = MvccTxn::new(snapshot, 0, Some(ScanMode::Mixed));
            for k in keys {
                try!(txn.gc(k, safe_point));
                // Same batching strategy as ResolveLock above.
                if txn.write_size() >= MAX_TXN_WRITE_SIZE {
                    scan_key = Some(k.to_owned());
                    break;
                }
            }
            if scan_key.is_none() {
                (ProcessResult::Res, txn.modifies())
            } else {
                let pr = ProcessResult::NextCommand {
                    cmd: Command::Gc {
                        ctx: ctx.clone(),
                        safe_point: safe_point,
                        scan_key: scan_key.take(),
                        keys: vec![],
                    },
                };
                (pr, txn.modifies())
            }
        }
        _ => panic!("unsupported write command"),
    };
    box_try!(ch.send(Msg::WritePrepareFinished {
        cid: cid,
        cmd: cmd,
        pr: pr,
        to_be_write: modifies,
    }));
    Ok(())
}

impl Scheduler {
    /// Generates the next command ID.
    fn gen_id(&mut self) -> u64 {
        self.id_alloc += 1;
        self.id_alloc
    }

    /// Registers a running command context; cid must be fresh (ids never repeat).
    fn insert_ctx(&mut self, ctx: RunningCtx) {
        let cid = ctx.cid;
        if self.cmd_ctxs.insert(cid, ctx).is_some() {
            panic!("command cid={} shouldn't exist", cid);
        }
        SCHED_CONTEX_GAUGE.set(self.cmd_ctxs.len() as f64);
    }

    /// Removes and returns the context of a finished command; panics if absent.
    fn remove_ctx(&mut self, cid: u64) -> RunningCtx {
        let ctx = self.cmd_ctxs.remove(&cid).unwrap();
        assert_eq!(ctx.cid, cid);
        SCHED_CONTEX_GAUGE.set(self.cmd_ctxs.len() as f64);
        ctx
    }

    /// Metric label for the command identified by `cid`.
    fn get_ctx_tag(&self, cid: u64) -> &'static str {
        let ctx = &self.cmd_ctxs[&cid];
        ctx.tag
    }

    /// Generates the lock for a command.
    ///
    /// Basically, read-only commands require no latches, write commands require latches hashed
    /// by the referenced keys.
    fn gen_lock(&self, cmd: &Command) -> Lock {
        match *cmd {
            Command::Prewrite { ref mutations, .. } => {
                let keys: Vec<&Key> = mutations.iter().map(|x| x.key()).collect();
                self.latches.gen_lock(&keys)
            }
            Command::Commit { ref keys, .. } |
            Command::Rollback { ref keys, .. } => self.latches.gen_lock(keys),
            Command::Cleanup { ref key, .. } => self.latches.gen_lock(&[key]),
            _ => Lock::new(vec![]),
        }
    }

    /// Delivers a command to a worker thread for processing.
    fn process_by_worker(&mut self, cid: u64, cb_ctx: CbContext, snapshot: Box<Snapshot>) {
        SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "process"]).inc();
        debug!("process cmd with snapshot, cid={}", cid);
        // Take the command out of its context; the ctx stays registered so the
        // finish handlers can find the latches and callback later.
        let mut cmd = {
            let ctx = &mut self.cmd_ctxs.get_mut(&cid).unwrap();
            assert_eq!(ctx.cid, cid);
            ctx.cmd.take().unwrap()
        };
        if let Some(term) = cb_ctx.term {
            cmd.mut_context().set_term(term);
        }
        let ch = self.schedch.clone();
        let readcmd = cmd.readonly();
        if readcmd {
            self.worker_pool.execute(move || process_read(cid, cmd, ch, snapshot));
        } else {
            self.worker_pool.execute(move || process_write(cid, cmd, ch, snapshot));
        }
    }

    /// Calls the callback with an error.
    fn finish_with_err(&mut self, cid: u64, err: Error) {
        debug!("command cid={}, finished with error", cid);
        SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "error"]).inc();
        let mut ctx = self.remove_ctx(cid);
        let cb = ctx.callback.take().unwrap();
        let pr = ProcessResult::Failed { err: StorageError::from(err) };
        execute_callback(cb, pr);
        // Always release latches so queued commands can make progress.
        self.release_lock(&ctx.lock, cid);
    }

    /// Extracts the context of a command.
    fn extract_context(&self, cid: u64) -> &Context {
        let ctx = &self.cmd_ctxs[&cid];
        assert_eq!(ctx.cid, cid);
        ctx.cmd.as_ref().unwrap().get_context()
    }

    /// Event handler for new command.
    ///
    /// This method will try to acquire all the necessary latches. If all the necessary latches are
    /// acquired, the method initiates a get snapshot operation for furthur processing; otherwise,
    /// the method adds the command to the waiting queue(s). The command will be handled later in
    /// `lock_and_get_snapshot` when its turn comes.
    ///
    /// Note that once a command is ready to execute, the snapshot is always up-to-date during the
    /// execution because 1) all the conflicting commands (if any) must be in the waiting queues;
    /// 2) there may be non-conflicitng commands running concurrently, but it doesn't matter.
    fn schedule_command(&mut self, cmd: Command, callback: StorageCb) {
        SCHED_STAGE_COUNTER_VEC.with_label_values(&[cmd.tag(), "new"]).inc();
        let cid = self.gen_id();
        debug!("received new command, cid={}, cmd={}", cid, cmd);
        let lock = self.gen_lock(&cmd);
        let ctx = RunningCtx::new(cid, cmd, lock, callback);
        self.insert_ctx(ctx);
        self.lock_and_get_snapshot(cid);
    }

    /// True when the number of in-flight commands reaches the busy threshold.
    fn too_busy(&self) -> bool {
        self.cmd_ctxs.len() >= self.sched_too_busy_threshold
    }

    fn on_receive_new_cmd(&mut self, cmd: Command, callback: StorageCb) {
        // write flow control
        if !cmd.readonly() && self.too_busy() {
            execute_callback(callback,
                             ProcessResult::Failed { err: StorageError::SchedTooBusy });
        } else {
            self.schedule_command(cmd, callback);
        }
    }

    /// Tries to acquire all the required latches for a command.
    ///
    /// Returns true if successful; returns false otherwise.
    fn acquire_lock(&mut self, cid: u64) -> bool {
        let mut ctx = &mut self.cmd_ctxs.get_mut(&cid).unwrap();
        assert_eq!(ctx.cid, cid);
        let ok = self.latches.acquire(&mut ctx.lock, cid);
        if ok {
            // Dropping the timer records how long the command waited for latches.
            ctx.latch_timer.take();
        }
        ok
    }

    /// Initiates an async operation to get a snapshot from the storage engine, then posts a
    /// `SnapshotFinished` message back to the event loop when it finishes.
    fn get_snapshot(&mut self, cid: u64) {
        SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "snapshot"]).inc();
        let ch = self.schedch.clone();
        let cb = box move |(cb_ctx, snapshot)| {
            if let Err(e) = ch.send(Msg::SnapshotFinished {
                cid: cid,
                cb_ctx: cb_ctx,
                snapshot: snapshot,
            }) {
                panic!("send SnapshotFinish failed cmd id {}, err {:?}", cid, e);
            }
        };
        if let Err(e) = self.engine.async_snapshot(self.extract_context(cid), cb) {
            SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "async_snap_err"])
                .inc();
            self.finish_with_err(cid, Error::from(e));
        }
    }

    /// Event handler for the completion of get snapshot.
    ///
    /// Delivers the command along with the snapshot to a worker thread to execute.
fn on_snapshot_finished(&mut self, cid: u64, cb_ctx: CbContext, snapshot: EngineResult<Box<Snapshot>>) { debug!("receive snapshot finish msg for cid={}", cid); match snapshot { Ok(snapshot) => { SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "snapshot_ok"]) .inc(); self.process_by_worker(cid, cb_ctx, snapshot); } Err(e) => { SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "snapshot_err"]) .inc(); self.finish_with_err(cid, Error::from(e)); } } } /// Event handler for the success of read. /// /// If a next command is present, continues to execute; otherwise, delivers the result to the /// callback. fn on_read_finished(&mut self, cid: u64, pr: ProcessResult) { debug!("read command(cid={}) finished", cid); let mut ctx = self.remove_ctx(cid); SCHED_STAGE_COUNTER_VEC.with_label_values(&[ctx.tag, "read_finish"]).inc(); let cb = ctx.callback.take().unwrap(); if let ProcessResult::NextCommand { cmd } = pr { SCHED_STAGE_COUNTER_VEC.with_label_values(&[ctx.tag, "next_cmd"]).inc(); self.schedule_command(cmd, cb); } else { execute_callback(cb, pr); } self.release_lock(&ctx.lock, cid); } /// Event handler for the failure of write prepare. /// /// Write prepare failure typically means conflicting transactions are detected. Delivers the /// error to the callback, and releases the latches. fn on_write_prepare_failed(&mut self, cid: u64, e: Error) { debug!("write command(cid={}) failed at prewrite.", cid); SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "prepare_write_err"]) .inc(); self.finish_with_err(cid, e); } /// Event handler for the success of write prepare. /// /// Initiates an async write operation on the storage engine, there'll be a `WriteFinished` /// message when it finishes. 
    fn on_write_prepare_finished(&mut self,
                                 cid: u64,
                                 cmd: Command,
                                 pr: ProcessResult,
                                 to_be_write: Vec<Modify>) {
        SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "write"]).inc();
        if to_be_write.is_empty() {
            // Nothing to persist: complete immediately without touching
            // the engine.
            return self.on_write_finished(cid, pr, Ok(()));
        }
        let engine_cb = make_engine_cb(cid, pr, self.schedch.clone());
        if let Err(e) = self.engine.async_write(cmd.get_context(), to_be_write, engine_cb) {
            SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "async_write_err"])
                .inc();
            self.finish_with_err(cid, Error::from(e));
        }
    }

    /// Event handler for the success of write.
    fn on_write_finished(&mut self, cid: u64, pr: ProcessResult, result: EngineResult<()>) {
        SCHED_STAGE_COUNTER_VEC.with_label_values(&[self.get_ctx_tag(cid), "write_finish"]).inc();
        debug!("write finished for command, cid={}", cid);
        let mut ctx = self.remove_ctx(cid);
        let cb = ctx.callback.take().unwrap();
        // An engine-level failure overrides the prepared process result.
        let pr = match result {
            Ok(()) => pr,
            Err(e) => ProcessResult::Failed { err: ::storage::Error::from(e) },
        };
        if let ProcessResult::NextCommand { cmd } = pr {
            SCHED_STAGE_COUNTER_VEC.with_label_values(&[ctx.tag, "next_cmd"]).inc();
            self.schedule_command(cmd, cb);
        } else {
            execute_callback(cb, pr);
        }
        self.release_lock(&ctx.lock, cid);
    }

    /// Releases all the latches held by a command.
    fn release_lock(&mut self, lock: &Lock, cid: u64) {
        let wakeup_list = self.latches.release(lock, cid);
        // Commands that were queued behind these latches get another
        // chance to run.
        for wcid in wakeup_list {
            self.lock_and_get_snapshot(wcid);
        }
    }

    /// Tries to acquire all the necessary latches. If all the necessary latches are acquired,
    /// the method initiates a get snapshot operation for furthur processing.
    fn lock_and_get_snapshot(&mut self, cid: u64) {
        if self.acquire_lock(cid) {
            self.get_snapshot(cid);
        }
    }

    /// Shuts down the event loop.
    fn shutdown(&mut self, event_loop: &mut EventLoop<Self>) {
        info!("receive shutdown command");
        event_loop.shutdown();
    }
}

/// Handler of the scheduler event loop.
impl mio::Handler for Scheduler { type Timeout = (); type Message = Msg; /// Event handler for message events. fn notify(&mut self, event_loop: &mut EventLoop<Self>, msg: Msg) { match msg { Msg::Quit => self.shutdown(event_loop), Msg::RawCmd { cmd, cb } => self.on_receive_new_cmd(cmd, cb), Msg::SnapshotFinished { cid, cb_ctx, snapshot } => { self.on_snapshot_finished(cid, cb_ctx, snapshot) } Msg::ReadFinished { cid, pr } => self.on_read_finished(cid, pr), Msg::WritePrepareFinished { cid, cmd, pr, to_be_write } => { self.on_write_prepare_finished(cid, cmd, pr, to_be_write) } Msg::WritePrepareFailed { cid, err } => self.on_write_prepare_failed(cid, err), Msg::WriteFinished { cid, pr, result, .. } => self.on_write_finished(cid, pr, result), } } /// Handler for tick events. fn tick(&mut self, event_loop: &mut EventLoop<Self>) { if !event_loop.is_running() { // stop work threads if has } } }
#include <stdio.h>

/*
 * Ghost-leg (Amidakuji) lottery tracer.
 *
 * Reads the number of vertical lines (at most 30), the number of
 * horizontal rungs (at most 30), then the rungs as "a,b" pairs; pairs
 * outside [1, width] are re-read.  Each start position is traced down
 * through the rungs in input order, and the starting position that ends
 * at each column is printed, one per line.
 */
int main(void)
{
    int width, rungs;
    int left[30], right[30], result[30];
    int count = 0;
    int i, k, start, pos;

    /* Re-prompt until the values fit the fixed-size arrays. */
    do {
        scanf("%d", &width);
    } while (width > 30);
    do {
        scanf("%d", &rungs);
    } while (rungs > 30);

    /* Accept only rungs whose endpoints are valid columns. */
    while (count < rungs) {
        scanf("%d,%d", &left[count], &right[count]);
        if (left[count] >= 1 && left[count] <= width &&
            right[count] >= 1 && right[count] <= width) {
            count++;
        }
    }

    /* Trace each starting column through every rung, top to bottom. */
    for (start = 1; start <= width; start++) {
        pos = start;
        for (k = 0; k < rungs; k++) {
            if (pos == left[k]) {
                pos = right[k];
            } else if (pos == right[k]) {
                pos = left[k];
            }
        }
        /* Record which start ends up at the final column. */
        result[pos - 1] = start;
    }

    for (i = 0; i < width; i++)
        printf("%d\n", result[i]);
    return 0;
}
PSYCHIATRY SANS PSYCHOTHERAPY Psychotherapy which dominated psychiatry for long seems to have become relegated into oblivion. Whether this change in the professional practice is a progressive or re-trograde step requires a serious consideration. Psychiatrists in India are more or less practising a biologic reductionistic model of mental illness and if this trend continues they may become manque neurologists. Having been associated with large number of postgraduate psychiatric teaching programmes in the country, I have strongly felt that psychotherapeutic training does not receive any thing more than lip service in most of the teaching schedules. The basic question is whether psychotherapy is really necessary in the treatment of mental illness. In view of certain psychiatrists, it is ineffective, no better than placebo, doing as much harm as good, and having no scientific foundation. But, inspite of limitations of various nature it is hard to find a psychiatrist who does not practice psychotherapy of some kind. A general consensus is that regardless of most skilled application of psychopharmacology a large amount of human misery still remains and perhaps can not be handled by any means other than some sort of psychotherapy. Advances in biological psychiatry claim to have revealed certain pathogenic-biochemical factors relating to psychiatric illness. This has resulted in development of certain very potent psychopharmacological agents. As a result of these developments, biologically oriented psychiatrists view 'psychic symptoms as epiphenomena of underlying physiochemical events. On the other hand the psychologically oriented psychiatrists solely emphasise the need to understand the psychiatric illness in terms of psychosocial perspective. Both these approaches can be called reductionistic approaches because they offer single simp-listic explanation for complex behavioural problems. 
Although a greater emphasis has always been laid on the integrative approach, the reductionistic theories have a strong appeal as they reduce the ambiguities and complexities of the field and appear easy to practice. ^ An important factor which deters novice from practising or learning psychotherapy is the widely diverse schools of human behaviour and techniques of psychotherapy. In a latest count more than 450 brands of psychotherapy could be indentified (Karasu). Furthermore senior Indian psychiatrists have raised serious doubts as to whether western psychotherapeutic models could be practised in India (Neki, Surya and Jaya Ram) because psychotherapeutic concepts developed in a specific culture may be suitable in another culture only when heritage of two cultures has substantial sharing. Among other important reasons for not practising psychotherapy in India are … PSYCHIATRY SANS PSYCHOTHERAPY Psychotherapy which dominated psychiatry for long seems to have become relegated into oblivion. Whether this change in the professional practice is a progressive or retrograde step requires a serious consideration. Psychiatrists in India are more or less practising a biologic reductionistic model of mental illness and if this trend continues they may become manque neurologists. Having been associated with large number of postgraduate psychiatric teaching programmes in the country, I have strongly felt that psychotherapeutic training does not receive any thing more than lip service in most of the teaching schedules. The basic question is whether psychotherapy is really necessary in the treatment of mental illness. In view of certain psychiatrists, it is ineffective, no better than placebo, doing as much harm as good, and having no scientific foundation. But, inspite of limitations of various nature it is hard to find a psychiatrist who does not practice psychotherapy of some kind. 
A general consensus is that regardless of most skilled application of psychopharmacology a large amount of human misery still remains and perhaps can not be handled by any means other than some sort of psychotherapy. Advances in biological psychiatry claim to have revealed certain pathogenic-biochemical factors relating to psychiatric illness. This has resulted in development of certain very potent psychopharmacological agents. As a result of these developments, biologically oriented psychiatrists view 'psychic symptoms as epiphenomena of underlying physiochemical events. On the other hand the psychologically oriented psychiatrists solely emphasise the need to understand the psychiatric illness in terms of psychosocial perspective. Both these approaches can be called reductionistic approaches because they offer single simplistic explanation for complex behavioural problems. Although a greater emphasis has always been laid on the integrative approach, the reductionistic theories have a strong appeal as they reduce the ambiguities and complexities of the field and appear easy to practice. ^ An important factor which deters novice from practising or learning psychotherapy is the widely diverse schools of human behaviour and techniques of psychotherapy. In a latest count more than 450 brands of psychotherapy could be indentified (Karasu). Furthermore senior Indian psychiatrists have raised serious doubts as to whether western psychotherapeutic models could be practised in India (Neki, Surya and Jaya Ram) because psychotherapeutic concepts developed in a specific culture may be suitable in another culture only when heritage of two cultures has substantial sharing. Among other important reasons for not practising psychotherapy in India are enormous workload on psychiatrists and inability of Indian masses to understand and appreciate psychotherapeutic help on account oflow level of education. 
A controversy has also existed for long as to what, if any, is the effective component of psychotherapy. Various theoretical schools-emphasize that the change occurs because of interpretations, analysis of transference, et:. while other psychiatrists feel that the crucial factors in producing change are the relationsnip between therapist and patient and some other "Universal" factors (e.g. suggestion., psrsuation or catharsis). For Indian psychiatrists it is important to delineate the cultural factors involved in psychopathology because in individuals, percepts, beliefs and values are highly influenced by the culture in which one grows. For instanc.:, the western society is considerably individualistic and therefore a greater emphasis is laid on individual happiness and achievement while in India emphatic use of the word T is interpreted as a reflection of egostic feeling (Aham) and hence instead of T the word 'We' is often used in day to day conversation. Thus in psychotherapautic endeavour if emphasis is laid on complete autonomy of individual it iiriy not produce the desired result. Social norms and orient culture do not approve it. Likewise, certain basic tenets of Hindu Philosophy such as subtle feeling of datachment towards the worldly affairs or desirelessness could be easily used in our psychotherapeutic practices and should help tremendously in strengthening ego functioning, improving interpersonal adjustment and promoting mental health. As such, there is an urgent need of developing certain models of psychotherapy which could be used in the management of emotionally disturbed patients regardless of their low educational background. As professionals we must tailor the therapeutic skills suiting to our clients and for this purpose intensive research is required to develop certain models which could be adopted by the coming generation of psychiatrists. A. K. Agarwal
#include "stdio.h"
/*
 * For each of t queries, reads a value n and prints the index of the
 * first precomputed threshold a[i] that exceeds n (a[] is built once
 * from the recurrences below).
 */
main()
{
    int t;
    scanf("%d",&t);
    long long a[105],b[105],i,j,n,v,s;
    s=2;
    a[0]=1;
    b[0]=1;
    /* Build thresholds a[1..30]: each step doubles the previous b and
       adds s*s (s = 2^i), and a[i] additionally accumulates all earlier
       b[j].  Only indices 0..30 are ever written. */
    for(i=1;i<31;i++)
    {
        a[i]=b[i-1]*2+s*s;
        b[i]=b[i-1]*2+s*s;
        for(j=0;j<i;j++)
        {
            a[i]+=b[j];
        }
        s*=2;
    }
    while(t--)
    {
        scanf("%lld",&n);
        /* NOTE(review): the loop bound is i<32 but only a[0..30] were
           initialized, so i==31 reads uninitialized memory; the bound
           should almost certainly be i<31.  Also, v is never reset, so
           if no a[i] exceeds n, v keeps its value from the previous
           query (or is uninitialized on the first one). */
        for(i=0;i<32;i++)
        {
            if(n<a[i])
            {
                v=i;
                break;
            }
        }
        /* NOTE(review): "%ll1" is not a valid conversion specification
           (undefined behavior per the C standard) and no argument is
           supplied.  v==0 only happens when n < a[0] == 1; the intended
           output for that case needs to be confirmed against the
           original problem statement before fixing. */
        if(v==0)
            printf("%ll1\n");
        else
            printf("%lld\n",v);
    }
}
// Package r is the core retort runtime: it builds and reconciles a
// fiber tree of components and renders it to the terminal.
package r

import (
	"fmt"
	"os"
	"reflect"
	"runtime/debug"
	"time"

	"github.com/gdamore/tcell/encoding"

	d "retort.dev/r/debug"
	"retort.dev/r/internal/quadtree"
)

// retort holds the mutable state of one running retort instance: the
// committed fiber tree, the work-in-progress ("wip") tree, and the
// bookkeeping flags driving the work loop.
type retort struct {
	root               *fiber
	nextUnitOfWork     *fiber // next fiber for the work loop to process
	currentRoot        *fiber // last committed tree
	wipRoot            *fiber // in-progress tree being rebuilt
	wipFiber           *fiber // fiber currently being reconciled
	deletions          []*fiber // fibers flagged for removal on commit
	hasChangesToRender bool
	hasNewState        bool
	rootBlockLayout    BlockLayout
	quadtree           quadtree.Quadtree
	config             RetortConfiguration
}

// RetortConfiguration allows you to enable features your app
// may want to use
type RetortConfiguration struct {
	// UseSimulationScreen to output to a simulated screen
	// this is useful for automated testing
	UseSimulationScreen bool

	// UseDebugger to show a d overlay with output from
	// the retort.dev/d#Log function
	UseDebugger bool

	// DisableMouse to prevent Mouse Events from being created
	DisableMouse bool
}

// Retort is called with your root Component and any optional
// configuration to begin running retort.
//
// func Example_app() {
//   // Call the main function on retort to start the app,
//   // when you call this, retort will take over the screen.
//   r.Retort(
//     // Root Element
//     r.CreateElement(
//       example.ClickableBox,
//       r.Properties{
//         component.BoxProps{
//           Width:  100, // Make the root element fill the screen
//           Height: 100, // Make the root element fill the screen
//           Border: component.Border{
//             Style:      component.BorderStyleSingle,
//             Foreground: tcell.ColorWhite,
//           },
//         },
//       },
//       r.Children{
//         // First Child
//         r.CreateElement(
//           example.ClickableBox,
//           r.Properties{
//             component.BoxProps{
//               Border: component.Border{
//                 Style:      component.BorderStyleSingle,
//                 Foreground: tcell.ColorWhite,
//               },
//             },
//           },
//           nil, // Pass nil as the third argument if there are no children
//         ),
//         // Second Child
//         r.CreateElement(
//           example.ClickableBox,
//           r.Properties{
//             component.BoxProps{
//               Border: component.Border{
//                 Style:      component.BorderStyleSingle,
//                 Foreground: tcell.ColorWhite,
//               },
//             },
//           },
//           nil,
//         ),
//       },
//     ),
//     // Pass in optional configuration
//     r.RetortConfiguration{}
//   )
// }
func Retort(root Element, config RetortConfiguration) {
	r := &retort{
		root:   root,
		config: config,
		quadtree: quadtree.Quadtree{
			MaxObjects: 2000,
			MaxLevels:  1000,
			Level:      0,
		},
	}
	// Publish the configuration and channels at package level so hooks
	// and the event handler can reach them.
	c = &config
	quitChan = make(chan struct{})
	setStateChan = make(chan ActionCreator, 2000)

	screen := UseScreen()
	defer screen.Fini()

	encoding.Register()
	if err := screen.Init(); err != nil {
		fmt.Fprintf(os.Stderr, "%v\n", err)
		os.Exit(1)
	}

	r.root = root

	// Size the collision quadtree and root layout to the terminal.
	w, h := screen.Size()
	r.quadtree.Bounds.Width = w
	r.quadtree.Bounds.Height = h

	r.parseRetortConfiguration()

	r.rootBlockLayout = BlockLayout{
		X:       0,
		Y:       0,
		Columns: w + 1, // +1 to account for zero-indexing
		Rows:    h + 1, // +1 to account for zero-indexing
		ZIndex:  0,
	}

	// TODO: this seems messy r.rootBlockLayout is copied to a bunch of places
	r.root.BlockLayout = r.rootBlockLayout
	r.root.InnerBlockLayout = r.rootBlockLayout
	root.Properties = append(root.Properties, r.rootBlockLayout)

	// Seed the first work-in-progress tree with the user's root element
	// as its only child, then mark everything as needing a render.
	r.wipRoot = &fiber{
		componentType:    nothingComponent,
		Properties:       Properties{Children{root}},
		alternate:        r.currentRoot,
		BlockLayout:      r.root.BlockLayout,
		InnerBlockLayout: r.root.InnerBlockLayout,
	}
	r.nextUnitOfWork = r.wipRoot
	r.currentRoot = r.wipRoot.Clone()
	r.hasChangesToRender = true

	var frame int
	var deadline time.Time

	// event handling
	go r.handleEvents()

	// work loop
	go func() {
		deadline = time.Now().Add(14 * time.Millisecond)
		workTick := time.NewTicker(1 * time.Nanosecond)
		frameTick := time.NewTicker(16 * time.Millisecond)
		shouldYield := false
		var droppedFrames int

		defer func() {
			if false { // TODO: wrap this in some config denoting prod mode
				if r := recover(); r != nil {
					d.Log("Panic", r)
					debug.PrintStack()
					close(quitChan)
				}
			}
		}()

		// TODO: run again if screen size changes
	workloop:
		for {
			select {
			case <-quitChan:
				workTick.Stop()
				break workloop // workloop
			case action := <-setStateChan:
				// Queue the state change; the next frame tick folds it
				// into a fresh tree update.
				action.addToQueue()
				r.hasNewState = true
			case <-frameTick.C:
				if r.hasNewState {
					r.updateTree()
				}
				if r.hasChangesToRender {
					// If there's still setStates to add to the queue, give them a chance
					// to be added
					if len(setStateChan) > 0 && droppedFrames == 0 {
						droppedFrames++
						continue
					}
					workTick = time.NewTicker(1 * time.Nanosecond)
					droppedFrames = 0
				}
				deadline = time.Now().Add(14 * time.Millisecond)
			case <-workTick.C:
				if !r.hasChangesToRender {
					// While we have work to do, this case is run very frequently
					// But when we have no work to do it can consume considerable CPU time
					// So we only start this ticker when we actually have work to do,
					// and we stop it the rest of the time.
					// We use a frame tick to ensure at least once every 16ms (60fps)
					// we are checking if we need to do work
					workTick.Stop()
				}

				if r.nextUnitOfWork != nil && !shouldYield {
					// start := time.Now()
					r.nextUnitOfWork = r.performWork(r.nextUnitOfWork)
					// d.Log("performWork: ", time.Since(start))

					// yield with time to render
					if time.Since(deadline) > 100*time.Nanosecond {
						shouldYield = true
					}
				}

				if r.nextUnitOfWork == nil && r.wipRoot != nil {
					// All fibers processed: flush the finished tree to
					// the screen.
					start := time.Now()
					r.commitRoot()
					d.Log("commitRoot: ", time.Since(start))
					shouldYield = false
				}

				if time.Since(deadline) > 0 {
					shouldYield = false
					frame++
				}
			}
		}
	}()

	// Wait until quit
	<-quitChan
	screen.Clear()
	screen.Fini()
}

// parseRetortConfiguration applies the user's RetortConfiguration to
// the live screen (currently just mouse support).
func (r *retort) parseRetortConfiguration() {
	screen := UseScreen()

	if !r.config.DisableMouse {
		screen.EnableMouse()
	}
}

// ForceRender can be called at any point to ask
// retort to start a whole new update
func (r *retort) ForceRender() {
	r.updateTree()
}

// [ Working ]------------------------------------------------------------------

// updateTree starts a fresh reconciliation pass: the committed tree
// becomes the new work-in-progress root with a cloned alternate to
// diff against.
func (r *retort) updateTree() {
	r.wipRoot = r.currentRoot
	r.wipRoot.alternate = r.currentRoot.Clone()
	r.wipRoot.dirty = true

	r.nextUnitOfWork = r.wipRoot
	r.wipFiber = nil
	r.deletions = nil
	r.hasChangesToRender = true
	r.hasNewState = false
}

// performWork reconciles one fiber and returns the next one to visit,
// walking the tree depth-first: child, then sibling, then climb back up
// to the nearest ancestor with an unvisited sibling.
func (r *retort) performWork(f *fiber) *fiber {
	r.updateComponent(f)

	if f.child != nil {
		return f.child
	}

	nextFiber := f
	for nextFiber != nil {
		if nextFiber.sibling != nil {
			return nextFiber.sibling
		}
		nextFiber = nextFiber.parent
	}
	return nil
}

// [ Components ]---------------------------------------------------------------

// updateComponent dispatches a fiber to the reconciler that matches its
// component type, exposing it to hooks via the package-level hookFiber
// for the duration of the call.
func (r *retort) updateComponent(f *fiber) {
	hookFiberLock.Lock()
	hookFiber = f
	hookFiberLock.Unlock()

	switch f.componentType {
	case nothingComponent:
		r.updateNothingComponent(f)
	case elementComponent:
		r.updateElementComponent(f)
	case fragmentComponent:
		r.updateFragmentComponent(f)
	case screenComponent:
		r.updateScreenComponent(f)
	}
	// d.Log("updateComponent", f)

	hookFiberLock.Lock()
	hookFiber = nil
	hookFiberLock.Unlock()
}
// updateElementComponent invokes a user component function with its
// current Properties and reconciles the single child it returns.
func (r *retort) updateElementComponent(f *fiber) {
	if f == nil || f.componentType != elementComponent {
		return
	}
	if f.component == nil || f.Properties == nil {
		return
	}

	r.wipFiber = f
	// Reset the hook cursor so UseState/UseEffect calls inside the
	// component line up with this fiber's hook slice.
	hookFiberLock.Lock()
	hookIndex = 0
	hookFiberLock.Unlock()
	r.wipFiber.hooks = nil

	children := f.component(f.Properties)
	// d.Log("updateElementComponent children", children)

	r.reconcileChildren(f, []*fiber{children})
}

// updateFragmentComponent reconciles a fragment's mandatory Children
// property; a fragment renders nothing itself.
func (r *retort) updateFragmentComponent(f *fiber) {
	if f == nil || f.componentType != fragmentComponent || f.Properties == nil {
		return
	}

	r.wipFiber = f

	children := f.Properties.GetProperty(
		Children{},
		"Fragment requires r.Children",
	).(Children)

	r.reconcileChildren(f, children)
}

// updateNothingComponent reconciles the children of a placeholder
// fiber (e.g. the synthetic root).
func (r *retort) updateNothingComponent(f *fiber) {
	if f == nil || f.componentType != nothingComponent {
		return
	}

	r.wipFiber = f

	children := f.Properties.GetOptionalProperty(
		Children{},
	).(Children)

	r.reconcileChildren(f, children)
}

// updateScreenComponent reconciles the children of a screen fiber.
func (r *retort) updateScreenComponent(f *fiber) {
	if f == nil || f.componentType != screenComponent {
		return
	}

	r.wipFiber = f

	children := f.Properties.GetOptionalProperty(
		Children{},
	).(Children)

	r.reconcileChildren(f, children)
}

// [ Children ]-----------------------------------------------------------------

// reconcileChildren diffs the freshly produced child elements against
// the previous render's child chain (reached via the alternate fiber),
// tagging each child fiber for update, placement, or deletion.
func (r *retort) reconcileChildren(f *fiber, elements []*fiber) {
	index := 0
	f.dirty = false

	var oldFiber *fiber
	if r.wipFiber != nil && r.wipFiber.alternate != nil {
		oldFiber = r.wipFiber.alternate.child
	}

	var prevSibling *fiber

	// Add newly generated child elements, as children to this fiber
	for index < len(elements) || oldFiber != nil {
		var element *fiber
		// NOTE(review): this guards only against an empty slice, not
		// against index >= len(elements); if the old sibling chain is
		// longer than the new element list this indexes out of range —
		// confirm whether callers can ever hit that case.
		if len(elements) != 0 {
			element = elements[index]
		}
		var newFiber *fiber

		// "Same type" means the old and new fibers were produced by the
		// same component function, so the node can be updated in place.
		sameType := false
		if oldFiber != nil && element != nil &&
			reflect.TypeOf(element.component) == reflect.TypeOf(oldFiber.component) {
			sameType = true
		}

		if sameType {
			// Update
			f.dirty = true
			newFiber = &fiber{
				dirty:            true,
				componentType:    element.componentType,
				component:        element.component,
				Properties:       AddPropsIfNone(element.Properties, f.InnerBlockLayout),
				parent:           f,
				alternate:        oldFiber,
				effect:           fiberEffectUpdate,
				renderToScreen:   element.renderToScreen,
				calculateLayout:  element.calculateLayout,
				BlockLayout:      f.InnerBlockLayout,
				InnerBlockLayout: f.InnerBlockLayout,
			}
		}

		if element != nil && !sameType {
			// New Placement
			f.dirty = true
			newFiber = &fiber{
				dirty:            true,
				componentType:    element.componentType,
				component:        element.component,
				Properties:       AddPropsIfNone(element.Properties, f.InnerBlockLayout),
				parent:           f,
				alternate:        nil,
				effect:           fiberEffectPlacement,
				renderToScreen:   element.renderToScreen,
				calculateLayout:  element.calculateLayout,
				BlockLayout:      f.InnerBlockLayout,
				InnerBlockLayout: f.InnerBlockLayout,
			}
		}

		if oldFiber != nil && !sameType {
			// Delete
			oldFiber.effect = fiberEffectDelete
			r.deletions = append(r.deletions, oldFiber)
		}

		if oldFiber != nil {
			// nothing to update
			oldFiber = oldFiber.sibling
		}

		// Link the new fiber into the tree: first child hangs off the
		// parent, later ones chain via sibling pointers.
		if index == 0 {
			f.dirty = true
			f.child = newFiber
		} else if element != nil {
			f.dirty = true
			prevSibling.sibling = newFiber
		}

		prevSibling = newFiber
		index++
	}
}
package com.direwolf20.buildinggadgets.client.events; import com.direwolf20.buildinggadgets.client.renders.BGRenderers; import com.direwolf20.buildinggadgets.common.items.AbstractGadget; import net.fabricmc.fabric.api.client.rendering.v1.WorldRenderContext; import net.minecraft.client.Minecraft; import net.minecraft.world.entity.player.Player; import net.minecraft.world.item.ItemStack; public class EventRenderWorldLast { public static void renderAfterSetup(WorldRenderContext evt) { Player player = Minecraft.getInstance().player; if (player == null) { return; } ItemStack heldItem = AbstractGadget.getGadget(player); if (heldItem.isEmpty()) { return; } BGRenderers.find(heldItem.getItem()).renderAfterSetup(evt, player, heldItem); } public static void renderWorldLastEvent(WorldRenderContext evt) { Player player = Minecraft.getInstance().player; if (player == null) { return; } ItemStack heldItem = AbstractGadget.getGadget(player); if (heldItem.isEmpty()) { return; } BGRenderers.find(heldItem.getItem()).render(evt, player, heldItem); } }
//Preprocess runs the preprocessing of a ZKB++ proof on a circuit c with input input for nbIterations.
//Returns a KKWContext for the main proof, and a KKWProof for the preprocessing commitment
func Preprocess(c *Circuit, input []ZKBVar, nbIterations int) (ctx *KKWContext, p KKWProof) {
	// Derive everything from a single master seed so the whole
	// preprocessing transcript is reproducible from it.
	// NOTE(review): the rand.Read error is ignored — confirm this is
	// crypto/rand and that a failure here should not abort.
	masterSeed := make([]byte, SECURITY_LEVEL)
	rand.Read(masterSeed)
	masterPRNG, _ := utils.NewKeyedPRNG(masterSeed)

	ctx = &KKWContext{
		nbIterations: nbIterations,
		offsets:      make([]offset, nbIterations),
		seeds:        make([][]byte, nbIterations),
		omegaRand:    make([][]byte, nbIterations),
		gammaRand:    make([][]byte, nbIterations),
		salt:         make([]byte, SECURITY_LEVEL),
		omegaCom:     make([][sha256.Size]byte, nbIterations),
		gammaiCom:    make([][3][sha256.Size]byte, nbIterations),
	}

	// Scratch buffer reused for every commitment's blinding randomness.
	randomness := make([]byte, SECURITY_LEVEL)
	masterPRNG.Clock(ctx.salt)

	// Running concatenations of the per-iteration commitments, hashed
	// once at the end into the two proof digests.
	omegaCom := make([]byte, 0)
	gammaCom := make([]byte, 0)

	for i := 0; i < nbIterations; i++ {
		// Each iteration gets its own seed (and PRNG) derived from the
		// master PRNG, recorded so the verifier can reopen it later.
		iterationSeed := make([]byte, SECURITY_LEVEL)
		masterPRNG.Clock(iterationSeed)
		iterationPrng, _ := utils.NewKeyedPRNG(iterationSeed)

		c.preprocess(input, iterationPrng)
		ctx.offsets[i] = c.preprocessing
		ctx.seeds[i] = iterationSeed

		// Commit to the input offsets (phis); keep a copy of the
		// blinding randomness so the commitment can be opened.
		masterPRNG.Clock(randomness)
		inputOffsetCom := computeOffsetCommit(c.preprocessing.phis, randomness, ctx.salt)
		omegaCom = append(omegaCom, inputOffsetCom[:]...)
		ctx.omegaCom[i] = inputOffsetCom
		ctx.omegaRand[i] = make([]byte, SECURITY_LEVEL)
		copy(ctx.omegaRand[i], randomness)

		// Commit to the multiplication-triple offsets (deltas).
		iterationPrng.Clock(randomness)
		triplesOffsetCom := computeOffsetCommit(c.preprocessing.deltas, randomness, ctx.salt)
		ctx.gammaRand[i] = make([]byte, SECURITY_LEVEL)
		copy(ctx.gammaRand[i], randomness)

		// Commit to each of the three parties' per-iteration seeds,
		// blinding each with randomness from that party's PRNG.
		c.rand[0].Clock(randomness)
		seed1Com := computeSeedCommit(c.seeds[0], randomness, ctx.salt)
		ctx.gammaiCom[i][0] = seed1Com
		c.rand[1].Clock(randomness)
		seed2Com := computeSeedCommit(c.seeds[1], randomness, ctx.salt)
		ctx.gammaiCom[i][1] = seed2Com
		c.rand[2].Clock(randomness)
		seed3Com := computeSeedCommit(c.seeds[2], randomness, ctx.salt)
		ctx.gammaiCom[i][2] = seed3Com

		// Fold this iteration's commitments into the gamma transcript.
		iterationCom := hash(triplesOffsetCom[:], seed1Com[:], seed2Com[:], seed3Com[:])
		gammaCom = append(gammaCom, iterationCom[:]...)
	}

	// The proof carries only the two digests over all iterations.
	p.hGamma = hash(gammaCom)
	p.hOmega = hash(omegaCom)
	return
}
#include <bits/stdc++.h>
typedef long long ll;
// Unused template leftovers; kept so nothing else that might reference
// them breaks.
const ll Mod=755100000023ll;
const int base=1324327;
const int mod=1e9+7;
const int inf=1<<29;
const int N=100005;

int n, m = 0;
ll k, sum[N], pw[70], ans = 0, lim = 1;
using namespace std;
// Maps a prefix sum value to how many times it has appeared so far.
map<ll, int> S;

/*
 * Counts subarrays of the input whose sum equals some power of k,
 * using the classic prefix-sum trick: subarray (l, r] sums to pw[j]
 * exactly when sum[r] - pw[j] equals an earlier prefix sum.
 */
int main() {
#ifdef Kay
    freopen("cf.in", "r", stdin);
    freopen("cf.out", "w", stdout);
#endif
    // NOTE(review): "%I64d" is the MSVC/old-Codeforces 64-bit format; on
    // glibc this must be "%lld" — confirm the target compiler before
    // changing.
    scanf("%d %I64d", &n, &k);
    int i, j;
    // Prefix sums: sum[i] = a[1] + ... + a[i], with sum[0] == 0.
    for (i = 1; i <= n; i++) {
        scanf("%I64d", &sum[i]);
        sum[i] += sum[i - 1];
    }
    // lim = 10^15: powers larger than this cannot match any prefix-sum
    // difference, so power enumeration stops there.
    for (i = 1; i <= 15; i++)
        lim *= 10;
    // Enumerate the distinct powers of k.  k == 1 has one distinct power
    // and k == -1 has two (1 and -1); both would loop forever below.
    if (k == 1)
        pw[0] = 1, m = 1;
    if (k == -1)
        pw[0] = 1, pw[1] = -1, m = 2;
    if (k != 1 && k != -1) {
        pw[0] = 1;
        for (i = 1;; i++) {
            pw[i] = pw[i - 1] * k;
            if (pw[i] > lim)
                break;
        }
        m = i;
    }
    // Empty prefix counts once so subarrays starting at index 1 match.
    S[0] = 1;
    for (i = 1; i <= n; i++) {
        for (j = 0; j < m; j++) {
            // Fix: the original `ans += S[sum[i] - pw[j]]` used
            // operator[], which default-inserts a zero entry on every
            // miss — up to n*m useless nodes.  find() gives the same
            // count without mutating the map.
            map<ll, int>::iterator it = S.find(sum[i] - pw[j]);
            if (it != S.end())
                ans += it->second;
        }
        S[sum[i]]++;
    }
    cout << ans;
    return 0;
}
/** * This test class is time and date sensitive */ public class EventTaskTest { TaskDate today; TaskDate tomorrow; String defaultDescription; @Before public void setup() { today = TaskDate.getTodayDate(); tomorrow = TaskDate.getTomorrowDate(); defaultDescription = "Go meeting"; } /** * Compare string of both parameter and confirms both are the same. */ private void assertEqualEventTask(EventTaskTestExpectedResult expected, EventTask actual) { assertEquals(expected.toString(),(actual.toString())); } /** * Compare both messages and confirms both are the same */ private void assertResultMessage(String expected, String actual) { assertEquals(expected, actual); } @Test public void construct_fullDateAsEndDate_endDateAfterToday() throws IllegalValueException { //Go meeting by 1-1-2050 EventTaskTestHelper helper = new EventTaskTestHelper(); String endDate = "1-1-2050"; helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription; helper.taskComponentArray[Task.END_DATE] = endDate; helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true; EventTask actual = helper.getEventTask(); EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription, today.toString(), endDate, TaskTime.getTimeNow().toString(), TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING); assertEqualEventTask(expected, actual); } @Test public void construct_fullDateAsEndDate_exceptionThrown() { //Go meeting by 1-1-2000 EventTaskTestHelper helper = new EventTaskTestHelper(); String endDate = "1-1-2000"; helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription; helper.taskComponentArray[Task.END_DATE] = endDate; helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true; try { EventTask actual = helper.getEventTask(); EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription, today.toString(), endDate, TaskTime.getTimeNow().toString(), TaskTime.DEFAULT_END_TIME.toString(), 
TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING); assertEqualEventTask(expected, actual); } catch (IllegalValueException ive) { assertResultMessage(EventTask.MESSAGE_EVENT_CONSTRAINTS, ive.getMessage()); } } @Test public void construct_dayNameInWeekAsEndDate_endDateAfterToday() throws IllegalValueException { //Go meeting by fri EventTaskTestHelper helper = new EventTaskTestHelper(); String endDate = "fri"; helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription; helper.taskComponentArray[Task.END_DATE] = endDate; helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true; EventTask actual = helper.getEventTask(); EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription, today.toString(), TaskDate.determineDayInWeekGivenName(endDate).toString(), TaskTime.getTimeNow().toString(), TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING); assertEqualEventTask(expected, actual); } @Test public void construct_monthAndYearAsEndDate_exceptionThrown() { //Go meeting by may-2000 EventTaskTestHelper helper = new EventTaskTestHelper(); String endDate = "may-2000"; helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription; helper.taskComponentArray[Task.END_DATE] = endDate; helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true; try { EventTask actual = helper.getEventTask(); EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription, today.toString(), endDate, TaskTime.getTimeNow().toString(), TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING); assertEqualEventTask(expected, actual); } catch (IllegalValueException ive) { assertResultMessage(EventTask.MESSAGE_EVENT_CONSTRAINTS, ive.getMessage()); } } @Test public void construct_monthAndYearAsStartDate_succesfulTaskCreated() throws IllegalValueException { //Go meeting on may-2020 EventTaskTestHelper helper = new 
    // NOTE(review): this excerpt is the interior of an EventTask construction
    // test class. Imports, the class header, shared fixtures (defaultDescription,
    // today, tomorrow, assertEqualEventTask, assertResultMessage) and the
    // signature of the first test lie outside this chunk, so the first lines
    // below are the tail of a test whose start is not visible here.
        EventTaskTestHelper();
        String startDate = "may-2020";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                startDate, startDate, TaskTime.DEFAULT_START_TIME.toString(),
                TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // A future month-year start date is kept as given for both start and end
    // dates; times fall back to the defaults.
    @Test
    public void construct_fullDateAsStartDate_startDateAfterToday() throws IllegalValueException {
        //Go meeting on may-2020
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "may-2020";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                startDate, startDate, TaskTime.DEFAULT_START_TIME.toString(),
                TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // With only a start time that already passed today, the start (and end)
    // date roll over to tomorrow.
    @Test
    public void construct_noStartDateAndStartTimeBeforeCurrentTime_startDateIsNextDay() throws IllegalValueException {
        //Go meeting at 12.01am
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startTime = "12.01am";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_TIME] = startTime;
        helper.hasTaskComponentArray[Task.START_TIME_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                tomorrow.toString(), tomorrow.toString(), startTime,
                TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // If the end time precedes the start time and no end date is given, the
    // event is assumed to span midnight: end date becomes the next day.
    @Test
    public void construct_noEndDate_EndTimeBeforeStartTime_startDateIsNextDay() throws IllegalValueException {
        //Go meeting on 1-1-2020 from 2am to 1am
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "1-1-2020";
        String startTime = "2am";
        String endTime = "1am";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.taskComponentArray[Task.START_TIME] = startTime;
        helper.taskComponentArray[Task.END_TIME] = endTime;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        helper.hasTaskComponentArray[Task.START_TIME_COMPONENT] = true;
        helper.hasTaskComponentArray[Task.END_TIME_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        TaskDate expectedEndDate = new TaskDate(startDate).getNextDay();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                startDate, expectedEndDate.toString(), startTime, endTime,
                TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // A bare month that already passed this year resolves to that month of
    // the next year.
    @Test
    public void construct_validMonthBeforeToday_startDateSameMonthNextYear() throws IllegalValueException {
        //Go meeting on may
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "may";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                "may-" + today.getNextYear().getYear(),
                "may-" + today.getNextYear().getYear(),
                TaskTime.DEFAULT_START_TIME.toString(), TaskTime.DEFAULT_END_TIME.toString(),
                TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // When the end month precedes the start month, the end date resolves to
    // the following year relative to the (already shifted) start date.
    @Test
    public void construct_endMonthBeforeStartMonth_SameDateTwoYearsLater() throws IllegalValueException {
        //Go meeting from may to april
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "may";
        String endDate = "april";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                "may-" + today.getNextYear().getYear(),
                "april-" + today.getNextYear().getNextYear().getYear(),
                TaskTime.DEFAULT_START_TIME.toString(), TaskTime.DEFAULT_END_TIME.toString(),
                TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // Same rollover rule as above, with day-month start and end dates.
    @Test
    public void construct_endDayMonthBeforeStartDayMonth_SameDateTwoYearsLater() throws IllegalValueException {
        //Go meeting from 12-may to 15-april
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "12-may";
        String endDate = "15-april";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                "12-may-" + today.getNextYear().getYear(),
                "15-april-" + today.getNextYear().getNextYear().getYear(),
                TaskTime.DEFAULT_START_TIME.toString(), TaskTime.DEFAULT_END_TIME.toString(),
                TaskPriority.DEFAULT_PRIORITY, RecurringType.DEFAULT_RECURRING);
        assertEqualEventTask(expected, actual);
    }

    // A daily-recurring event starting today is accepted; start time defaults
    // to the current time.
    @Test
    public void construct_taskRecurDaily_recurDailySuccessfully() throws IllegalValueException {
        //Go meeting on today r/daily
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = today.toString();
        String recurringType = "daily";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.RECURRING_TYPE] = recurringType;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                startDate, startDate, TaskTime.getTimeNow().toString(),
                TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                recurringType);
        assertEqualEventTask(expected, actual);
    }

    // A daily-recurring event spanning more than one day must be rejected
    // with MESSAGE_INVALID_RECURRING_DURATION.
    @Test
    public void construct_taskRecurDaily_exceptionThrown() {
        //Go meeting by 1-1-2020 r/daily
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String endDate = "1-1-2020";
        String recurringType = "daily";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.RECURRING_TYPE] = recurringType;
        try {
            EventTask actual = helper.getEventTask();
            EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                    today.toString(), endDate, TaskTime.getTimeNow().toString(),
                    TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                    recurringType);
            assertEqualEventTask(expected, actual);
        } catch (IllegalValueException ive) {
            assertResultMessage(RecurringType.MESSAGE_INVALID_RECURRING_DURATION, ive.getMessage());
        }
    }

    // A weekly-recurring event fitting within a week is accepted.
    @Test
    public void construct_taskRecurWeekly_successful() throws IllegalValueException {
        //Go meeting from 1-1-2020 to 3-1-2020 r/weekly
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "1-1-2020";
        String endDate = "3-1-2020";
        String recurringType = "weekly";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.RECURRING_TYPE] = recurringType;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                startDate, endDate, TaskTime.DEFAULT_START_TIME,
                TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                recurringType);
        assertEqualEventTask(expected, actual);
    }

    // A weekly-recurring event whose duration exceeds the period is rejected.
    @Test
    public void construct_taskRecurWeekly_exceptionThrown() {
        //Go meeting by 1-1-2020 r/weekly
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String endDate = "1-1-2020";
        String recurringType = "weekly";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.RECURRING_TYPE] = recurringType;
        try {
            EventTask actual = helper.getEventTask();
            EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                    today.toString(), endDate, TaskTime.getTimeNow().toString(),
                    TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                    recurringType);
            assertEqualEventTask(expected, actual);
        } catch (IllegalValueException ive) {
            assertResultMessage(RecurringType.MESSAGE_INVALID_RECURRING_DURATION, ive.getMessage());
        }
    }

    // A monthly-recurring event fitting within a month is accepted.
    @Test
    public void construct_taskRecurMonthly_successful() throws IllegalValueException {
        //Go meeting from 1-1-2020 to 25-1-2020 r/monthly
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String startDate = "1-1-2020";
        String endDate = "25-1-2020";
        String recurringType = "monthly";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.START_DATE] = startDate;
        helper.hasTaskComponentArray[Task.START_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.RECURRING_TYPE] = recurringType;
        EventTask actual = helper.getEventTask();
        EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                startDate, endDate, TaskTime.DEFAULT_START_TIME,
                TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                recurringType);
        assertEqualEventTask(expected, actual);
    }

    // A monthly-recurring event whose duration exceeds the period is rejected.
    @Test
    public void construct_taskRecurMonthly_exceptionThrown() {
        //Go meeting by 1-1-2020 r/monthly
        EventTaskTestHelper helper = new EventTaskTestHelper();
        String endDate = "1-1-2020";
        String recurringType = "monthly";
        helper.taskComponentArray[Task.DESCRIPTION] = defaultDescription;
        helper.taskComponentArray[Task.END_DATE] = endDate;
        helper.hasTaskComponentArray[Task.END_DATE_COMPONENT] = true;
        helper.taskComponentArray[Task.RECURRING_TYPE] = recurringType;
        try {
            EventTask actual = helper.getEventTask();
            EventTaskTestExpectedResult expected = new EventTaskTestExpectedResult(defaultDescription,
                    today.toString(), endDate, TaskTime.getTimeNow().toString(),
                    TaskTime.DEFAULT_END_TIME.toString(), TaskPriority.DEFAULT_PRIORITY,
                    recurringType);
            assertEqualEventTask(expected, actual);
        } catch (IllegalValueException ive) {
            assertResultMessage(RecurringType.MESSAGE_INVALID_RECURRING_DURATION, ive.getMessage());
        }
    }
}
/// Store a header into the DB. This method will return an error if the header is already stored. pub fn insert_header(&self, header: &BlockHeader) -> Result<i64, SPVError> { let hash = header.bitcoin_hash(); let id = self.insert_id(&hash)?; self.tx.execute("insert into header (id, data) values (?, ?)", &[&id, &encode(header)?])?; trace!("stored header {}", hash); Ok(id) }
/** * Create a new order by a user to a given restaurant. * * The order is initially empty and is characterized * by a desired delivery time. * * @param user user object * @param restaurantName restaurant name * @param h delivery time hour * @param m delivery time minutes * @return */ public Order createOrder(User user, String restaurantName, int h, int m) { Order result; if (mapUser.containsValue(user) == true) { result = mapResturant.get(restaurantName).addOrder(user.getFirstName()+ " " + user.getLastName(), h, m); return result; } else return null; }
from finbot.core import tracer class FinbotError(Exception): """Base class for all Finbot errors""" def __init__(self, error_message: str) -> None: super().__init__(error_message) self.tracer_step_key = tracer.current_key() class ApplicationError(FinbotError): def __init__(self, error_message: str, error_code: str) -> None: super().__init__(error_message) self.error_code = error_code class InvalidUserInput(ApplicationError): def __init__(self, error_message: str) -> None: super().__init__(error_message, "G001") class InvalidOperation(ApplicationError): def __init__(self, error_message: str) -> None: super().__init__(error_message, "G002") class MissingUserData(ApplicationError): def __init__(self, error_message: str) -> None: super().__init__(error_message, "G003")
-- | Demonstration of power iteration: repeatedly applying a matrix to a
-- vector and renormalising converges to the dominant eigenvector.  The
-- iterated result is printed alongside hmatrix's 'eig' for comparison.
module PowerMethod where

import Control.Monad as M
import Data.Complex
import Data.List as L
import Numeric.LinearAlgebra as NL
import System.Environment
import System.Random
import Text.Printf

-- | Strictly iterate a monadic step @n@ times, threading the accumulator.
{-# INLINE iterateM #-}
iterateM :: Int -> (b -> a -> IO a) -> b -> a -> IO a
iterateM 0 _ _ x = return x
iterateM n f x y = do
  z <- f x y
  iterateM (n - 1) f x $! z

-- | Render a complex number in Cartesian form, e.g. @"1.00 :+ 2.00"@.
{-# INLINE getComplexStr #-}
getComplexStr :: (Complex Double) -> String
getComplexStr (a :+ b) = printf "%.2f :+ %.2f" a b

-- | Render a complex number in polar form @(magnitude, phase)@.
{-# INLINE getComplexStr' #-}
getComplexStr' :: (Complex Double) -> String
getComplexStr' x =
  let (a, b) = polar x
   in printf "(%.2f, %.2f)" a b

-- | Render a list with a custom element printer.
-- NOTE(review): partial — there is no clause for the empty list.
{-# INLINE getListStr #-}
getListStr :: (a -> String) -> [a] -> String
getListStr f (x:xs) =
  printf "[%s%s]" (f x) . L.concatMap (\x -> " ," L.++ f x) $ xs

main = do
  (nStr:_) <- getArgs
  let range = (-1, 1)
  -- Random 2x2 complex matrix and 2-vector (real/imaginary parts uniform).
  matInit1 <- M.replicateM 4 (randomRIO range) :: IO [Double]
  matInit2 <- M.replicateM 4 (randomRIO range) :: IO [Double]
  vecInit1 <- M.replicateM 2 (randomRIO range) :: IO [Double]
  vecInit2 <- M.replicateM 2 (randomRIO range) :: IO [Double]
  let n = read nStr :: Int
      mat' = (2 >< 2) $ L.zipWith (:+) matInit1 matInit2
      vec' = NL.fromList $ L.zipWith (:+) vecInit1 vecInit2
      -- Reference answer from hmatrix.
      (eigVal, eigVec) = eig mat'
      eigVecs = toColumns eigVec
  print mat'
  out <-
    iterateM
      n
      (\mat vec -> do
         let newVec = mat #> vec
             maxMag = L.maximum . L.map magnitude . NL.toList $ newVec
             (a:b:_) = NL.toList newVec
             -- NOTE(review): phaseNorm is unused; kept as in the original.
             phaseNorm x =
               let (m, p) = polar x
                in mkPolar 1 p
             -- Normalise by the component of larger magnitude so the
             -- iterate stays bounded.
             norm =
               if magnitude a > magnitude b
                 then a
                 else b
             normalizedNewVec = newVec / (scalar norm)
         printf "Maximum magnitude: %.2f %s\n" maxMag
           (getListStr getComplexStr' . NL.toList $ normalizedNewVec)
         return normalizedNewVec)
      mat'
      vec'
  -- Rescale the final iterate to unit Euclidean norm before printing.
  let s = sqrt . L.sum . L.map (\x -> (magnitude x) ^ 2) . NL.toList $ out
  printf "%s\n" (getListStr getComplexStr' . L.map (/ (s :+ 0)) . NL.toList $ out)
  M.zipWithM_
    (\val vec ->
       printf "%.2f %s\n" (magnitude val)
         (getListStr getComplexStr' . NL.toList $ vec))
    (NL.toList eigVal)
    eigVecs
  print eigVal
  print eigVecs
<gh_stars>1-10 #!/usr/bin/env python """ fix_vasp_wrapped_types.py Usage: fix_vasp_wrapped_types.py [filename] [filename] [...] Faps can create POSCAR files with more atom types on a line than the outputs can handle and they end up getting wrapped. Use this script to put the types back on the line so it can be read by other programs. If filenames are given, only those files will be fixed, otherwise the CONTCAR CHGCAR LOCPOT files are fixed. Options: -h, --help display help and exit """ import sys from faps import fix_vasp_wrapped_types def main(): """ Run the fixer. Read filenames from argv and run them through the function. """ if '-h' in sys.argv or '--help' in sys.argv: print(__doc__) raise SystemExit if len(sys.argv) > 1: filenames = sys.argv[1:] else: filenames = ["CONTCAR", "CHGCAR", "LOCPOT"] for filename in filenames: try: if fix_vasp_wrapped_types(filename): # Print message only when files have been fixed print("Fixed file {}".format(filename)) except IOError: print("Error reading file {}".format(filename)) except IndexError: print("Couldn't convert file {}".format(filename)) if __name__ == '__main__': main()
// Actual class for I3FrameSequence implementation class I3FrameSequenceImpl { public: I3FrameSequenceImpl(const I3FrameSequenceImpl&); explicit I3FrameSequenceImpl(size_t); explicit I3FrameSequenceImpl(const std::vector<std::string>&, size_t); ~I3FrameSequenceImpl(); void add_file(const std::string&); void close(); void close_last_file(); void rewind(); bool more(); I3FramePtr pop_frame(I3Frame::Stream = I3Frame::None); void seek(size_t); std::vector<I3FramePtr> get_mixed_frames(); std::vector<I3FramePtr> get_current_frame_and_deps(); std::vector<std::string> get_paths() const; ssize_t get_frameno() const; ssize_t get_size() const; size_t get_cur_size() const; I3Frame::Stream get_stream() const; private: FileGroup files_; FrameCache cache_; ThreadRunner<std::vector<I3FramePtr>> tr_; std::unordered_map<size_t,std::future<std::vector<I3FramePtr>>> tr_cache_; size_t frameno_; }
<filename>src/main/java/ScheduleReader.java import com.google.api.core.ApiFuture; import com.google.cloud.firestore.CollectionReference; import com.google.cloud.firestore.DocumentReference; import com.google.cloud.firestore.Firestore; import com.google.cloud.firestore.WriteResult; import org.apache.poi.openxml4j.exceptions.InvalidFormatException; import org.apache.poi.ss.usermodel.*; import javax.swing.*; import javax.swing.filechooser.FileSystemView; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.concurrent.ExecutionException; public class ScheduleReader { // public static final String SAMPLE_XLSX_FILE_PATH = "BHT.xlsx"; public ScheduleReader(Firestore db) throws IOException, InvalidFormatException, InterruptedException, ExecutionException { JFileChooser jfc = new JFileChooser(FileSystemView.getFileSystemView().getHomeDirectory()); int returnValue = jfc.showOpenDialog(null); // int returnValue = jfc.showSaveDialog(null); if (returnValue == JFileChooser.APPROVE_OPTION) { File selectedFile = jfc.getSelectedFile(); String filePath = selectedFile.getAbsolutePath().toString(); // System.out.println(selectedFile.getAbsolutePath()); // InputStream serviceAccount = new FileInputStream("lict-6c37e-firebase-adminsdk-y0zlq-50d30c1ade.json"); // GoogleCredentials credentials = GoogleCredentials.fromStream(serviceAccount); // // FirebaseOptions options = new FirebaseOptions.Builder() // .setCredentials(credentials) // .build(); // FirebaseApp.initializeApp(options); // // Firestore db = FirestoreClient.getFirestore(); // DatabaseReference ref = FirebaseDatabase.getInstance().getReference(); Workbook workbook = WorkbookFactory.create(new File(filePath).getAbsoluteFile()); System.out.println("Workbook has " + workbook.getNumberOfSheets() + " Sheets : "); Iterator<Sheet> sheetIterator = workbook.sheetIterator(); System.out.println("Retrieving Sheets using 
Iterator"); while (sheetIterator.hasNext()) { Sheet sheet = sheetIterator.next(); System.out.println("=> " + sheet.getSheetName()); } System.out.println("Retrieving Sheets using for-each loop"); for(Sheet sheet: workbook) { System.out.println("=> " + sheet.getSheetName()); } System.out.println("Retrieving Sheets using Java 8 forEach with lambda"); workbook.forEach(sheet -> { System.out.println("=> " + sheet.getSheetName()); }); /* ================================================================== Iterating over all the rows and columns in a Sheet (Multiple ways) ================================================================== */ // Getting the Sheet at index zero Sheet sheet = workbook.getSheetAt(0); // Create a DataFormatter to format and get each cell's value as String DataFormatter dataFormatter = new DataFormatter(); System.out.println("\n\nIterating over Rows and Columns using Iterator\n"); Iterator<Row> rowIterator = sheet.rowIterator(); int count =1; while (rowIterator.hasNext()) { Row row = rowIterator.next(); if(count != 1){ // count++; Iterator<Cell> cellIterator = row.cellIterator(); ArrayList<String> data = new ArrayList<>(); while (cellIterator.hasNext() ) { Cell cell = cellIterator.next(); int idx = cell.getColumnIndex(); String cellValue = dataFormatter.formatCellValue(cell); data.add(cellValue.toString()); } if(data.size() == 8){ System.out.println("asd" + data.size()); CollectionReference docRef = db.collection("batch_status"); System.out.println(docRef.getId()); // Add document data with id "alovelace" using a hashmap Map<String, Object> datas = new HashMap<>(); datas.put("batch_code", data.get(0)); datas.put("trainer_name", data.get(1)); datas.put("university_name", data.get(2)); datas.put("date", data.get(3)); datas.put("day", data.get(4)); String start, end; if(data.get(5) != ""){ start = data.get(5); start = new StringBuilder(start).insert(start.length()-2, ":").toString(); } else{ start = ""; } if(data.get(6) != ""){ end = data.get(6); end 
= new StringBuilder(end).insert(end.length()-2, ":").toString(); } else{ end = ""; } datas.put("start", start); datas.put("end", end); datas.put("status", data.get(7)); datas.put("attendance", ""); datas.put("intime", ""); datas.put("outtime", ""); //asynchronously write data ApiFuture<DocumentReference> result = docRef.add(datas); // ... // result.get() blocks on response // System.out.println("Update time : " + result.get().getUpdateTime()); data.clear(); } } count++; } Sheet sheet2 = workbook.getSheetAt(1); DataFormatter dataFormatter2 = new DataFormatter(); System.out.println("\n\nIterating over Rows and Columns using Iterator\n"); Iterator<Row> rowIterator2 = sheet2.rowIterator(); while (rowIterator2.hasNext()) { Row row = rowIterator2.next(); Iterator<Cell> cellIterator = row.cellIterator(); ArrayList<String> data = new ArrayList<>(); while (cellIterator.hasNext()){ Cell cell = cellIterator.next(); int idx = cell.getColumnIndex(); String cellValue = dataFormatter2.formatCellValue(cell); data.add(cellValue.toString()); } DocumentReference docRef3 = db.collection("trainer_details").document(data.get(1)); // Add document data with id "alovelace" using a hashmap Map<String, Object> datas = new HashMap<>(); datas.put("name", data.get(1)); datas.put("mobile", data.get(2)); datas.put("email", data.get(3)); //asynchronously write data ApiFuture<WriteResult> result3 = docRef3.set(datas); // result.get() blocks on response System.out.println("Update time : " + result3.get().getUpdateTime()); data.clear(); } // Getting the Sheet at index zero Sheet sheet3 = workbook.getSheetAt(2); // Create a DataFormatter to format and get each cell's value as String DataFormatter dataFormatter3 = new DataFormatter(); System.out.println("\n\nIterating over Rows and Columns using Iterator\n"); Iterator<Row> rowIterator3 = sheet3.rowIterator(); while (rowIterator3.hasNext()){ Row row = rowIterator3.next(); Iterator<Cell> cellIterator = row.cellIterator(); ArrayList<String> data = new 
ArrayList<>(); while (cellIterator.hasNext()) { Cell cell = cellIterator.next(); int idx = cell.getColumnIndex(); String cellValue = dataFormatter3.formatCellValue(cell); data.add(cellValue.toString()); } DocumentReference docRef2 = db.collection("university_details").document(data.get(0)); System.out.println("Enter Uni:"); Map<String, Object> datas2 = new HashMap<>(); datas2.put("address", data.get(2)); datas2.put("lat_long", data.get(3)); datas2.put("location", data.get(1)); datas2.put("university_name", data.get(0)); ApiFuture<WriteResult> result2 = docRef2.set(datas2); System.out.println("Update time : " + result2.get().getUpdateTime()); data.clear(); } } } }
from baggingrnet.model.baggingpre import ensPrediction inPath='/testpath/data/pm25_covs_test.csv' gindex='gindex' feasList = ['lat', 'lon', 'ele', 'prs', 'tem', 'rhu', 'win', 'pblh_re', 'pre_re', 'o3_re', 'aod', 'merra2_re', 'haod', 'shaod', 'jd','lat2','lon2','latlon'] target='pm25_avg_log' bagpath='/testpath/baggingrnet' prepath="/testpath/bagprediction1" mbagpre=ensPrediction(bagpath,prepath) mbagpre.getInputSample(inPath, feasList,gindex) mbagpre.startMProcess(10) mbagpre.aggPredict(isval=True,tfld='pm25_davg')
t = int(input()) for _ in range(t): n, k = [int(s) for s in input().split()] ans = 0 l1, r1 = [int(s) for s in input().split()] l2, r2 = [int(s) for s in input().split()] if r1 > r2: l1, l2, r1, r2 = l2, l1, r2, r1 initial_k = max(0, (r1 - max(l1, l2))) k -= initial_k * n if(k < 0): print(0) continue in_cost = max(0, max(l1, l2) - r1) one_cost = r2 - min(l1, l2) - initial_k ans = 2 * k + in_cost for i in range(1, n + 1): cand = in_cost * i if one_cost * i >= k: cand += k else: cand += 2 * k - one_cost * i ans = min(ans, cand) print(ans)
/** * A registry for binary types. Eventually will do some caching in the future... * * @author Jan Vrany * */ public static class TypeRegistry { protected Map<String, IBinaryType> typeMap = new HashMap<String, IBinaryType>(); public void cleanup() { typeMap.clear(); } public IBinaryType get(String name) { return typeMap.get(name); } public void put(String name, IBinaryType type) { typeMap.put(name,type); } public void put(String name, InputStream classfile) { try { put(name, ClassFileReader.read(classfile, (new String(name).replace('.', '/') + ".class"))); } catch (ClassFormatException e) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); } } public void put(String name, byte[] classfile) { put(name, new ByteArrayInputStream(classfile)); } }
import os
import logging
from pathlib import Path
from typing import Callable, Dict, List, Optional, Type, Union

from ray.air import Checkpoint, CheckpointConfig
from ray.air._internal.checkpoint_manager import CheckpointStorage
from ray.air._internal.checkpoint_manager import (
    _CheckpointManager as CommonCheckpointManager,
)
from ray.air._internal.checkpoint_manager import _TrackedCheckpoint
from ray.train._internal import session
from ray.train._internal.session import TrainingResult
from ray.train._internal.utils import construct_path
from ray.train.constants import (
    CHECKPOINT_RANK_KEY,
    TRAIN_CHECKPOINT_SUBDIR,
    TUNE_CHECKPOINT_ID,
    TUNE_INSTALLED,
    CHECKPOINT_METADATA_KEY,
    LAZY_CHECKPOINT_MARKER_FILE,
)
from ray.air.constants import TIMESTAMP

# Tune is an optional dependency; `tune` stays None when it is absent.
if TUNE_INSTALLED:
    from ray import tune
else:
    tune = None

logger = logging.getLogger(__name__)


def load_checkpoint_from_path(checkpoint_to_load: Union[str, Path]) -> Checkpoint:
    """Utility function to load a checkpoint from a path."""
    checkpoint_path = Path(checkpoint_to_load).expanduser()
    if not checkpoint_path.exists():
        raise ValueError(f"Checkpoint path {checkpoint_path} does not exist.")
    checkpoint = Checkpoint.from_directory(str(checkpoint_path))
    return checkpoint


class CheckpointManager(CommonCheckpointManager):
    """Manages checkpoint processing, writing, and loading.

    - A ``checkpoints`` directory is created in the ``run_dir`` and contains
      all the checkpoint files.

    The full default path will be:

    ~/ray_results/train_<datestring>/run_<run_id>/checkpoints/
    checkpoint_<checkpoint_id>

    Attributes:
        latest_checkpoint_dir: Path to the file directory for the
            checkpoints from the latest run. Configured through
            ``start_training``.
        latest_checkpoint_filename: Filename for the latest checkpoint.
        next_checkpoint_path: Path to the next checkpoint to persist from
            the latest run.
        best_checkpoint_path: Path to the best persisted checkpoint from
            the latest run.
        latest_checkpoint_id: The id of the most recently saved checkpoint.
        latest_checkpoint: The latest saved checkpoint. This checkpoint may
            not be saved to disk.
    """

    _persist_memory_checkpoints = True

    def __init__(
        self,
        run_dir: Optional[Path] = None,
        checkpoint_strategy: Optional[CheckpointConfig] = None,
    ):
        self.run_dir = run_dir

        super().__init__(checkpoint_strategy=checkpoint_strategy)

        self._validate_checkpoint_strategy()

    def _validate_checkpoint_strategy(self):
        # Default the scoring attribute to the report timestamp so ordering
        # is always well-defined.
        if self._checkpoint_strategy.checkpoint_score_attribute is None:
            self._checkpoint_strategy.checkpoint_score_attribute = TIMESTAMP

    def _load_checkpoint(
        self, checkpoint_to_load: Optional[Union[Dict, str, Path, Checkpoint]]
    ) -> Optional[Checkpoint]:
        """Load the checkpoint dictionary from the input dict or path."""
        if checkpoint_to_load is None:
            return None
        if isinstance(checkpoint_to_load, Dict):
            return Checkpoint.from_dict(checkpoint_to_load)
        if isinstance(checkpoint_to_load, Checkpoint):
            return checkpoint_to_load
        else:
            # Load checkpoint from path.
            return load_checkpoint_from_path(checkpoint_to_load)

    def _process_checkpoint(
        self,
        checkpoint_result: TrainingResult,
        decode_checkpoint_fn: Callable,
    ) -> _TrackedCheckpoint:
        # Convert one worker's reported checkpoint into a tracked in-memory
        # checkpoint, validating that the scoring attribute was reported.
        checkpoint_data = checkpoint_result.data
        checkpoint_metadata = checkpoint_result.metadata or {}
        checkpoint_rank = checkpoint_metadata.get(CHECKPOINT_RANK_KEY, 0)

        if isinstance(checkpoint_data, str):
            # A string payload is a directory path; rebuild the concrete
            # Checkpoint subclass recorded in the metadata.
            checkpoint_class: Type[Checkpoint] = checkpoint_metadata[
                CHECKPOINT_METADATA_KEY
            ].checkpoint_type
            checkpoint_data = checkpoint_class.from_directory(checkpoint_data)
            checkpoint_data._metadata = checkpoint_metadata[CHECKPOINT_METADATA_KEY]
        else:
            # TODO(ml-team): Remove once we remove Backend.decode_data
            checkpoint_data = decode_checkpoint_fn(checkpoint_data)

        score_attr = self._checkpoint_strategy.checkpoint_score_attribute
        if (
            self._checkpoint_strategy.num_to_keep != 0
            and score_attr not in checkpoint_metadata
        ):
            raise ValueError(
                f"Unable to persist checkpoint for "
                f"checkpoint_score_attribute: "
                f"{score_attr}. "
                f"Include this attribute in the call to "
                f"`session.report()`."
            )

        return _TrackedCheckpoint(
            dir_or_data=checkpoint_data,
            checkpoint_id=self._latest_checkpoint_id,
            storage_mode=CheckpointStorage.MEMORY,
            metrics={score_attr: checkpoint_metadata.get(score_attr, 0.0)},
            rank=checkpoint_rank,
        )

    def _process_checkpoints(
        self,
        checkpoint_results: List[TrainingResult],
        decode_checkpoint_fn: Callable,
    ) -> None:
        """Ray Train entrypoint. Perform all processing for a checkpoint."""
        if self._checkpoint_strategy._checkpoint_keep_all_ranks:
            tracked_checkpoints = [
                self._process_checkpoint(checkpoint_result, decode_checkpoint_fn)
                for checkpoint_result in checkpoint_results
            ]
        else:
            # Get checkpoint from first worker.
            tracked_checkpoints = [
                self._process_checkpoint(checkpoint_results[0], decode_checkpoint_fn)
            ]
        self.register_checkpoints(checkpoints=tracked_checkpoints)

    def _get_next_checkpoint_path(self) -> Optional[Path]:
        """Path to the next checkpoint to persist."""
        checkpoint_path = _construct_checkpoint_path_name(
            self._latest_checkpoint_id + 1
        )
        return self.latest_checkpoint_dir.joinpath(checkpoint_path)

    def on_start_training(
        self,
        checkpoint_strategy: Optional[CheckpointConfig],
        run_dir: Path,
        latest_checkpoint_id: Optional[int] = 0,
    ):
        # (Re)configure the manager for a new training run.
        checkpoint_strategy = checkpoint_strategy or CheckpointConfig()
        self._checkpoint_strategy = checkpoint_strategy

        self._validate_checkpoint_strategy()

        self.run_dir = run_dir
        self._latest_checkpoint_id = latest_checkpoint_id or 0

    # Train-specific attributes
    @property
    def latest_checkpoint(self):
        if not self._latest_memory_checkpoint:
            return None
        return self._latest_memory_checkpoint.dir_or_data

    @property
    def latest_checkpoint_dir(self) -> Optional[Path]:
        """Path to the latest checkpoint directory."""
        checkpoint_dir = Path(TRAIN_CHECKPOINT_SUBDIR)
        return construct_path(checkpoint_dir, self.run_dir)

    @property
    def latest_checkpoint_file_name(self) -> Optional[str]:
        """Filename to use for the latest checkpoint."""
        if self._latest_checkpoint_id > 0:
            return _construct_checkpoint_path_name(self._latest_checkpoint_id)
        else:
            return None

    @property
    def next_checkpoint_path(self) -> Optional[Path]:
        """Path to the next checkpoint to persist."""
        checkpoint_file = _construct_checkpoint_path_name(
            self._latest_checkpoint_id + 1
        )
        return self.latest_checkpoint_dir.joinpath(checkpoint_file)

    @property
    def best_checkpoint_path(self) -> Optional[Path]:
        """Path to the best persisted checkpoint."""
        if self._best_persisted_checkpoint:
            return Path(self._best_persisted_checkpoint.dir_or_data)
        else:
            return None

    @property
    def latest_checkpoint_id(self) -> Optional[int]:
        """The checkpoint id of most recently saved checkpoint.

        If no checkpoint has been saved yet, then return None.
        """
        checkpoint_id = self._latest_checkpoint_id
        if checkpoint_id == 0:
            return None
        else:
            return checkpoint_id


class TuneCheckpointManager(CheckpointManager):
    # CheckpointManager variant used when training runs under Ray Tune:
    # checkpoint directories and ids are delegated to the Tune trial.

    def __init__(
        self,
        run_dir: Optional[Path] = None,
        checkpoint_strategy: Optional[CheckpointConfig] = None,
    ):
        super().__init__(run_dir, checkpoint_strategy)

        # Name of the marker dropped by the Trainable. If a worker detects
        # the presence of the marker in the trial dir, it will use lazy
        # checkpointing.
        self._lazy_marker_path = None
        if tune.is_session_enabled():
            self._lazy_marker_path = (
                Path(session.get_trial_dir()) / LAZY_CHECKPOINT_MARKER_FILE
            )
            with open(self._lazy_marker_path, "w"):
                pass

    def _load_checkpoint(
        self, checkpoint_to_load: Optional[Union[Dict, str, Path, Checkpoint]]
    ) -> Optional[Union[Dict, Checkpoint]]:
        loaded_checkpoint = super()._load_checkpoint(checkpoint_to_load)
        assert not loaded_checkpoint or isinstance(loaded_checkpoint, Checkpoint)
        # `latest_checkpoint_id` will be the id assigned to the next checkpoint,
        # which should be one more than the loaded checkpoint's id
        # If no checkpoint is loaded, initialize this to 0
        self._latest_checkpoint_id = (
            getattr(loaded_checkpoint, TUNE_CHECKPOINT_ID, -1) + 1
        )
        return loaded_checkpoint

    def add_tune_checkpoint_id(self, checkpoint: Checkpoint):
        # Store the checkpoint_id in the file so that the Tune trial can be
        # resumed after failure or cancellation.
        setattr(checkpoint, TUNE_CHECKPOINT_ID, self._latest_checkpoint_id)

    def _process_persistent_checkpoint(self, checkpoint: _TrackedCheckpoint):
        self.add_tune_checkpoint_id(checkpoint.dir_or_data)
        # Train may choose not to commit a checkpoint, but make sure the
        # checkpoint is always committed for Tuning purpose.
        # After this is committed, checkpoint.dir_or_path will become a string,
        # which will prevent this checkpoint from being committed again in the
        # subsequent super()._process_persistent_checkpoint() call.
        with tune.checkpoint_dir(step=self._latest_checkpoint_id) as checkpoint_dir:
            path = Path(checkpoint_dir)
            checkpoint.commit(path)
        return super()._process_persistent_checkpoint(checkpoint)

    @property
    def latest_checkpoint_dir(self) -> Optional[Path]:
        # Tune owns the checkpoint directory layout.
        raise NotImplementedError

    @property
    def next_checkpoint_path(self) -> Optional[Path]:
        return None

    def _get_next_checkpoint_path(self) -> Optional[Path]:
        return None

    def __del__(self):
        # Best-effort removal of the lazy-checkpointing marker file.
        try:
            assert self._lazy_marker_path
            os.remove(str(self._lazy_marker_path))
        except Exception:
            pass
        return super().__del__()


def _construct_checkpoint_path_name(checkpoint_id: int) -> str:
    # Zero-padded so lexical sort matches numeric order.
    return f"checkpoint_{checkpoint_id:06d}"
// Package usereps provides the generic user-account endpoints (register,
// activate, login, pwd management, avatar, FCM registration, jin storage)
// for tlbx-based apps.
package usereps

//go:generate go install github.com/valyala/quicktemplate/qtc
//go:generate qtc -file=usereps.sql -skipLineComments

import (
	"bytes"
	"io/ioutil"
	"math"
	"net/http"
	"regexp"
	"strings"
	"time"

	"image"
	_ "image/gif"
	_ "image/jpeg"
	"image/png"

	"github.com/0xor1/sqlx"
	. "github.com/0xor1/tlbx/pkg/core"
	"github.com/0xor1/tlbx/pkg/crypt"
	"github.com/0xor1/tlbx/pkg/json"
	"github.com/0xor1/tlbx/pkg/ptr"
	"github.com/0xor1/tlbx/pkg/sqlh"
	"github.com/0xor1/tlbx/pkg/store"
	"github.com/0xor1/tlbx/pkg/web/app"
	"github.com/0xor1/tlbx/pkg/web/app/service"
	"github.com/0xor1/tlbx/pkg/web/app/service/sql"
	"github.com/0xor1/tlbx/pkg/web/app/session/me"
	"github.com/0xor1/tlbx/pkg/web/app/user"
	"github.com/0xor1/tlbx/pkg/web/app/validate"
	"github.com/disintegration/imaging"
	"github.com/go-sql-driver/mysql"
)

const (
	// AvatarBucket is the object-store bucket that avatar images live in.
	AvatarBucket = "avatars"
	// AvatarPrefix is prepended to the user id when generating store keys.
	AvatarPrefix = ""
)

// NopOnSetSocials is a no-op onSetSocials callback for apps that want the
// social features enabled but have nothing to do when they change.
var NopOnSetSocials = func(_ app.Tlbx, _ *user.User) {}

// New builds the full set of user endpoints.
//
// Optional features are toggled by the callbacks:
//   - onSetSocials != nil  => handle/alias/avatar ("socials") endpoints
//   - validateFcmTopic != nil => FCM push-notification endpoints
//   - enableJin => per-user adhoc json storage endpoints
//
// onDelete (may be nil) is invoked after a user deletes their account so the
// app can clean up its own data.
func New(
	fromEmail string,
	activateFmtLink,
	loginLinkFmtLink,
	confirmChangeEmailFmtLink string,
	onDelete func(app.Tlbx, ID),
	onSetSocials func(app.Tlbx, *user.User),
	validateFcmTopic func(app.Tlbx, IDs) (sql.Tx, error),
	enableJin bool,
) []*app.Endpoint {
	enableSocials := onSetSocials != nil
	enableFCM := validateFcmTopic != nil
	eps := []*app.Endpoint{
		{
			Description:  "register a new account (requires email link)",
			Path:         (&user.Register{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				d := &user.Register{}
				if enableSocials {
					d.Handle = ptr.String("")
					d.Alias = ptr.String("")
				}
				return d
			},
			GetExampleArgs: func() interface{} {
				ex := &user.Register{
					Email: "<EMAIL>",
					Pwd:   "<PASSWORD>",
				}
				if enableSocials {
					ex.Handle = ptr.String("bloe_joggs")
					ex.Alias = ptr.String("<NAME>")
				}
				return ex
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				app.BadReqIf(me.AuthedExists(tlbx), "already logged in")
				args := a.(*user.Register)
				args.Email = StrTrimWS(args.Email)
				validate.Str("email", args.Email, 0, emailMaxLen, emailRegex)
				// handle/alias are only meaningful when socials are enabled.
				if !enableSocials {
					args.Handle = nil
					args.Alias = nil
				}
				app.BadReqIf(enableSocials && args.Handle == nil, "social system requires a user handle")
				if args.Handle != nil {
					// normalize: trim, lower-case, spaces -> underscores.
					args.Handle = ptr.String(
						strings.ReplaceAll(
							StrLower(
								StrTrimWS(*args.Handle)), " ", "_"))
					validate.Str("handle", *args.Handle, handleMinLen, handleMaxLen, handleRegex)
				}
				if args.Alias != nil {
					args.Alias = ptr.String(StrTrimWS(*args.Alias))
					validate.Str("alias", *args.Alias, 0, aliasMaxLen)
				}
				activateCode := crypt.UrlSafeString(250)
				id := me.Get(tlbx).ID()
				srv := service.Get(tlbx)
				var hasAvatar *bool
				if enableSocials {
					hasAvatar = ptr.Bool(false)
				}
				var fcmEnabled *bool
				if enableFCM {
					fcmEnabled = ptr.Bool(false)
				}
				// two separate stores: user data and pwd data each get their
				// own write tx; both are committed only after all steps pass.
				usrtx := srv.User().BeginWrite()
				defer usrtx.Rollback()
				_, err := usrtx.Exec(qryUserInsert(), id, args.Email, args.Handle, args.Alias, hasAvatar, fcmEnabled, Now(), time.Time{}, activateCode)
				if err != nil {
					// 1062 is MySQL ER_DUP_ENTRY — unique key collision on
					// email or handle.
					mySqlErr, ok := err.(*mysql.MySQLError)
					app.BadReqIf(ok && mySqlErr.Number == 1062, "email or handle already registered")
					PanicOn(err)
				}
				pwdtx := srv.Pwd().BeginWrite()
				defer pwdtx.Rollback()
				setPwd(tlbx, pwdtx, id, args.Pwd)
				sendActivateEmail(srv, args.Email, fromEmail, Strf(activateFmtLink, id, activateCode), args.Handle)
				usrtx.Commit()
				pwdtx.Commit()
				return nil
			},
		},
		{
			Description:  "resend activate link",
			Path:         (&user.ResendActivateLink{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.ResendActivateLink{}
			},
			GetExampleArgs: func() interface{} {
				return &user.ResendActivateLink{
					Email: "<EMAIL>",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.ResendActivateLink)
				srv := service.Get(tlbx)
				tx := srv.User().BeginRead()
				defer tx.Rollback()
				fullUser := getUser(tx, &args.Email, nil)
				tx.Commit()
				// silently succeed for unknown/already-activated accounts so
				// this endpoint can't be used to probe registered emails.
				if fullUser == nil || fullUser.ActivateCode == nil {
					return nil
				}
				sendActivateEmail(srv, args.Email, fromEmail, Strf(activateFmtLink, fullUser.ID, *fullUser.ActivateCode), fullUser.Handle)
				return nil
			},
		},
		{
			Description:  "activate a new account",
			Path:         (&user.Activate{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.Activate{}
			},
			GetExampleArgs: func() interface{} {
				return &user.Activate{
					Me:   app.ExampleID(),
					Code: "123abc",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.Activate)
				srv := service.Get(tlbx)
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				user := getUser(tx, nil, &args.Me)
				app.BadReqIf(*user.ActivateCode != args.Code, "")
				now := Now()
				user.ActivatedOn = now
				user.ActivateCode = nil
				updateUser(tx, user)
				// drop any pre-registration jin data for this id.
				tx.MustExec(qryJinDelete(), user.ID)
				tx.Commit()
				return nil
			},
		},
		{
			Description:  "change email address (requires email link)",
			Path:         (&user.ChangeEmail{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.ChangeEmail{}
			},
			GetExampleArgs: func() interface{} {
				return &user.ChangeEmail{
					NewEmail: "<EMAIL>",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.ChangeEmail)
				args.NewEmail = StrTrimWS(args.NewEmail)
				validate.Str("email", args.NewEmail, 0, emailMaxLen, emailRegex)
				srv := service.Get(tlbx)
				me := me.AuthedGet(tlbx)
				changeEmailCode := crypt.UrlSafeString(250)
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				existingUser := getUser(tx, &args.NewEmail, nil)
				app.BadReqIf(existingUser != nil, "email already registered")
				// store the pending email + confirmation code; the swap
				// happens in confirm change email.
				fullUser := getUser(tx, nil, &me)
				fullUser.NewEmail = &args.NewEmail
				fullUser.ChangeEmailCode = &changeEmailCode
				updateUser(tx, fullUser)
				tx.Commit()
				sendConfirmChangeEmailEmail(srv, args.NewEmail, fromEmail, Strf(confirmChangeEmailFmtLink, me, changeEmailCode))
				return nil
			},
		},
		{
			Description:  "resend change email link",
			Path:         (&user.ResendChangeEmailLink{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return nil
			},
			GetExampleArgs: func() interface{} {
				return nil
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, _ interface{}) interface{} {
				srv := service.Get(tlbx)
				me := me.AuthedGet(tlbx)
				tx := srv.User().BeginRead()
				defer tx.Rollback()
				fullUser := getUser(tx, nil, &me)
				tx.Commit()
				sendConfirmChangeEmailEmail(srv, *fullUser.NewEmail, fromEmail, Strf(confirmChangeEmailFmtLink, me, *fullUser.ChangeEmailCode))
				return nil
			},
		},
		{
			Description:  "confirm change email",
			Path:         (&user.ConfirmChangeEmail{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.ConfirmChangeEmail{}
			},
			GetExampleArgs: func() interface{} {
				return &user.ConfirmChangeEmail{
					Me:   app.ExampleID(),
					Code: "123abc",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.ConfirmChangeEmail)
				srv := service.Get(tlbx)
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				user := getUser(tx, nil, &args.Me)
				app.BadReqIf(*user.ChangeEmailCode != args.Code, "")
				// promote the pending email to the active one.
				user.ChangeEmailCode = nil
				user.Email = *user.NewEmail
				user.NewEmail = nil
				updateUser(tx, user)
				tx.Commit()
				return nil
			},
		},
		{
			Description:  "reset password (requires email link)",
			Path:         (&user.ResetPwd{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.ResetPwd{}
			},
			GetExampleArgs: func() interface{} {
				return &user.ResetPwd{
					Email: "<EMAIL>",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.ResetPwd)
				srv := service.Get(tlbx)
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				user := getUser(tx, &args.Email, nil)
				if user != nil {
					now := Now()
					// rate limit: at most one reset per 10 minutes.
					if user.LastPwdResetOn != nil {
						mustWaitDur := (10 * time.Minute) - Now().Sub(*user.LastPwdResetOn)
						app.BadReqIf(mustWaitDur > 0, "must wait %d seconds before reseting pwd again", int64(math.Ceil(mustWaitDur.Seconds())))
					}
					user.LastPwdResetOn = &now
					updateUser(tx, user)
					pwdtx := srv.Pwd().BeginWrite()
					defer pwdtx.Rollback()
					// NOTE(review): literal redacted in source — preserved
					// verbatim; confirm against the original repository.
					newPwd := <PASSWORD>` + crypt.UrlSafeString(12)
					setPwd(tlbx, pwdtx, user.ID, newPwd)
					sendResetPwdEmail(srv, args.Email, fromEmail, newPwd)
					pwdtx.Commit()
				}
				tx.Commit()
				return nil
			},
		},
		{
			Description:  "set password",
			Path:         (&user.SetPwd{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.SetPwd{}
			},
			GetExampleArgs: func() interface{} {
				return &user.SetPwd{
					OldPwd: "<PASSWORD>",
					NewPwd: "<PASSWORD>",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.SetPwd)
				srv := service.Get(tlbx)
				me := me.AuthedGet(tlbx)
				pwdtx := srv.Pwd().BeginWrite()
				defer pwdtx.Rollback()
				pwd := getPwd(pwdtx, me)
				// verify old pwd by re-deriving the scrypt key with the
				// stored salt/params and comparing.
				app.BadReqIf(!bytes.Equal(crypt.ScryptKey([]byte(args.OldPwd), pwd.Salt, pwd.N, pwd.R, pwd.P, scryptKeyLen), pwd.Pwd), "current pwd does not match")
				setPwd(tlbx, pwdtx, me, args.NewPwd)
				pwdtx.Commit()
				return nil
			},
		},
		{
			Description:  "delete account",
			Path:         (&user.Delete{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.Delete{}
			},
			GetExampleArgs: func() interface{} {
				return &user.Delete{
					Pwd: "<PASSWORD>",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.Delete)
				srv := service.Get(tlbx)
				m := me.AuthedGet(tlbx)
				pwdtx := srv.Pwd().BeginWrite()
				defer pwdtx.Rollback()
				pwd := getPwd(pwdtx, m)
				app.BadReqIf(!bytes.Equal(pwd.Pwd, crypt.ScryptKey([]byte(args.Pwd), pwd.Salt, pwd.N, pwd.R, pwd.P, scryptKeyLen)), "incorrect pwd")
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				// jin and fcm tokens tables are cleared by foreign key cascade
				tx.MustExec(qryUserDelete(), m)
				pwdtx.MustExec(qryPwdDelete(), m)
				if onDelete != nil {
					onDelete(tlbx, m)
				}
				me.Del(tlbx)
				tx.Commit()
				pwdtx.Commit()
				return nil
			},
		},
		{
			Description:  "login",
			Path:         (&user.Login{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.Login{}
			},
			GetExampleArgs: func() interface{} {
				return &user.Login{
					Email: "<EMAIL>",
					Pwd:   "<PASSWORD>",
				}
			},
			GetExampleResponse: func() interface{} {
				ex := &user.Me{}
				ex.ID = app.ExampleID()
				if enableSocials {
					ex.Handle = ptr.String("bloe_joggs")
					ex.Alias = ptr.String("<NAME>")
					ex.HasAvatar = ptr.Bool(true)
				}
				if enableFCM {
					ex.FcmEnabled = ptr.Bool(true)
				}
				return ex
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				// single error message for both unknown email and wrong pwd
				// so the endpoint doesn't leak which emails are registered.
				emailOrPwdMismatch := func(condition bool) {
					app.ReturnIf(condition, http.StatusNotFound, "email and/or pwd are not valid")
				}
				args := a.(*user.Login)
				validate.Str("email", args.Email, 0, emailMaxLen, emailRegex)
				validate.Str("pwd", args.Pwd, pwdMinLen, pwdMaxLen, pwdRegexs...)
				srv := service.Get(tlbx)
				tx := srv.User().BeginRead()
				defer tx.Rollback()
				user := getUser(tx, &args.Email, nil)
				emailOrPwdMismatch(user == nil)
				pwdtx := srv.Pwd().BeginWrite()
				defer pwdtx.Rollback()
				pwd := getPwd(pwdtx, user.ID)
				emailOrPwdMismatch(!bytes.Equal(pwd.Pwd, crypt.ScryptKey([]byte(args.Pwd), pwd.Salt, pwd.N, pwd.R, pwd.P, scryptKeyLen)))
				// if encryption params have changed re encrypt on successful login
				if len(pwd.Salt) != scryptSaltLen ||
					len(pwd.Pwd) != scryptKeyLen ||
					pwd.N != scryptN ||
					pwd.R != scryptR ||
					pwd.P != scryptP {
					setPwd(tlbx, pwdtx, user.ID, args.Pwd)
				}
				tx.Commit()
				pwdtx.Commit()
				me.AuthedSet(tlbx, user.ID)
				// user here is the *fullUser local (shadows the user pkg);
				// return its embedded Me struct.
				return &user.Me
			},
		},
		{
			Description:  "send login link email",
			Path:         (&user.SendLoginLinkEmail{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.SendLoginLinkEmail{}
			},
			GetExampleArgs: func() interface{} {
				return &user.SendLoginLinkEmail{
					Email: "<EMAIL>",
				}
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.SendLoginLinkEmail)
				validate.Str("email", args.Email, 0, emailMaxLen, emailRegex)
				srv := service.Get(tlbx)
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				user := getUser(tx, &args.Email, nil)
				app.BadReqIf(user == nil, "unknown email")
				// refuse to mint a new code while an unexpired one exists.
				app.BadReqIf(user.LoginLinkCodeCreatedOn != nil &&
					user.LoginLinkCodeCreatedOn.After(Now().Add(-8*time.Minute)),
					"An unused login link code still exists")
				user.LoginLinkCodeCreatedOn = ptr.Time(NowMilli())
				user.LoginLinkCode = ptr.String(crypt.UrlSafeString(250))
				updateUser(tx, user)
				sendLoginLinkEmail(srv, user.Email, fromEmail, Strf(loginLinkFmtLink, user.ID, *user.LoginLinkCode), user.Handle)
				tx.Commit()
				return nil
			},
		},
		{
			Description:  "login link login",
			Path:         (&user.LoginLinkLogin{}).Path(),
			Timeout:      1000,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return &user.LoginLinkLogin{}
			},
			GetExampleArgs: func() interface{} {
				return &user.LoginLinkLogin{
					Me:   app.ExampleID(),
					Code: "123abc",
				}
			},
			GetExampleResponse: func() interface{} {
				ex := &user.Me{}
				ex.ID = app.ExampleID()
				if enableSocials {
					ex.Handle = ptr.String("bloe_joggs")
					ex.Alias = ptr.String("<NAME>")
					ex.HasAvatar = ptr.Bool(true)
				}
				if enableFCM {
					ex.FcmEnabled = ptr.Bool(true)
				}
				return ex
			},
			Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
				args := a.(*user.LoginLinkLogin)
				srv := service.Get(tlbx)
				tx := srv.User().BeginWrite()
				defer tx.Rollback()
				user := getUser(tx, nil, &args.Me)
				app.BadReqIf(user == nil, "unknown user")
				app.BadReqIf(user.LoginLinkCodeCreatedOn == nil ||
					user.LoginLinkCodeCreatedOn.Before(Now().Add(-10*time.Minute)) ||
					*user.LoginLinkCode != args.Code,
					"login code invalid (only valid for 10 minutes from time of creation)")
				// codes are single-use: clear them on successful login.
				user.LoginLinkCodeCreatedOn = nil
				user.LoginLinkCode = nil
				updateUser(tx, user)
				tx.Commit()
				me.AuthedSet(tlbx, user.ID)
				return &user.Me
			},
		},
		{
			Description:  "logout",
			Path:         (&user.Logout{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return nil
			},
			GetExampleArgs: func() interface{} {
				return nil
			},
			GetExampleResponse: func() interface{} {
				return nil
			},
			Handler: func(tlbx app.Tlbx, _ interface{}) interface{} {
				if me.AuthedExists(tlbx) {
					m := me.AuthedGet(tlbx)
					srv := service.Get(tlbx)
					tokens := make([]string, 0, 5)
					tx := srv.User().BeginWrite()
					defer tx.Rollback()
					// collect and delete this user's fcm tokens, then notify
					// those devices that the session has ended.
					tx.MustGetN(&tokens, qryDistinctFCMTokens(), m)
					tx.MustExec(qryFCMTokensDelete(), m)
					srv.FCM().RawAsyncSend("logout", tokens, map[string]string{}, 0)
					tx.Commit()
					me.Del(tlbx)
				}
				return nil
			},
		},
		{
			Description:  "get me",
			Path:         (&user.GetMe{}).Path(),
			Timeout:      500,
			MaxBodyBytes: app.KB,
			IsPrivate:    false,
			GetDefaultArgs: func() interface{} {
				return nil
			},
			GetExampleArgs: func() interface{} {
				return nil
			},
			GetExampleResponse: func() interface{} {
				ex := &user.Me{}
				ex.ID = app.ExampleID()
				if enableSocials {
					ex.Handle = ptr.String("bloe_joggs")
					ex.Alias = ptr.String("<NAME>")
					ex.HasAvatar = ptr.Bool(true)
				}
				if enableFCM {
					ex.FcmEnabled = ptr.Bool(true)
				}
				return ex
			},
			Handler: func(tlbx app.Tlbx, _ interface{}) interface{} {
				if !me.AuthedExists(tlbx) {
					return nil
				}
				me := me.AuthedGet(tlbx)
				tx := service.Get(tlbx).User().BeginRead()
				defer tx.Rollback()
				user := getUser(tx, nil, &me)
				tx.Commit()
				return &user.Me
			},
		},
	}
	if enableJin {
		eps = append(eps,
			&app.Endpoint{
				Description:  "set users jin (json bin), adhoc json content",
				Path:         (&user.SetJin{}).Path(),
				Timeout:      500,
				MaxBodyBytes: 10 * app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &user.SetJin{}
				},
				GetExampleArgs: func() interface{} {
					return &user.SetJin{
						Val: exampleJin,
					}
				},
				GetExampleResponse: func() interface{} {
					return nil
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.SetJin)
					me := me.AuthedGet(tlbx)
					srv := service.Get(tlbx)
					// nil value means "clear my jin".
					if args.Val == nil {
						srv.User().MustExec(qryJinDelete(), me)
					} else {
						// if app requires init ctx data store it in jin
						srv.User().MustExec(qryJinInsert(), me, args.Val)
					}
					return nil
				},
			},
			&app.Endpoint{
				Description:  "get users jin (json bin), adhoc json content",
				Path:         (&user.GetJin{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return nil
				},
				GetExampleArgs: func() interface{} {
					return nil
				},
				GetExampleResponse: func() interface{} {
					return exampleJin
				},
				Handler: func(tlbx app.Tlbx, _ interface{}) interface{} {
					me := me.AuthedGet(tlbx)
					srv := service.Get(tlbx)
					res := &json.Json{}
					tx := srv.User().BeginRead()
					defer tx.Rollback()
					getJin(tx, me, res)
					tx.Commit()
					return res
				},
			})
	}
	if enableSocials {
		eps = append(eps,
			&app.Endpoint{
				Description:  "get users",
				Path:         (&user.Get{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &user.Get{}
				},
				GetExampleArgs: func() interface{} {
					return &user.Get{
						Users: []ID{app.ExampleID()},
					}
				},
				GetExampleResponse: func() interface{} {
					return []user.User{
						{
							ID:        app.ExampleID(),
							Handle:    ptr.String("bloe_joggs"),
							Alias:     ptr.String("<NAME>"),
							HasAvatar: ptr.Bool(true),
						},
					}
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.Get)
					if len(args.Users) == 0 {
						return nil
					}
					validate.MaxIDs("users", args.Users, 1000)
					srv := service.Get(tlbx)
					res := make([]*user.User, 0, len(args.Users))
					srv.User().MustQuery(func(r *sqlx.Rows) {
						for r.Next() {
							u := &user.User{}
							PanicOn(r.Scan(&u.ID, &u.Handle, &u.Alias, &u.HasAvatar))
							res = append(res, u)
						}
					}, qryUsersGet(len(args.Users)), args.Users.ToIs()...)
					return res
				},
			},
			&app.Endpoint{
				Description:  "set handle",
				Path:         (&user.SetHandle{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &user.SetHandle{}
				},
				GetExampleArgs: func() interface{} {
					return &user.SetHandle{
						Handle: "joe_bloggs",
					}
				},
				GetExampleResponse: func() interface{} {
					return nil
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.SetHandle)
					validate.Str("handle", args.Handle, handleMinLen, handleMaxLen, handleRegex)
					srv := service.Get(tlbx)
					me := me.AuthedGet(tlbx)
					tx := srv.User().BeginWrite()
					defer tx.Rollback()
					user := getUser(tx, nil, &me)
					user.Handle = &args.Handle
					updateUser(tx, user)
					// let the app react to the social-profile change.
					if onSetSocials != nil {
						onSetSocials(tlbx, &user.User)
					}
					tx.Commit()
					return nil
				},
			},
			&app.Endpoint{
				Description:  "set alias",
				Path:         (&user.SetAlias{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &user.SetAlias{}
				},
				GetExampleArgs: func() interface{} {
					return &user.SetAlias{
						Alias: ptr.String("<NAME>"),
					}
				},
				GetExampleResponse: func() interface{} {
					return nil
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.SetAlias)
					if args.Alias != nil {
						validate.Str("alias", *args.Alias, 0, aliasMaxLen)
					}
					srv := service.Get(tlbx)
					me := me.AuthedGet(tlbx)
					tx := srv.User().BeginWrite()
					defer tx.Rollback()
					user := getUser(tx, nil, &me)
					user.Alias = args.Alias
					updateUser(tx, user)
					if onSetSocials != nil {
						onSetSocials(tlbx, &user.User)
					}
					tx.Commit()
					return nil
				},
			},
			&app.Endpoint{
				Description:  "set avatar",
				Path:         (&user.SetAvatar{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.MB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &app.UpStream{}
				},
				GetExampleArgs: func() interface{} {
					return &app.UpStream{}
				},
				GetExampleResponse: func() interface{} {
					return nil
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*app.UpStream)
					defer args.Content.Close()
					me := me.AuthedGet(tlbx)
					srv := service.Get(tlbx)
					tx := srv.User().BeginWrite()
					defer tx.Rollback()
					user := getUser(tx, nil, &me)
					content, err := ioutil.ReadAll(args.Content)
					PanicOn(err)
					args.Size = int64(len(content))
					if args.Size > 0 {
						// replacing an existing avatar: delete the old object
						// first.
						if *user.HasAvatar {
							srv.Store().MustDelete(AvatarBucket, store.GenKey(AvatarPrefix, me))
						}
						avatar, _, err := image.Decode(bytes.NewBuffer(content))
						PanicOn(err)
						bounds := avatar.Bounds()
						xDiff := bounds.Max.X - bounds.Min.X
						yDiff := bounds.Max.Y - bounds.Min.Y
						// normalize to a square avatarDim x avatarDim image.
						if xDiff != yDiff || xDiff != avatarDim || yDiff != avatarDim {
							avatar = imaging.Fill(avatar, avatarDim, avatarDim, imaging.Center, imaging.Lanczos)
						}
						buff := &bytes.Buffer{}
						PanicOn(png.Encode(buff, avatar))
						srv.Store().MustPut(
							AvatarBucket,
							store.GenKey(AvatarPrefix, me),
							args.Name,
							"image/png",
							int64(buff.Len()),
							true,
							false,
							bytes.NewReader(buff.Bytes()))
					} else if *user.HasAvatar == true {
						// empty upload means "remove my avatar".
						srv.Store().MustDelete(AvatarBucket, store.GenKey(AvatarPrefix, me))
					}
					nowHasAvatar := args.Size > 0
					if *user.HasAvatar != nowHasAvatar {
						user.HasAvatar = ptr.Bool(nowHasAvatar)
						if onSetSocials != nil {
							onSetSocials(tlbx, &user.User)
						}
					}
					updateUser(tx, user)
					tx.Commit()
					return nil
				},
			},
			&app.Endpoint{
				Description:      "get avatar",
				Path:             (&user.GetAvatar{}).Path(),
				Timeout:          500,
				MaxBodyBytes:     app.KB,
				SkipXClientCheck: true,
				IsPrivate:        false,
				GetDefaultArgs: func() interface{} {
					return &user.GetAvatar{}
				},
				GetExampleArgs: func() interface{} {
					return &user.GetAvatar{
						User: app.ExampleID(),
					}
				},
				GetExampleResponse: func() interface{} {
					return &app.DownStream{}
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.GetAvatar)
					srv := service.Get(tlbx)
					name, mimeType, size, content := srv.Store().MustGet(AvatarBucket, store.GenKey(AvatarPrefix, args.User))
					ds := &app.DownStream{}
					ds.ID = args.User
					ds.Name = name
					ds.Type = mimeType
					ds.Size = size
					ds.Content = content
					return ds
				},
			})
	}
	if validateFcmTopic != nil {
		eps = append(eps,
			&app.Endpoint{
				Description:  "set fcm enabled",
				Path:         (&user.SetFCMEnabled{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &user.SetFCMEnabled{
						Val: true,
					}
				},
				GetExampleArgs: func() interface{} {
					return &user.SetFCMEnabled{
						Val: true,
					}
				},
				GetExampleResponse: func() interface{} {
					return nil
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.SetFCMEnabled)
					me := me.AuthedGet(tlbx)
					tx := service.Get(tlbx).User().BeginWrite()
					defer tx.Rollback()
					u := getUser(tx, nil, &me)
					if *u.FcmEnabled == args.Val {
						// not changing anything
						return nil
					}
					u.FcmEnabled = &args.Val
					updateUser(tx, u)
					tokens := make([]string, 0, 5)
					tx.MustGetN(&tokens, qryDistinctFCMTokens(), me)
					tx.Commit()
					if len(tokens) == 0 {
						// no tokens to notify
						return nil
					}
					fcmType := "enabled"
					if !args.Val {
						fcmType = "disabled"
					}
					service.Get(tlbx).FCM().RawAsyncSend(fcmType, tokens, map[string]string{}, 0)
					return nil
				},
			},
			&app.Endpoint{
				Description:  "register for fcm",
				Path:         (&user.RegisterForFCM{}).Path(),
				Timeout:      500,
				MaxBodyBytes: app.KB,
				IsPrivate:    false,
				GetDefaultArgs: func() interface{} {
					return &user.RegisterForFCM{}
				},
				GetExampleArgs: func() interface{} {
					return &user.RegisterForFCM{
						Topic:  IDs{app.ExampleID()},
						Client: ptr.ID(app.ExampleID()),
						Token:  "<PASSWORD>",
					}
				},
				GetExampleResponse: func() interface{} {
					return app.ExampleID()
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.RegisterForFCM)
					app.BadReqIf(len(args.Topic) == 0 || len(args.Topic) > 5, "topic must contain 1 to 5 ids")
					app.BadReqIf(args.Token == "", "empty string is not a valid fcm token")
					client := args.Client
					if client == nil {
						client = ptr.ID(tlbx.NewID())
					}
					me := me.AuthedGet(tlbx)
					tx := service.Get(tlbx).User().BeginWrite()
					defer tx.Rollback()
					u := getUser(tx, nil, &me)
					app.BadReqIf(u.FcmEnabled == nil || !*u.FcmEnabled, "fcm not enabled for user, please enable first then register for topics")
					// this query is used to get a users 5th token createdOn value if they have one
					row := tx.QueryRow(qryFifthOldestTokenCreatedOn(), me)
					fifthYoungestTokenCreatedOn := time.Time{}
					sqlh.PanicIfIsntNoRows(row.Scan(&fifthYoungestTokenCreatedOn))
					if !fifthYoungestTokenCreatedOn.IsZero() {
						// this user has 5 topics they're subscribed too already so delete the older ones
						// to make room for this new one
						tx.MustExec(qryFCMTokensDeleteOldest(), me, fifthYoungestTokenCreatedOn)
					}
					// give the app a chance to veto the topic; it may hand
					// back its own tx which is committed after ours.
					appTx, err := validateFcmTopic(tlbx, args.Topic)
					if appTx != nil {
						defer appTx.Rollback()
					}
					PanicOn(err)
					tx.MustExec(qryFCMTokenInsert(), args.Topic.StrJoin("_"), args.Token, me, client, tlbx.Start())
					tx.Commit()
					if appTx != nil {
						appTx.Commit()
					}
					return client
				},
			},
			&app.Endpoint{
				Description:      "unregister from fcm",
				SkipXClientCheck: true,
				Path:             (&user.UnregisterFromFCM{}).Path(),
				Timeout:          500,
				MaxBodyBytes:     app.KB,
				IsPrivate:        false,
				GetDefaultArgs: func() interface{} {
					return &user.UnregisterFromFCM{}
				},
				GetExampleArgs: func() interface{} {
					return &user.UnregisterFromFCM{
						Client: app.ExampleID(),
					}
				},
				GetExampleResponse: func() interface{} {
					return nil
				},
				Handler: func(tlbx app.Tlbx, a interface{}) interface{} {
					args := a.(*user.UnregisterFromFCM)
					me := me.AuthedGet(tlbx)
					service.Get(tlbx).User().MustExec(qryFCMUnregister(), me, args.Client)
					return nil
				},
			})
	}
	return eps
}

// validation and scrypt parameters shared by all endpoints.
var (
	handleRegex  = regexp.MustCompile(`\A[_a-z0-9]{1,20}\z`)
	handleMinLen = 1
	handleMaxLen = 20
	emailRegex   = regexp.MustCompile(`\A.+@.+\..+\z`)
	emailMaxLen  = 250
	aliasMaxLen  = 50
	// a pwd must match every one of these (digit, lower, upper, word char).
	pwdRegexs = []*regexp.Regexp{
		regexp.MustCompile(`[0-9]`),
		regexp.MustCompile(`[a-z]`),
		regexp.MustCompile(`[A-Z]`),
		regexp.MustCompile(`[\w]`),
	}
	pwdMinLen     = 8
	pwdMaxLen     = 100
	scryptN       = 32768
	scryptR       = 8
	scryptP       = 1
	scryptSaltLen = 256
	scryptKeyLen  = 256
	avatarDim     = 250
	exampleJin    = json.MustFromString(`{"v":1, "saveDir":"/my/save/dir", "startTab":"favourites"}`)
)

// sendActivateEmail emails the account-activation link, personalized with the
// handle when one exists.
func sendActivateEmail(srv service.Layer, sendTo, from string, link string, handle *string) {
	html := `<p>Thank you for registering.</p><p>Click this link to activate your account:</p><p><a href="` + link + `">Activate</a></p><p>If you didn't register for this account you can simply ignore this email.</p>`
	txt := "Thank you for registering.\nClick this link to activate your account:\n\n" + link + "\n\nIf you didn't register for this account you can simply ignore this email."
	if handle != nil {
		html = Strf("Hi %s,\n\n%s", *handle, html)
		txt = Strf("Hi %s,\n\n%s", *handle, txt)
	}
	srv.Email().MustSend([]string{sendTo}, from, "Activate", html, txt)
}

// sendLoginLinkEmail emails the single-use, 10-minute login link.
func sendLoginLinkEmail(srv service.Layer, sendTo, from string, link string, handle *string) {
	html := `<p>Here is the login link you requested.</p><p>Click this link to login to your account:</p><p><a href="` + link + `">Login</a></p><p>This link will only be valid for 10 minutes.</p><p>If you didn't request this link you can simply ignore this email.</p>`
	txt := "Here is the login link you requested.\nClick this link to login to your account:\n\n" + link + "\n\nThis link will only be valid for 10 minutes.\n\nIf you didn't request this link you can simply ignore this email."
	if handle != nil {
		html = Strf("Hi %s,\n\n%s", *handle, html)
		txt = Strf("Hi %s,\n\n%s", *handle, txt)
	}
	srv.Email().MustSend([]string{sendTo}, from, "Login Link", html, txt)
}

// sendConfirmChangeEmailEmail emails the change-of-email confirmation link.
func sendConfirmChangeEmailEmail(srv service.Layer, sendTo, from string, link string) {
	srv.Email().MustSend([]string{sendTo}, from, "Confirm change email", `<p>Click this link to change the email associated with your account:</p><p><a href="`+link+`">Confirm change email</a></p>`, "Confirm change email:\n\n"+link)
}

// sendResetPwdEmail emails the newly generated password after a reset.
func sendResetPwdEmail(srv service.Layer, sendTo, from string, newPwd string) {
	srv.Email().MustSend([]string{sendTo}, from, "Pwd Reset", `<p>New Pwd: `+newPwd+`</p>`, `New Pwd: `+newPwd)
}

// fullUser is the internal row shape: the public user.Me plus all the
// private account-management columns.
type fullUser struct {
	user.Me
	Email                  string
	RegisteredOn           time.Time
	ActivatedOn            time.Time
	NewEmail               *string
	ActivateCode           *string
	ChangeEmailCode        *string
	LastPwdResetOn         *time.Time
	LoginLinkCodeCreatedOn *time.Time
	LoginLinkCode          *string
}

// getUser fetches a user row by email OR id (exactly one must be non-nil).
// Returns nil when no matching row exists.
func getUser(tx sql.Tx, email *string, id *ID) *fullUser {
	PanicIf(email == nil && id == nil, "one of email or id must not be nil")
	var arg interface{}
	if email != nil {
		arg = *email
	} else {
		arg = *id
	}
	res := &fullUser{}
	err := tx.Get1(res, qryUserFullGet(id != nil), arg)
	if sqlh.IsNoRows(err) {
		return nil
	}
	PanicOn(err)
	return res
}

// updateUser writes every mutable column of the user row back to the db.
func updateUser(tx sql.Tx, user *fullUser) {
	tx.MustExec(qryUserUpdate(), user.Email, user.Handle, user.Alias, user.HasAvatar, user.FcmEnabled, user.RegisteredOn, user.ActivatedOn, user.NewEmail, user.ActivateCode, user.ChangeEmailCode, user.LastPwdResetOn, user.LoginLinkCodeCreatedOn, user.LoginLinkCode, user.ID)
}

// pwd is one row of the pwd store: the scrypt-derived key plus the salt and
// scrypt cost params it was derived with (so params can be upgraded later).
type pwd struct {
	ID   ID
	Salt []byte
	Pwd  []byte
	N    int
	R    int
	P    int
}

// getPwd fetches the pwd row for id, or nil if none exists.
func getPwd(pwdtx sql.Tx, id ID) *pwd {
	res := &pwd{}
	err := pwdtx.Get1(res, qryPwdGet(), id)
	if sqlh.IsNoRows(err) {
		return nil
	}
	PanicOn(err)
	return res
}

// setPwd validates the plaintext pwd then stores a fresh salt and
// scrypt-derived key using the current cost params.
func setPwd(tlbx app.Tlbx, pwdtx sql.Tx, id ID, pwd string) {
	validate.Str("pwd", pwd, pwdMinLen, pwdMaxLen, pwdRegexs...)
	salt := crypt.Bytes(scryptSaltLen)
	pwdBs := crypt.ScryptKey([]byte(pwd), salt, scryptN, scryptR, scryptP, scryptKeyLen)
	_, err := pwdtx.Exec(qryPwdUpdate(), id, salt, pwdBs, scryptN, scryptR, scryptP)
	PanicOn(err)
}

// getJin loads the user's jin into dst; leaves dst untouched when no row
// exists. Accepts either a *json.Json or anything json-unmarshalable.
func getJin(tx sql.Tx, me ID, dst interface{}) {
	if js, ok := dst.(*json.Json); ok {
		sqlh.PanicIfIsntNoRows(tx.Get1(js, qryJinSelect(), me))
	} else {
		bs := []byte{}
		sqlh.PanicIfIsntNoRows(tx.Get1(&bs, qryJinSelect(), me))
		json.MustUnmarshal(bs, &dst)
	}
}
/**
 * Configure this application
 *
 * @author simplity.org
 */
public class Application {
	private static final Logger logger = LoggerFactory.getLogger(Application.class);

	private static final String FOLDER_STR = "/";

	/** we use a default user id during testing */
	private static final Value DEFAULT_NUMERIC_USER_ID = Value.newIntegerValue(420);
	private static final Value DEFAULT_TEXT_USER_ID = Value.newTextValue("420");

	/**
	 * any exception thrown by service may need to be reported to a central
	 * system.
	 */
	private static ExceptionListenerInterface currentExceptionListener = new DefaultExceptionListener();

	private static AppDataCacherInterface currentAppDataCacher = null;

	/** instance of a UserTransaction for JTA/JCA based transaction management */
	private static Object userTransactionInstance;

	private static boolean userIdIsNumeric;

	/*
	 * batch and thread management
	 */
	private static ThreadFactory threadFactory;
	private static ScheduledExecutorService threadPoolExecutor;
	private static int batchPoolSize;

	/**
	 *
	 * @return app data cacher that is set up for this app. null if no acher is
	 *         configured
	 */
	public static AppDataCacherInterface getAppDataCacher() {
		return currentAppDataCacher;
	}

	/**
	 * report an application error that needs attention from admin
	 *
	 * @param e
	 */
	public static void reportApplicationError(ApplicationError e) {
		currentExceptionListener.listen(e);
	}

	/**
	 * report an application error that needs attention from admin
	 *
	 * @param e
	 */
	public static void reportApplicationError(Exception e) {
		currentExceptionListener.listen(e);
	}

	/**
	 * report an exception that needs attention from admin
	 *
	 * @param request
	 *            data with which service was invoked. null if the error has no
	 *            such reference
	 * @param e
	 */
	public static void reportApplicationError(ServiceRequest request, Exception e) {
		currentExceptionListener.listen(request, new ApplicationError(e, ""));
	}

	/** @return get a UserTrnsaction instance */
	public static UserTransaction getUserTransaction() {
		// only available when the app was configured for JTA transactions
		if (userTransactionInstance == null) {
			throw new ApplicationError("Application is not set up for a JTA based user transaction");
		}
		return (UserTransaction) userTransactionInstance;
	}

	/**
	 * @return default user id, typically during tests. null if it is not set
	 */
	public static Value getDefaultUserId() {
		if (userIdIsNumeric) {
			return DEFAULT_NUMERIC_USER_ID;
		}
		return DEFAULT_TEXT_USER_ID;
	}

	/** @return is the userId a number? default is text/string */
	public static boolean userIdIsNumeric() {
		return userIdIsNumeric;
	}

	/**
	 * get a managed thread as per the container
	 *
	 * @param runnable
	 * @return thread
	 */
	public static Thread createThread(Runnable runnable) {
		// fall back to a plain Thread when no container-managed factory is set
		if (threadFactory == null) {
			return new Thread(runnable);
		}
		return threadFactory.newThread(runnable);
	}

	/**
	 * get a managed thread as per the container
	 *
	 * @return executor
	 */
	public static ScheduledExecutorService getScheduledExecutor() {
		if (threadPoolExecutor != null) {
			return threadPoolExecutor;
		}
		int nbr = batchPoolSize;
		if (nbr == 0) {
			nbr = 2;
		}
		// without a managed thread factory the executor is NOT cached — a new
		// instance is returned per call; only the factory-backed one is kept.
		if (threadFactory == null) {
			return new ScheduledThreadPoolExecutor(nbr);
		}
		threadPoolExecutor = new ScheduledThreadPoolExecutor(nbr, threadFactory);
		return threadPoolExecutor;
	}

	/** name of configuration file, including extension */
	public static final String CONFIG_FILE_NAME = "application.xml";

	/**
	 * One and the only method that is to be executed to set-up this container
	 * for service execution Configuration resources and application components
	 * are made accessible using this method. <br />
	 * Resources can be exposed as files under a file-system or as resources
	 * accessible through java class loader. Typically, a file-system is
	 * convenient during development as the files can be changed without need to
	 * re-deploy/reboot the app.
	 *
	 *
	 * @param resourceRoot
	 *            if components are available on the file system, this is the
	 *            full path of the root folder where application.xml Otherwise
	 *            this is the prefix to application.xml. for example c:/a/a/c/
	 *            or /a/b/c such that c:/a/b/c/application.xml or
	 *            a.b.c.application.xml are available
	 *
	 * @return true if all OK. False in case of any set-up issue.
	 * @throws Exception
	 *             in case the root folder does not exist, or does not required
	 *             resources
	 */
	public static boolean bootStrap(String resourceRoot) throws Exception {
		String componentFolder = resourceRoot;
		if (componentFolder.endsWith(FOLDER_STR) == false) {
			componentFolder += FOLDER_STR;
		}

		logger.info("Bootstrapping with " + componentFolder);

		/*
		 * is this a folder
		 *
		 */
		Application app = new Application();
		String msg = null;
		try (InputStream ins = IoUtil.getStream(componentFolder + CONFIG_FILE_NAME)) {
			XmlUtil.xmlToObject(ins, app);
			if (app.applicationId == null) {
				msg = "Unable to load the configuration component " + CONFIG_FILE_NAME
						+ ". This file is expected to be inside folder " + componentFolder;
			} else {
				msg = app.configure(componentFolder);
			}
		} catch (Exception e) {
			msg = e.getMessage();
		}

		if (msg == null) {
			return true;
		}
		// configuration failed: report through the listener, then propagate.
		ApplicationError e = new ApplicationError(msg);
		ServiceRequest req = null;
		currentExceptionListener.listen(req, e);
		throw e;
	}

	/**
	 * unique name of this application within a corporate. This may be used as
	 * identity while trying to communicate with other applications within the
	 * corporate cluster
	 */
	@FieldMetaData(isRequired = true)
	String applicationId;

	/**
	 * list of modules in this application. We have made it mandatory to have a
	 * module, even if there is only one module. This is to enforce some
	 * discipline that retains flexibility for the app to be put into a context
	 * along with other apps.
	 */
	@FieldMetaData(isRequired = true)
	String[] modules;

	/**
	 * user id is a mandatory concept. Every service is meant to be executed for
	 * a specified (logged-in) user id. Apps can choose it to be either string
	 * or number
	 */
	boolean userIdIsNumber;

	/**
	 * do we cache components as they are loaded. typically true in production,
	 * and false in development environment
	 */
	boolean cacheComponents;

	/**
	 * during development/testing,we can simulate service executions with local
	 * data. service.xml is used for input/output, but the execution is skipped.
	 * json from data folder is used to populate serviceContext
	 */
	boolean simulateWithLocalData;

	/*
	 * app specific implementations of infrastructure/utility features
	 */

	/**
	 * Utility class that gets an instance of a Bean. Like the context in
	 * Spring. Useful when you want to work within a Spring container. Must
	 * implement <code>BeanFinderInterface</code> This is also used to get
	 * instance of any fully qualified name provided for configuration
	 */
	@FieldMetaData(superClass = BeanFinderInterface.class)
	String beanFinderClassName;

	/**
	 * Cache manager to be used by <code>ServiceAgent</code> to cache responses
	 * to services that are designed for caching. This class should implement
	 * <code>ServiceCacherInterface</code> null if caching is not to be enabled.
	 */
	@FieldMetaData(superClass = ServiceCacherInterface.class)
	String serviceCacherClassName;

	/**
	 * Service level access control to be implemented by
	 * <code>ServiceAgent</code> null if service agent is not responsible for
	 * this. Any service specific access control is to be managed by the service
	 * itself. must implement <code>AccessControllerInterface</code>
	 */
	@FieldMetaData(superClass = AccessControllerInterface.class)
	String accessControllerClassName;

	/**
	 * App specific hooks during service invocation life-cycle used by
	 * <code>ServiceAgent</code> null if no service agent is not responsible for
	 * this. Any service specific access control is to be managed by the service
	 * itself.
must implement <code>ServicePrePostProcessorInterface</code> */ @FieldMetaData(superClass = ServicePrePostProcessorInterface.class) String servicePrePostProcessorClassName; /** * way to wire exception to corporate utility. null if so such requirement. * must implement <code>ExceptionListenerInterface</code> */ @FieldMetaData(superClass = ExceptionListenerInterface.class) String exceptionListenerClassName; /** * class that can be used for caching app data. must implement * <code>AppDataCacherInterface</code> */ @FieldMetaData(superClass = AppDataCacherInterface.class) String appDataCacherClassName; /** * fully qualified class name that can be used for getting value for * parameter/Property at run time. must implement * <code>ParameterRetrieverInterface</code> */ @FieldMetaData(superClass = ParameterRetrieverInterface.class) String parameterRetrieverClassName; /** * fully qualified class name that can be used for getting data/list source * for dataAdapters. must implement <code>DataAdapterExtension</code> */ @FieldMetaData(superClass = DataAdapterExtension.class) String dataAdapterExtensionClassName; /** * class name that implements <code>CommonCodeValidatorInterface</code>. * null is no such concept used in this app */ @FieldMetaData(superClass = CommonCodeValidatorInterface.class) String commonCodeValidatorClassName; /** * if attachments are managed by a custom code, specify the class name to * wire it. It should implement <code>AttachmentAssistantInterface</code> */ @FieldMetaData(irrelevantBasedOnField = "attachmentsFolderPath", superClass = AttachmentAssistantInterface.class) String attachmentAssistantClassName; /** * Simplity provides a rudimentary, folder-based system that can be used for * storing and retrieving attachments. 
If you want to use that, provide the * folder that is available for the server instance */ String attachmentsFolderPath; /** jndi name for user transaction for using JTA based transactions */ String jtaUserTransaction; /** * if JMS is used by this application, connection factory for local/session * managed operations */ String jmsConnectionFactory; /** properties of jms connection, like user name password and other flags */ Property[] jmsProperties; /** * if JMS is used by this application, connection factory for JTA/JCA/XA * managed operations */ String xaJmsConnectionFactory; /** batch job to fire after bootstrapping. */ @FieldMetaData(isReferenceToComp = true, referredCompType = ComponentType.JOBS) String jobsToRunOnStartup; /** Configure the Mail Setup for the application */ MailProperties mailProperties; /** * OAuth parameters */ OAuthParameters oauthparameters; /* * for batch jobs */ /** jndi name the container has created to get a threadFactory instance */ String threadFactoryJndiName; /** * jndi name the container has created to get a managed schedule thread * pooled executor instance */ String scheduledExecutorJndiName; /** number of threads to keep in the pool even if they are idle */ int corePoolSize; RdbDriver rdbDriver; private static OAuthParameters oauthparametersInternal; /** * * @return auth parameters */ public static OAuthParameters getOAuthParameters() { return oauthparametersInternal; } private static BeanFinderInterface classManagerInternal; /** * gateways for external applications, indexed by id */ @FieldMetaData(packageName = "org.simplity.gateway", indexFieldName = "applicationName") Map<String, Gateway> externalApplications = new HashMap<>(); /** * configure application based on the settings. This MUST be triggered * before using the app. Typically this would be triggered from start-up * servlet in a web-app * * @param rootPath * * @return null if all OK. Else message that described why we could not * succeed. 
*/ private String configure(String rootPath) { ComponentManager.bootstrap(rootPath, this.modules); List<String> msgs = new ArrayList<String>(); if (this.beanFinderClassName != null) { try { classManagerInternal = (BeanFinderInterface) (Class.forName(this.beanFinderClassName)).newInstance(); } catch (Exception e) { msgs.add(this.beanFinderClassName + " could not be used to instantiate a Class Manager. " + e.getMessage()); } } else { logger.info( "No app specific class manager/bean creater configured. Normal java class.forName() will be used to instantiate objects/beans"); } int nbrErrors = 0; ExceptionListenerInterface listener = null; if (this.exceptionListenerClassName != null) { try { listener = Application.getBean(this.exceptionListenerClassName, ExceptionListenerInterface.class); currentExceptionListener = listener; } catch (Exception e) { nbrErrors++; } } else { logger.info("No exception listener configured."); } ServiceCacherInterface casher = null; if (this.serviceCacherClassName != null) { try { casher = Application.getBean(this.serviceCacherClassName, ServiceCacherInterface.class); logger.info("{} is configured as class manager/bean finder.", this.serviceCacherClassName); } catch (Exception e) { nbrErrors++; } } else { logger.info("No service cacher configured."); } AccessControllerInterface gard = null; if (this.accessControllerClassName != null) { try { gard = Application.getBean(this.accessControllerClassName, AccessControllerInterface.class); logger.info("{} is configured as access controller.", this.accessControllerClassName); } catch (Exception e) { nbrErrors++; } } else { logger.info("No access controller configured."); } ServicePrePostProcessorInterface prePost = null; if (this.servicePrePostProcessorClassName != null) { try { prePost = Application.getBean(this.servicePrePostProcessorClassName, ServicePrePostProcessorInterface.class); logger.info("{} is configured as service pre-post-processor.", this.servicePrePostProcessorClassName); } catch 
(Exception e) { nbrErrors++; } } else { logger.info("No service pre-post processor configured."); } /* * rdb set up */ if (this.rdbDriver == null) { logger.info("No rdb has been set up for this app."); } else { String msg = this.rdbDriver.setup(); if (msg != null) { msgs.add(msg); } } if (this.appDataCacherClassName != null) { try { currentAppDataCacher = Application.getBean(this.appDataCacherClassName, AppDataCacherInterface.class); logger.info("{} is used as app data cacher", this.appDataCacherClassName); } catch (Exception e) { nbrErrors++; } } else { logger.info("No app data cacher configured."); } if (this.commonCodeValidatorClassName != null) { try { CommonCodeValidatorInterface val = Application.getBean(this.commonCodeValidatorClassName, CommonCodeValidatorInterface.class); if (val != null) { CommonCodeValidator.setValidator(val); logger.info("{} is used as common code validator", this.commonCodeValidatorClassName); } } catch (Exception e) { nbrErrors++; } } else { logger.info("No common code validator configured."); } if (this.parameterRetrieverClassName != null) { try { ParameterRetrieverInterface val = Application.getBean(this.parameterRetrieverClassName, ParameterRetrieverInterface.class); if (val != null) { ParameterRetriever.setRetriever(val); logger.info("{} is used as parameter retriever", this.parameterRetrieverClassName); } } catch (Exception e) { nbrErrors++; } } else { logger.info("No parameter retriever configured."); } if (this.dataAdapterExtensionClassName != null) { try { DataAdapterExtension val = Application.getBean(this.dataAdapterExtensionClassName, DataAdapterExtension.class); if (val != null) { ServiceContext.setDataAdapterExtension(val); logger.info("{} is used as data adapter extension ", this.dataAdapterExtensionClassName); } } catch (Exception e) { nbrErrors++; } } else { logger.info("No data adapter extension configured."); } if (this.jtaUserTransaction != null) { try { userTransactionInstance = new 
InitialContext().lookup(this.jtaUserTransaction); if (userTransactionInstance instanceof UserTransaction == false) { msgs.add(this.jtaUserTransaction + " is located but it is not UserTransaction but " + userTransactionInstance.getClass().getName()); } else { logger.info("userTransactionInstance set to " + userTransactionInstance.getClass().getName()); } } catch (Exception e) { msgs.add("Error while instantiating UserTransaction using jndi name " + this.jtaUserTransaction + ". " + e.getMessage()); } } /* * Setup JMS Connection factory */ if (this.jmsConnectionFactory != null || this.xaJmsConnectionFactory != null) { String msg = JmsConnector.setup(this.jmsConnectionFactory, this.xaJmsConnectionFactory, this.jmsProperties); if (msg != null) { msgs.add(msg); } } /* * Setup Mail Agent */ if (this.mailProperties != null) { try { MailConnector.initialize(this.mailProperties); } catch (Exception e) { msgs.add("Error while setting up MailAgent." + e.getMessage() + " Application will not work properly."); } } /* * in production, we cache components as they are loaded, but in * development we prefer to load the latest */ if (this.cacheComponents) { ComponentManager.startCaching(); } /* * what about file/media/attachment storage assistant? */ AttachmentAssistantInterface ast = null; if (this.attachmentsFolderPath != null) { ast = new FileBasedAssistant(this.attachmentsFolderPath); } else if (this.attachmentAssistantClassName != null) { try { ast = Application.getBean(this.attachmentAssistantClassName, AttachmentAssistantInterface.class); } catch (Exception e) { nbrErrors++; } } if (ast != null) { AttachmentManager.setAssistant(ast); } userIdIsNumeric = this.userIdIsNumber; /* * initialize service agent */ ServiceAgent.setUp(this.userIdIsNumber, casher, gard, prePost, this.simulateWithLocalData); /* * batch job, thread pools etc.. 
*/ if (this.corePoolSize == 0) { batchPoolSize = 1; } else { batchPoolSize = this.corePoolSize; } if (this.threadFactoryJndiName != null) { try { threadFactory = (ThreadFactory) new InitialContext().lookup(this.threadFactoryJndiName); logger.info("Thread factory instantiated as " + threadFactory.getClass().getName()); } catch (Exception e) { msgs.add("Error while looking up " + this.threadFactoryJndiName + ". " + e.getLocalizedMessage()); } } if (this.scheduledExecutorJndiName != null) { try { threadPoolExecutor = (ScheduledExecutorService) new InitialContext() .lookup(this.scheduledExecutorJndiName); logger.info("ScheduledThreadPoolExecutor instantiated as " + threadPoolExecutor.getClass().getName()); } catch (Exception e) { msgs.add("Error while looking up " + this.scheduledExecutorJndiName + ". " + e.getLocalizedMessage()); } } String result = null; /* * gate ways */ if (this.externalApplications.isEmpty() == false) { Gateways.setGateways(this.externalApplications); this.externalApplications = null; } if (msgs.size() > 0) { /* * we got errors. */ StringBuilder err = new StringBuilder("Error while bootstrapping\n"); for (String msg : msgs) { err.append(msg).append('\n'); } /* * we run the background batch job only if everything has gone well. */ if (this.jobsToRunOnStartup != null) { err.append("Scheduler NOT started for batch " + this.jobsToRunOnStartup + " because of issues with applicaiton set up."); err.append('\n'); } result = err.toString(); logger.info(result); } else if (nbrErrors > 0) { result = " one or more error while using class names for object instantiation. Refer to erro rlogs"; } else if (this.jobsToRunOnStartup != null) { /* * we run the background batch job only if everything has gone well. 
*/ BatchJobs.startJobs(this.jobsToRunOnStartup); logger.info("Scheduler started for Batch " + this.jobsToRunOnStartup); } if (this.oauthparameters != null) { oauthparametersInternal = this.oauthparameters; } return result; } /** * validate all field values of this as a component * * @param vtx * validation context */ public void validate(ValidationContext vtx) { ValidationUtil.validateMeta(vtx, this); if (this.rdbDriver != null) { this.rdbDriver.validate(vtx); } if (this.attachmentsFolderPath != null) { File file = new File(this.attachmentsFolderPath); if (file.exists() == false) { vtx.message(new ValidationMessage(this, ValidationMessage.SEVERITY_ERROR, this.attachmentsFolderPath + " is not a valid path in the system file system.", "attachmentsFolderPath")); } } } /** * get a bean from the container * * @param className * @param clazz * @return instance of the class, or null if such an object could not be * located */ @SuppressWarnings("unchecked") public static <T> T getBean(String className, Class<T> clazz) { T tb = null; if (classManagerInternal != null) { tb = classManagerInternal.getBean(className, clazz); } if (tb != null) { return tb; } try { tb = (T) (Class.forName(className)).newInstance(); } catch (Exception e) { throw new ApplicationError(className + " is not a valid class that implements " + clazz.getName()); } return tb; } /** * @param userId * @return app user for this user id */ public static AppUser ceateAppUser(String userId) { Value uid = null; if (userIdIsNumeric) { uid = Value.parseValue(userId, ValueType.INTEGER); } else { uid = Value.newTextValue(userId); } return new AppUser(uid); } }
Millimeter Wave CARM Amplifier Experiment A high power, 140 GHz, cyclotron autoresonance maser (or CARM) amplifier is under development at the Plasma Fusion Center at M.I.T. Theory, simulations, and the experimental design are discussed. The experiment will employ a high voltage (450 - 700 kV) Pierce-type beam and a helical wiggler to produce the required electron beam. The initial experiment will be carried out using a 450 kV electron gun; the second phase of the experimental program will employ a 700 kV electron gun. First experimental operation is anticipated shortly. The details of the experimental design and theoretical analysis will be presented.
import React from 'react'
import {AMOUNT_SLUGS, AMOUNT_SO_PREFIXED, ALLOWED_AMOUNTS} from '../data/constants'

/**
 * Builds the full "X er (så) <amount> krøkkete fordi ..." sentence.
 * NOTE(review): an `amount` not present in AMOUNT_SLUGS yields index -1 and an
 * undefined lookup — presumably callers only pass known amounts; confirm.
 */
export function getCrookedText({amount, what, reason = ''}) {
  const slugIndex = AMOUNT_SLUGS.indexOf(amount.toLowerCase())
  const degree = lcFirst(ALLOWED_AMOUNTS[slugIndex])
  const soPrefix = AMOUNT_SO_PREFIXED[slugIndex] ? `så ` : ''
  const subject = what || 'Dette'
  const reasonPart = getReasonText(reason)
  return `${subject} er ${soPrefix}${degree} krøkkete ${reasonPart}`.trim()
}

/**
 * Normalizes a free-text reason into "fordi <reason>", stripping any leading
 * "fordi" the user already typed. Returns '' when there is no reason.
 */
export function getReasonText(reason = '') {
  const stripped = (reason || '').trim().replace(/^fordi/i, '')
  const because = lcFirst(stripped).trim()
  if (!because) {
    return ''
  }
  return `fordi ${because}`.trim()
}

/** Thin render wrapper around getCrookedText. */
export function CrookedText({amount, what, reason}) {
  const text = getCrookedText({amount, what, reason})
  return <>{text}</>
}

/** Lower-cases only the first character; safe on the empty string. */
function lcFirst(str: string): string {
  const head = (str[0] || '').toLowerCase()
  return head + str.slice(1)
}
package org.spincast.plugins.hotswap.classeswatcher;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

import javax.annotation.Nullable;

import org.hotswap.agent.annotation.Init;
import org.hotswap.agent.annotation.LoadEvent;
import org.hotswap.agent.annotation.OnClassLoadEvent;
import org.hotswap.agent.annotation.Plugin;
import org.hotswap.agent.command.Command;
import org.hotswap.agent.command.Scheduler;
import org.hotswap.agent.javassist.CannotCompileException;
import org.hotswap.agent.javassist.CtClass;
import org.hotswap.agent.javassist.NotFoundException;
import org.hotswap.agent.util.PluginManagerInvoker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spincast.core.config.SpincastConfig;

import com.google.inject.Inject;

/**
 * The instance of this class will be created by
 * HotSwap Agent.
 * <p>
 * We bind it to the Guice context using
 * <code>toInstance()</code> and this will inject
 * the required dependencies on members
 * ("@Inject" annotated), directly.
 */
@Plugin(name = "HotSwapClassesRedefinitionsWatcher", testedVersions = "")
public class HotSwapClassesRedefinitionsWatcherDefault implements HotSwapClassesRedefinitionsWatcher {

    protected final static Logger logger = LoggerFactory.getLogger(HotSwapClassesRedefinitionsWatcherDefault.class);

    // Fully qualified name used by the @OnClassLoadEvent regexp in init() below.
    protected final static String THIS_CLASS_NAME = "org.spincast.plugins.hotswap.classeswatcher.HotSwapClassesRedefinitionsWatcherDefault";

    // true once Guice has run member injection on this agent-created instance
    private boolean guiceInjected;

    // listeners registered per watched class; lazily created, see getListenersByClassToWatch()
    private Map<Class<?>, Set<HotSwapClassesRedefinitionsListener>> listenersByClassToWatch;

    /**
     * Flag to know that dependencies
     * have been injected by Guice.
     */
    @Inject
    public void guiceInjected() {
        this.guiceInjected = true;
    }

    protected boolean isGuiceInjected() {
        return this.guiceInjected;
    }

    /**
     * This will be called by HotSwap Agent to inject
     * the Scheduler instance.
     */
    @Init
    protected Scheduler scheduler;

    protected Scheduler getScheduler() {
        return this.scheduler;
    }

    @Inject
    private SpincastConfig spincastConfig;

    protected SpincastConfig getSpincastConfig() {
        return this.spincastConfig;
    }

    /**
     * Guice injects the bound listeners
     */
    // NOTE(review): method name "setBoundListerns" is misspelled but is the
    // @Inject-called entry point — do not rename without rebinding.
    @Inject
    public void setBoundListerns(@Nullable Set<HotSwapClassesRedefinitionsListener> boundListeners) {
        if (boundListeners != null) {
            for (HotSwapClassesRedefinitionsListener listener : boundListeners) {
                // only enabled listeners are registered at injection time
                if (listener.isEnabled()) {
                    registerListener(listener);
                }
            }
        }
    }

    // Lazy-initializes the listeners map so it is usable even before injection.
    protected Map<Class<?>, Set<HotSwapClassesRedefinitionsListener>> getListenersByClassToWatch() {
        if (this.listenersByClassToWatch == null) {
            this.listenersByClassToWatch = new HashMap<Class<?>, Set<HotSwapClassesRedefinitionsListener>>();
        }
        return this.listenersByClassToWatch;
    }

    //==========================================
    // Inits the plugin.
    // This is required and has to be a static method...
    //==========================================
    @OnClassLoadEvent(classNameRegexp = THIS_CLASS_NAME, events = LoadEvent.DEFINE)
    public static void init(CtClass ctClass) throws NotFoundException, CannotCompileException {

        // Bytecode injected into this class's constructor so the agent's
        // PluginManager knows about the plugin instance.
        String src = PluginManagerInvoker.buildInitializePlugin(HotSwapClassesRedefinitionsWatcherDefault.class);

        //==========================================
        // All methods of the plugin must be registered here...
        // We only have one, "classRedefined"
        //==========================================
        src += PluginManagerInvoker.buildCallPluginMethod(HotSwapClassesRedefinitionsWatcherDefault.class,
                                                          "classRedefined");

        ctClass.getDeclaredConstructor(new CtClass[0]).insertAfter(src);

        logger.info(HotSwapClassesRedefinitionsWatcherDefault.class.getSimpleName() + " is now initialized...");
    }

    @Override
    public void registerListener(HotSwapClassesRedefinitionsListener listener) {
        Objects.requireNonNull(listener, "The listener can't be NULL");

        Set<Class<?>> classesToWatch = listener.getClassesToWatch();
        if (classesToWatch == null) {
            return;
        }

        // index the listener under every class it wants to watch
        for (Class<?> classToWatch : classesToWatch) {
            Set<HotSwapClassesRedefinitionsListener> listeners = getListenersByClassToWatch().get(classToWatch);
            if (listeners == null) {
                listeners = new HashSet<HotSwapClassesRedefinitionsListener>();
                getListenersByClassToWatch().put(classToWatch, listeners);
            }
            listeners.add(listener);
        }
    }

    @Override
    public void removeListener(HotSwapClassesRedefinitionsListener listener) {
        Objects.requireNonNull(listener, "The listener can't be NULL");

        Set<Class<?>> classesToWatch = listener.getClassesToWatch();
        if (classesToWatch == null) {
            return;
        }

        for (Class<?> classToWatch : classesToWatch) {
            Set<HotSwapClassesRedefinitionsListener> listeners = getListenersByClassToWatch().get(classToWatch);
            if (listeners != null) {
                listeners.remove(listener);
                // drop empty buckets so the map does not grow unboundedly
                if (listeners.size() == 0) {
                    getListenersByClassToWatch().remove(classToWatch);
                }
            }
        }
    }

    @Override
    public void removeAllListeners() {
        getListenersByClassToWatch().clear();
    }

    /**
     * Listen to redefinitions of any class.
     */
    @OnClassLoadEvent(classNameRegexp = ".+", events = LoadEvent.REDEFINE)
    public void classRedefined(Class<?> classBeingRedefined) {

        if (!isGuiceInjected()) {
            logger.warn("Required dependencies still not injected by Guice, returning...");
            return;
        }

        Set<HotSwapClassesRedefinitionsListener> listeners = getListenersByClassToWatch().get(classBeingRedefined);
        if (listeners == null || listeners.size() == 0) {
            logger.info("Class \"" + classBeingRedefined.getName() + "\" redefined, no listeners.");
            return;
        }

        logger.info("Class \"" + classBeingRedefined.getName() + "\" redefined, " + listeners.size() + " listeners.");

        for (HotSwapClassesRedefinitionsListener listener : listeners) {

            //==========================================
            // Listener enabled?
            //==========================================
            if (!listener.isEnabled()) {
                continue;
            }

            //==========================================
            // The "@OnClassLoadEvent" method is
            // called *before* the class is actually reloaded.
            // By using the agent's scheduler, we can set
            // an action to be run *after* the class has been
            // redefined.
            //==========================================
            getScheduler().scheduleCommand(new Command() {

                @Override
                public void executeCommand() {
                    // run the callback on its own thread so a slow listener
                    // does not block the agent's scheduler
                    Thread listenerThread = new Thread(new Runnable() {

                        @Override
                        public void run() {
                            listener.classRedefined(classBeingRedefined);
                        }
                    });
                    listenerThread.start();
                }
            });
        }
    }
}
/**
 * Dialog to allow the user to define the sort order for the
 * {@link AttributeViewer}
 *
 * <pre>
 *
 * SOFTWARE HISTORY
 *
 * Date         Ticket#    Engineer    Description
 * ------------ ---------- ----------- --------------------------
 * Nov 29, 2012            randerso    Initial creation
 * Mar 21, 2013  1638      mschenke    Created Pair class internal so no dependencies on GFE
 *
 * </pre>
 *
 * @author randerso
 * @version 1.0
 */
public class SortOrderDialog extends CaveJFACEDialog {

    /**
     * One row of the dialog: a column selector plus an ascending/descending
     * selector, wrapped in a labeled Group.
     */
    private class SortField {
        // sort direction is encoded as +1 / -1 in the Pair results
        public static final int ASCENDING_VALUE = 1;

        public static final int DESCENDING_VALUE = -1;

        private static final String ASCENDING_TEXT = "Ascending";

        private static final String DESCENDING_TEXT = "Descending";

        private Group group;

        private Combo columnCombo;

        private Combo orderCombo;

        public SortField(Composite parent) {
            // NOTE(review): SWT.DEFAULT (-1) as a style bit is unusual here —
            // SWT.NONE is the conventional value; confirm this is intentional.
            group = new Group(parent, SWT.DEFAULT);
            GridData layoutData = new GridData(SWT.FILL, SWT.DEFAULT, true, false);
            group.setLayoutData(layoutData);
            GridLayout layout = new GridLayout(2, false);
            group.setLayout(layout);
            group.setText("Then by:");

            columnCombo = new Combo(group, SWT.DROP_DOWN);
            layoutData = new GridData(SWT.DEFAULT, SWT.DEFAULT, false, false);
            columnCombo.setLayoutData(layoutData);
            // empty entry at index 0 means "no column selected" for this row
            columnCombo.add("", 0);
            columnCombo.addSelectionListener(new SelectionAdapter() {
                @Override
                public void widgetSelected(SelectionEvent e) {
                    // any selection change may add/remove rows and rebuild
                    // the available-column lists
                    updateFields();
                }
            });

            orderCombo = new Combo(group, SWT.DROP_DOWN);
            layoutData = new GridData(SWT.DEFAULT, SWT.DEFAULT, false, false);
            orderCombo.setLayoutData(layoutData);
            orderCombo.add(ASCENDING_TEXT);
            orderCombo.add(DESCENDING_TEXT);
            // default direction is Ascending (first item)
            orderCombo.setText(orderCombo.getItem(0));
        }

        public void dispose() {
            group.dispose();
        }

        /** @return the selected column name, or "" when the row is unused */
        public String getField() {
            return columnCombo.getText();
        }

        public void setField(String field) {
            this.columnCombo.setText(field);
        }

        /** @return +1 for ascending, -1 for descending */
        public int getOrder() {
            return (orderCombo.getText().equals(ASCENDING_TEXT) ? ASCENDING_VALUE : DESCENDING_VALUE);
        }

        public void setOrder(int order) {
            if (order > 0) {
                orderCombo.setText(ASCENDING_TEXT);
            } else if (order < 0) {
                orderCombo.setText(DESCENDING_TEXT);
            }
            // order == 0 intentionally leaves the combo unchanged
        }

        /**
         * Replaces the selectable columns, keeping the current selection if it
         * is still available, otherwise resetting the row to "".
         */
        public void setFields(List<String> fields) {
            String currentField = getField();
            columnCombo.removeAll();
            columnCombo.add("");
            for (String f : fields) {
                columnCombo.add(f);
            }
            if (fields.contains(currentField)) {
                columnCombo.setText(currentField);
            } else {
                columnCombo.setText("");
            }
        }

        public void setLabel(String label) {
            group.setText(label);
        }
    }

    // all selectable column names
    private String[] columns;

    // sort order the dialog opens with: (column, +1/-1) pairs
    private List<Pair<String, Integer>> initialOrder;

    private Composite mainComp;

    // one entry per visible row; last row is always an empty "add new" row
    private List<SortField> sortFields;

    protected SortOrderDialog(Shell parentShell, String[] columns, List<Pair<String, Integer>> order) {
        super(parentShell);
        this.columns = columns;
        this.initialOrder = order;
    }

    @Override
    protected void configureShell(Shell newShell) {
        super.configureShell(newShell);
        newShell.setText("Sort Order");
    }

    @Override
    protected Control createDialogArea(Composite parent) {
        mainComp = (Composite) super.createDialogArea(parent);

        this.sortFields = new ArrayList<SortField>();
        for (Pair<String, Integer> p : initialOrder) {
            addField(p.getFirst(), p.getSecond());
        }
        // normalizes labels and appends the trailing empty row
        updateFields();

        return mainComp;
    }

    /** Creates a new row pre-set to the given column and direction. */
    private SortField addField(String field, int order) {
        SortField newField = new SortField(mainComp);
        newField.setField(field);
        newField.setOrder(order);
        sortFields.add(newField);
        return newField;
    }

    private void removeField(SortField field) {
        sortFields.remove(field);
        field.dispose();
    }

    /**
     * Rebuilds the rows after any selection change: each row only offers
     * columns not used by earlier rows, emptied middle rows are removed, a
     * trailing empty row is kept for adding a new criterion, and the first
     * row is relabeled "Sort by:".
     */
    private void updateFields() {
        List<String> availableFields = new ArrayList<String>(
                Arrays.asList(columns));

        for (int i = 0; i < sortFields.size(); i++) {
            SortField sf = sortFields.get(i);
            sf.setFields(availableFields);
            String s = sf.getField();
            if (s.isEmpty()) {
                // remove cleared rows, except the trailing one
                if (i < sortFields.size() - 1) {
                    removeField(sf);
                    i--;
                }
            } else {
                // a column used by this row is no longer offered below it
                availableFields.remove(s);
            }
        }

        // ensure there is always an empty row at the bottom
        if (sortFields.isEmpty()
                || !sortFields.get(sortFields.size() - 1).getField().isEmpty()) {
            SortField sf = addField("", SortField.ASCENDING_VALUE);
            sf.setFields(availableFields);
        }

        sortFields.get(0).setLabel("Sort by:");

        // rows may have been added/removed: resize the shell to fit
        mainComp.getShell().pack();
    }

    /** @return the configured (column, direction) pairs, skipping empty rows */
    public List<Pair<String, Integer>> getSortOrder() {
        List<Pair<String, Integer>> sortOrder = new ArrayList<Pair<String, Integer>>();
        for (SortField sf : sortFields) {
            if (sf.getField().isEmpty()) {
                continue;
            }
            sortOrder.add(new Pair<String, Integer>(sf.getField(), sf
                    .getOrder()));
        }
        return sortOrder;
    }
}
def joint_pairs(self):
    """Index pairs of joints that mirror each other.

    Presumably used to swap left/right joints under horizontal flip
    augmentation (TODO confirm against the dataset's flip transform).
    Returns a list of two-element lists of joint indices.
    """
    firsts = (0, 1, 2, 10, 11, 12)
    seconds = (5, 4, 3, 15, 14, 13)
    return [[a, b] for a, b in zip(firsts, seconds)]
// Tests that datagram transport data channels can do in-band negotiation. TEST_P(PeerConnectionIntegrationTest, DatagramTransportDataChannelConfigSentToOtherSide) { PeerConnectionInterface::RTCConfiguration rtc_config; rtc_config.use_datagram_transport_for_data_channels = true; rtc_config.enable_dtls_srtp = false; ASSERT_TRUE(CreatePeerConnectionWrappersWithConfigAndMediaTransportFactory( rtc_config, rtc_config, loopback_media_transports()->first_factory(), loopback_media_transports()->second_factory())); ConnectFakeSignaling(); webrtc::DataChannelInit init; init.id = 53; init.maxRetransmits = 52; caller()->CreateDataChannel("data-channel", &init); caller()->CreateAndSetAndSignalOffer(); ASSERT_TRUE_WAIT(SignalingStateStable(), kDefaultTimeout); loopback_media_transports()->SetState(webrtc::MediaTransportState::kWritable); loopback_media_transports()->FlushAsyncInvokes(); ASSERT_TRUE_WAIT(callee()->data_channel() != nullptr, kDefaultTimeout); ASSERT_TRUE_WAIT(callee()->data_observer()->IsOpen(), kDefaultTimeout); EXPECT_NE(init.id, callee()->data_channel()->id()); EXPECT_EQ("data-channel", callee()->data_channel()->label()); EXPECT_EQ(init.maxRetransmits, callee()->data_channel()->maxRetransmits()); EXPECT_FALSE(callee()->data_channel()->negotiated()); }
As Tesco clothing shoppers rifle through the chain's apparel assortment, they'll be sharing the aisles with six-foot-tall RFID robots, rolling up and down scanning clothing tags for inventory. (Personally, I think a Texas approach — where the robots would be equipped with automatic weapons and paid for out of the loss prevention budget — would be more interesting.) F&F, which is the name of the apparel unit of Tesco, the world's second-largest retailer by revenue, is running the robots as part of a five-store trial. Officially called RFspot Pro and nicknamed Robbie by the F&F team, the robots roam the floor, continually scanning tens of thousands of passive UHF EPC Gen2 tags, strolling up the aisles at about one meter/second on three sets of wheels, reading tags from as many as 30 feet away. Without the robots — which more closely resemble tall canister-type vacuum cleaners than movie-style robots — Tesco would have dealt with two choices for RFID scanning: having store associates do manual scans, or installing stationary RFID readers in shelves, walls and ceilings. Compared with manual scans, the robots are much faster, according to Myles Sutherland, director of business development for RFspot, which provided the robots to Tesco. With the F&F stores, for example, the full store can be robot-scanned in about an hour, compared with associate-scanned in about 8-9 hours, Sutherland said in an interview. The stationary readers, in theory, could do the job even more quickly, but a store would need to install a huge number of them. More importantly, the readers would have to be positioned precisely in relation to the merchandise. Given that stores constantly move merchandise to different aisles — and sometimes have to move store locations, such as in a mall — the cost in cash and labor of taking the readers out and then reinstalling them would almost certainly wipe out any savings. "Having a mobile infrastructure is a much more flexible way," Sutherland said. 
An RFID Journal story about the robots noted some other Robbie advantages. "Each robot also comes with multiple antenna arrays to enable the interrogation of tags at all angles around the machine, from 6 inches above the floor to 12 feet above the floor," the story said. "RFspot is also working on automated tools for the robots to open doors and operate elevators in situations in which they must move from one room to another through a door, or to a different floor." That scanning flexibility allows the robots, in theory, to deliver much more precise information back to the store's servers, which is really helpful given that the tags being used in these trials, for cost reasons, are passive and not active and the chain is not reusing the tags. "This gives us the ability to localize the tag, not just to the section of the store, but to localize it down to the shelf. That's really important," Sutherland said. The robots wirelessly transmit data back to the servers, but the communication is not just one-way. The robots have large screens, and shoppers and store associates can talk with the robots. There's no artificial intelligence or voice recognition involved. Even when operating in an autonomous mode, there is a person who is wirelessly controlling the robot from a remote location. That person's face will appear on the screen, allowing for live video chats with anyone who approaches the robot. It's a good thing I'm not one of the people managing the robots, as I'd be far too tempted to tell shoppers, "Out of my way, human. I am preparing your planet for robot domination, when we shall enslave the few human survivors. Now give me your iPhone. You won't be needing it." Tesco, however, takes the human interactions much more seriously. 
Although it is not the intended role of the robots, Sutherland said, all operators are briefed on the stores they will be in — virtually — so that they can answer questions about where products can be found, the location of lavatories and other items. "The primary purpose is not to be engaging" shoppers, but to instead map the environment and keep the robot operating as efficiently as possible. "So they don't get a ton of training, but we make sure that they are trained" just in case, Sutherland said. Not sure how comforting it would be to see dozens of these robots waltzing down retail aisles — heck, at Best Buy or Sears, the robots could easily outnumber shoppers — but given all of the attempts made to economically leverage RFID data, this is one of the better ones.
<reponame>michel-slm/breakpad // Copyright (c) 2007, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // ms_symbol_server_converter.cc: Obtain symbol files from a Microsoft // symbol server, and convert them to Breakpad's dumped format. // // See ms_symbol_server_converter.h for documentation. 
// // Author: <NAME> #include <windows.h> #include <dbghelp.h> #include <pathcch.h> #include <cassert> #include <cstdio> #include "tools/windows/converter/ms_symbol_server_converter.h" #include "common/windows/pdb_source_line_writer.h" #include "common/windows/pe_source_line_writer.h" #include "common/windows/string_utils-inl.h" // SYMOPT_NO_PROMPTS is not defined in earlier platform SDKs. Define it // in that case, in the event that this code is used with a newer version // of DbgHelp at runtime that recognizes the option. The presence of this // bit in the symbol options should not harm earlier versions of DbgHelp. #ifndef SYMOPT_NO_PROMPTS #define SYMOPT_NO_PROMPTS 0x00080000 #endif // SYMOPT_NO_PROMPTS namespace { std::wstring GetExeDirectory() { wchar_t directory[MAX_PATH]; // Get path to this process exe. DWORD result = GetModuleFileName(/*hModule=*/nullptr, directory, MAX_PATH); if (result <= 0 || result == MAX_PATH) { fprintf(stderr, "GetExeDirectory: failed to get path to process exe.\n"); return L""; } HRESULT hr = PathCchRemoveFileSpec(directory, result + 1); if (hr != S_OK) { fprintf(stderr, "GetExeDirectory: failed to remove basename from path '%ls'.\n", directory); return L""; } return std::wstring(directory); } } // namespace namespace google_breakpad { // Use sscanf_s if it is available, to quench the warning about scanf being // deprecated. Use scanf where sscanf_is not available. Note that the // parameters passed to sscanf and sscanf_s are only compatible as long as // fields of type c, C, s, S, and [ are not used. 
#if _MSC_VER >= 1400  // MSVC 2005/8
#define SSCANF sscanf_s
#else  // _MSC_VER >= 1400
#define SSCANF sscanf
#endif  // _MSC_VER >= 1400

// Parses |identifier| as either a GUID+age (33-40 hex chars) or a
// signature+age (9-15 hex chars), setting type_ accordingly.  Returns false
// and leaves type_ at TYPE_NONE if the string matches neither form.
bool GUIDOrSignatureIdentifier::InitializeFromString(
    const string& identifier) {
  type_ = TYPE_NONE;

  size_t length = identifier.length();

  if (length > 32 && length <= 40) {
    // GUID
    if (SSCANF(identifier.c_str(),
               "%08X%04hX%04hX%02hhX%02hhX%02hhX%02hhX%02hhX%02hhX%02hhX%02hhX%X",
               &guid_.Data1, &guid_.Data2, &guid_.Data3,
               &guid_.Data4[0], &guid_.Data4[1],
               &guid_.Data4[2], &guid_.Data4[3],
               &guid_.Data4[4], &guid_.Data4[5],
               &guid_.Data4[6], &guid_.Data4[7],
               &age_) != 12) {
      return false;
    }
    type_ = TYPE_GUID;
  } else if (length > 8 && length <= 15) {
    // Signature
    if (SSCANF(identifier.c_str(), "%08X%x", &signature_, &age_) != 2) {
      return false;
    }
    type_ = TYPE_SIGNATURE;
  } else {
    return false;
  }

  return true;
}

#undef SSCANF

// Builds the SymSrv symbol path "srv*local_cache*server;..." from the given
// cache directory and server list.
MSSymbolServerConverter::MSSymbolServerConverter(
    const string& local_cache,
    const vector<string>& symbol_servers)
    : symbol_path_(),
      fail_dns_(false),
      fail_timeout_(false),
      fail_not_found_(false) {
  // Setting local_cache can be done without verifying that it exists because
  // SymSrv will create it if it is missing - any creation failures will occur
  // at that time, so there's nothing to check here, making it safe to
  // assign this in the constructor.

  assert(symbol_servers.size() > 0);

#if !defined(NDEBUG)
  // These are characters that are interpreted as having special meanings in
  // symbol_path_.
  const char kInvalidCharacters[] = "*;";
  assert(local_cache.find_first_of(kInvalidCharacters) == string::npos);
#endif  // !defined(NDEBUG)

  for (vector<string>::const_iterator symbol_server = symbol_servers.begin();
       symbol_server != symbol_servers.end();
       ++symbol_server) {
    // The symbol path format is explained by
    // http://msdn.microsoft.com/library/en-us/debug/base/using_symsrv.asp .
    // "srv*" is the same as "symsrv*symsrv.dll*", which means that
    // symsrv.dll is to be responsible for locating symbols.  symsrv.dll
    // interprets the rest of the string as a series of symbol stores
    // separated by '*'.  "srv*local_cache*symbol_server" means to check
    // local_cache first for the symbol file, and if it is not found there,
    // to check symbol_server.  Symbol files found on the symbol server will
    // be placed in the local cache, decompressed.
    //
    // Multiple specifications in this format may be presented, separated by
    // semicolons.
    assert((*symbol_server).find_first_of(kInvalidCharacters) == string::npos);
    symbol_path_ += "srv*" + local_cache + "*" + *symbol_server + ";";
  }

  // Strip the trailing semicolon.
  symbol_path_.erase(symbol_path_.length() - 1);
}

// A stack-based class that manages SymInitialize and SymCleanup calls.
class AutoSymSrv {
 public:
  AutoSymSrv() : initialized_(false) {}

  ~AutoSymSrv() {
    if (!Cleanup()) {
      // Print the error message here, because destructors have no return
      // value.
      fprintf(stderr, "~AutoSymSrv: SymCleanup: error %lu\n", GetLastError());
    }
  }

  // Loads dbghelp.dll from beside the exe (once per process), then calls
  // SymInitialize with |path| as the symbol search path.  Returns false on
  // either failure.
  bool Initialize(HANDLE process, char* path, bool invade_process) {
    process_ = process;

    // TODO(nbilling): Figure out why dbghelp.dll is being loaded from
    // system32/SysWOW64 before exe folder.

    // Attempt to locate and load dbghelp.dll beside the process exe. This is
    // somewhat of a workaround to loader delay load behavior that is
    // occurring when we call into symsrv APIs. dbghelp.dll must be loaded
    // from beside the process exe so that we are guaranteed to find
    // symsrv.dll alongside dbghelp.dll (a security requirement of
    // dbghelp.dll) and so that the symsrv.dll file that is loaded has a
    // symsrv.yes file alongside it (a requirement of symsrv.dll when
    // accessing Microsoft-owned symbol servers).
    // 'static local' because we don't care about the value but we need the
    // initialization to happen exactly once.
    static HMODULE dbghelp_module = [] () -> HMODULE {
      std::wstring exe_directory = GetExeDirectory();
      if (exe_directory.empty()) {
        return nullptr;
      }
      std::wstring dbghelp_path = exe_directory + L"\\dbghelp.dll";
      return LoadLibrary(dbghelp_path.c_str());
    }();
    if (dbghelp_module == nullptr) {
      fprintf(stderr,
              "AutoSymSrv::Initialize: failed to load dbghelp.dll beside exe.");
      return false;
    }

    initialized_ = SymInitialize(process, path, invade_process) == TRUE;
    return initialized_;
  }

  // Calls SymCleanup if initialized.  Idempotent; returns false only if an
  // active session fails to clean up.
  bool Cleanup() {
    if (initialized_) {
      if (SymCleanup(process_)) {
        initialized_ = false;
        return true;
      }
      return false;
    }

    return true;
  }

 private:
  HANDLE process_;
  bool initialized_;
};

// A stack-based class that "owns" a pathname and deletes it when destroyed,
// unless told not to by having its Release() method called.  Early deletions
// are supported by calling Delete().
class AutoDeleter {
 public:
  explicit AutoDeleter(const string& path) : path_(path) {}

  ~AutoDeleter() {
    int error;
    if ((error = Delete()) != 0) {
      // Print the error message here, because destructors have no return
      // value.
      fprintf(stderr, "~AutoDeleter: Delete: error %d for %s\n",
              error, path_.c_str());
    }
  }

  // Removes the file now (no-op if already released).  Returns remove()'s
  // result: 0 on success.
  int Delete() {
    if (path_.empty())
      return 0;

    int error = remove(path_.c_str());
    Release();
    return error;
  }

  // Disowns the path so the destructor will not delete the file.
  void Release() {
    path_.clear();
  }

 private:
  string path_;
};

// Downloads |debug_or_code_file| identified by |debug_or_code_id| from the
// configured symbol servers into the local cache, placing the resulting
// local path in |file_name|.  Returns LOCATE_SUCCESS, LOCATE_NOT_FOUND,
// LOCATE_RETRY (transient network trouble), or LOCATE_FAILURE.
MSSymbolServerConverter::LocateResult
MSSymbolServerConverter::LocateFile(const string& debug_or_code_file,
                                    const string& debug_or_code_id,
                                    const string& version,
                                    string* file_name) {
  assert(file_name);
  file_name->clear();

  GUIDOrSignatureIdentifier identifier;
  if (!identifier.InitializeFromString(debug_or_code_id)) {
    fprintf(stderr, "LocateFile: Unparseable identifier for %s %s %s\n",
            debug_or_code_file.c_str(),
            debug_or_code_id.c_str(),
            version.c_str());
    return LOCATE_FAILURE;
  }

  HANDLE process = GetCurrentProcess();  // CloseHandle is not needed.
  AutoSymSrv symsrv;
  if (!symsrv.Initialize(process,
                         const_cast<char*>(symbol_path_.c_str()),
                         false)) {
    fprintf(stderr, "LocateFile: SymInitialize: error %lu for %s %s %s\n",
            GetLastError(),
            debug_or_code_file.c_str(),
            debug_or_code_id.c_str(),
            version.c_str());
    return LOCATE_FAILURE;
  }

  // Route DbgHelp debug events to SymCallback, with |this| as context so the
  // callback can record failure crumbs on this object.
  if (!SymRegisterCallback64(process, SymCallback,
                             reinterpret_cast<ULONG64>(this))) {
    fprintf(stderr,
            "LocateFile: SymRegisterCallback64: error %lu for %s %s %s\n",
            GetLastError(),
            debug_or_code_file.c_str(),
            debug_or_code_id.c_str(),
            version.c_str());
    return LOCATE_FAILURE;
  }

  // SYMOPT_DEBUG arranges for SymCallback to be called with additional
  // debugging information.  This is used to determine the nature of failures.
  DWORD options = SymGetOptions() | SYMOPT_DEBUG | SYMOPT_NO_PROMPTS |
                  SYMOPT_FAIL_CRITICAL_ERRORS | SYMOPT_SECURE;
  SymSetOptions(options);

  // SymCallback will set these as needed inside the SymFindFileInPath call.
  fail_dns_ = false;
  fail_timeout_ = false;
  fail_not_found_ = false;

  // Do the lookup.
  char path[MAX_PATH];
  if (!SymFindFileInPath(
          process, NULL,
          const_cast<char*>(debug_or_code_file.c_str()),
          const_cast<void*>(identifier.guid_or_signature_pointer()),
          identifier.age(), 0,
          identifier.type() == GUIDOrSignatureIdentifier::TYPE_GUID ?
              SSRVOPT_GUIDPTR : SSRVOPT_DWORDPTR,
          path,
          SymFindFileInPathCallback, this)) {
    DWORD error = GetLastError();
    if (error == ERROR_FILE_NOT_FOUND) {
      // This can be returned for a number of reasons.  Use the crumbs
      // collected by SymCallback to determine which one is relevant.

      // These errors are possibly transient.
      if (fail_dns_ || fail_timeout_) {
        return LOCATE_RETRY;
      }

      // This is an authoritative file-not-found message.
      if (fail_not_found_) {
        fprintf(stderr,
                "LocateFile: SymFindFileInPath: LOCATE_NOT_FOUND error "
                "for %s %s %s\n",
                debug_or_code_file.c_str(),
                debug_or_code_id.c_str(),
                version.c_str());
        return LOCATE_NOT_FOUND;
      }

      // If the error is FILE_NOT_FOUND but none of the known error
      // conditions are matched, fall through to LOCATE_FAILURE.
    }

    fprintf(stderr,
            "LocateFile: SymFindFileInPath: error %lu for %s %s %s\n",
            error,
            debug_or_code_file.c_str(),
            debug_or_code_id.c_str(),
            version.c_str());
    return LOCATE_FAILURE;
  }

  // Making sure path is null-terminated.
  path[MAX_PATH - 1] = '\0';

  // The AutoDeleter ensures that the file is only kept when returning
  // LOCATE_SUCCESS.
  AutoDeleter deleter(path);

  // Do the cleanup here even though it will happen when symsrv goes out of
  // scope, to allow it to influence the return value.
  if (!symsrv.Cleanup()) {
    fprintf(stderr, "LocateFile: SymCleanup: error %lu for %s %s %s\n",
            GetLastError(),
            debug_or_code_file.c_str(),
            debug_or_code_id.c_str(),
            version.c_str());
    return LOCATE_FAILURE;
  }

  deleter.Release();
  printf("Downloaded: %s\n", path);
  *file_name = path;
  return LOCATE_SUCCESS;
}

// Convenience wrapper: locate the PE (code) file described by |missing|.
MSSymbolServerConverter::LocateResult
MSSymbolServerConverter::LocatePEFile(const MissingSymbolInfo& missing,
                                      string* pe_file) {
  return LocateFile(missing.code_file, missing.code_identifier,
                    missing.version, pe_file);
}

// Convenience wrapper: locate the symbol (PDB) file described by |missing|.
MSSymbolServerConverter::LocateResult
MSSymbolServerConverter::LocateSymbolFile(const MissingSymbolInfo& missing,
                                         string* symbol_file) {
  return LocateFile(missing.debug_file, missing.debug_identifier,
                    missing.version, symbol_file);
}

// DbgHelp debug-event callback.  Scans CBA_EVENT message text for known
// SymSrv failure strings and records which kind of failure occurred in the
// owning converter's fail_* flags.  Always returns FALSE (unhandled).
// static
BOOL CALLBACK MSSymbolServerConverter::SymCallback(HANDLE process,
                                                   ULONG action,
                                                   ULONG64 data,
                                                   ULONG64 context) {
  MSSymbolServerConverter* self =
      reinterpret_cast<MSSymbolServerConverter*>(context);

  switch (action) {
    case CBA_EVENT: {
      IMAGEHLP_CBA_EVENT* cba_event =
          reinterpret_cast<IMAGEHLP_CBA_EVENT*>(data);

      // Put the string into a string object to be able to use string::find
      // for substring matching.  This is important because the not-found
      // message does not use the entire string but is appended to the URL
      // that SymSrv attempted to retrieve.
      string desc(cba_event->desc);

      // desc_action maps strings (in desc) to boolean pointers that are to
      // be set to true if the string matches.
      struct desc_action {
        const char* desc;  // The substring to match.
        bool* action;      // On match, this pointer will be set to true.
      };

      static const desc_action desc_actions[] = {
        // When a DNS error occurs, it could be indicative of network
        // problems.
        { "SYMSRV: The server name or address could not be resolved\n",
          &self->fail_dns_ },

        // This message is produced if no connection is opened.
        { "SYMSRV: A connection with the server could not be established\n",
          &self->fail_timeout_ },

        // This message is produced if a connection is established but the
        // server fails to respond to the HTTP request.
        { "SYMSRV: The operation timed out\n",
          &self->fail_timeout_ },

        // This message is produced when the requested file is not found,
        // even if one or more of the above messages are also produced.
        // It's trapped to distinguish between not-found and unknown-failure
        // conditions.  Note that this message will not be produced if a
        // connection is established and the server begins to respond to the
        // HTTP request but does not finish transmitting the file.
        { " not found\n",
          &self->fail_not_found_ }
      };

      for (int desc_action_index = 0;
           desc_action_index <
               static_cast<int>(sizeof(desc_actions) / sizeof(desc_action));
           ++desc_action_index) {
        if (desc.find(desc_actions[desc_action_index].desc) != string::npos) {
          *(desc_actions[desc_action_index].action) = true;
          break;
        }
      }

      break;
    }
  }

  // This function is a mere fly on the wall.  Treat everything as unhandled.
  return FALSE;
}

// Callback for SymFindFileInPath.  Returning FALSE accepts the first file
// found and ends the search.
// static
BOOL CALLBACK MSSymbolServerConverter::SymFindFileInPathCallback(
    const char* filename, void* context) {
  // FALSE ends the search, indicating that the located symbol file is
  // satisfactory.
  return FALSE;
}

// Downloads the PDB (and, when possible, the PE) for |missing|, converts it
// to Breakpad format, and writes the .sym path to |converted_symbol_file|.
// Falls back to PE-only conversion if no PDB can be located.  Downloaded
// files are deleted unless the corresponding keep_* flag is set; on
// conversion failure the cached files are removed so a later attempt will
// re-download them.
MSSymbolServerConverter::LocateResult
MSSymbolServerConverter::LocateAndConvertSymbolFile(
    const MissingSymbolInfo& missing,
    bool keep_symbol_file,
    bool keep_pe_file,
    string* converted_symbol_file,
    string* symbol_file,
    string* out_pe_file) {
  assert(converted_symbol_file);
  converted_symbol_file->clear();
  if (symbol_file) {
    symbol_file->clear();
  }

  string pdb_file;
  LocateResult result = LocateSymbolFile(missing, &pdb_file);
  if (result != LOCATE_SUCCESS) {
    fprintf(stderr, "Fallback to PE-only symbol generation for: %s\n",
            missing.debug_file.c_str());
    return LocateAndConvertPEFile(missing, keep_pe_file, converted_symbol_file,
                                  out_pe_file);
  }

  if (symbol_file && keep_symbol_file) {
    *symbol_file = pdb_file;
  }

  // The conversion of a symbol file for a Windows 64-bit module requires
  // loading of the executable file. If there is no executable file, convert
  // using only the PDB file. Without an executable file, the conversion will
  // fail for 64-bit modules but it should succeed for 32-bit modules.
  string pe_file;
  result = LocatePEFile(missing, &pe_file);
  if (result != LOCATE_SUCCESS) {
    fprintf(stderr, "WARNING: Could not download: %s\n", pe_file.c_str());
  }

  if (out_pe_file && keep_pe_file) {
    *out_pe_file = pe_file;
  }

  // Conversion may fail because the file is corrupt. If a broken file is
  // kept in the local cache, LocateSymbolFile will not hit the network again
  // to attempt to locate it. To guard against problems like this, the
  // symbol file in the local cache will be removed if conversion fails.
  AutoDeleter pdb_deleter(pdb_file);
  AutoDeleter pe_deleter(pe_file);

  // Be sure that it's a .pdb file, since we'll be replacing .pdb with .sym
  // for the converted file's name.
  string pdb_extension = pdb_file.substr(pdb_file.length() - 4);
  // strcasecmp is called _stricmp here.
  if (_stricmp(pdb_extension.c_str(), ".pdb") != 0) {
    fprintf(stderr, "LocateAndConvertSymbolFile: "
            "no .pdb extension for %s %s %s %s\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pdb_file.c_str());
    return LOCATE_FAILURE;
  }

  PDBSourceLineWriter writer;
  wstring pe_file_w;
  if (!WindowsStringUtils::safe_mbstowcs(pe_file, &pe_file_w)) {
    fprintf(stderr,
            "LocateAndConvertSymbolFile: "
            "WindowsStringUtils::safe_mbstowcs failed for %s\n",
            pe_file.c_str());
    return LOCATE_FAILURE;
  }
  wstring pdb_file_w;
  if (!WindowsStringUtils::safe_mbstowcs(pdb_file, &pdb_file_w)) {
    // NOTE(review): this prints pdb_file_w, which is the *output* of the
    // failed conversion (likely empty) — printing pdb_file would be more
    // informative; confirm before changing the message.
    fprintf(stderr,
            "LocateAndConvertSymbolFile: "
            "WindowsStringUtils::safe_mbstowcs failed for %ws\n",
            pdb_file_w.c_str());
    return LOCATE_FAILURE;
  }
  if (!writer.Open(pdb_file_w, PDBSourceLineWriter::PDB_FILE)) {
    fprintf(stderr,
            "ERROR: PDBSourceLineWriter::Open failed for %s %s %s %ws\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pdb_file_w.c_str());
    return LOCATE_FAILURE;
  }
  if (!writer.SetCodeFile(pe_file_w)) {
    fprintf(stderr,
            "ERROR: PDBSourceLineWriter::SetCodeFile failed for %s %s %s %ws\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pe_file_w.c_str());
    return LOCATE_FAILURE;
  }

  *converted_symbol_file = pdb_file.substr(0, pdb_file.length() - 4) + ".sym";

  FILE* converted_output = NULL;
#if _MSC_VER >= 1400  // MSVC 2005/8
  errno_t err;
  if ((err = fopen_s(&converted_output, converted_symbol_file->c_str(), "w"))
      != 0) {
#else  // _MSC_VER >= 1400
  // fopen_s and errno_t were introduced in MSVC8. Use fopen for earlier
  // environments. Don't use fopen with MSVC8 and later, because it's
  // deprecated. fopen does not provide reliable error codes, so just use
  // -1 in the event of a failure.
  int err;
  if (!(converted_output = fopen(converted_symbol_file->c_str(), "w"))) {
    err = -1;
#endif  // _MSC_VER >= 1400
    fprintf(stderr, "LocateAndConvertSymbolFile: "
            "fopen_s: error %d for %s %s %s %s\n",
            err,
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            converted_symbol_file->c_str());
    return LOCATE_FAILURE;
  }

  AutoDeleter sym_deleter(*converted_symbol_file);

  bool success = writer.WriteSymbols(converted_output);
  fclose(converted_output);

  if (!success) {
    // NOTE(review): message says WriteMap but the call above is
    // WriteSymbols — apparently a historical name; confirm before renaming.
    fprintf(stderr, "LocateAndConvertSymbolFile: "
            "PDBSourceLineWriter::WriteMap failed for %s %s %s %s\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pdb_file.c_str());
    return LOCATE_FAILURE;
  }

  if (keep_symbol_file) {
    pdb_deleter.Release();
  }
  if (keep_pe_file) {
    pe_deleter.Release();
  }

  sym_deleter.Release();

  return LOCATE_SUCCESS;
}

// Downloads only the PE for |missing| and converts it to Breakpad format.
// Works only for x86_64 modules (frame data can be derived from the PE);
// other architectures return LOCATE_FAILURE without deleting the cached PE.
MSSymbolServerConverter::LocateResult
MSSymbolServerConverter::LocateAndConvertPEFile(
    const MissingSymbolInfo& missing,
    bool keep_pe_file,
    string* converted_symbol_file,
    string* out_pe_file) {
  assert(converted_symbol_file);
  converted_symbol_file->clear();

  string pe_file;
  MSSymbolServerConverter::LocateResult result = LocatePEFile(missing,
                                                              &pe_file);
  if (result != LOCATE_SUCCESS) {
    fprintf(stderr, "WARNING: Could not download: %s\n", pe_file.c_str());
    return result;
  }

  if (out_pe_file && keep_pe_file) {
    *out_pe_file = pe_file;
  }

  // Conversion may fail because the file is corrupt. If a broken file is
  // kept in the local cache, LocatePEFile will not hit the network again
  // to attempt to locate it. To guard against problems like this, the
  // PE file in the local cache will be removed if conversion fails.
  AutoDeleter pe_deleter(pe_file);

  // Be sure that it's a .exe or .dll file, since we'll be replacing extension
  // with .sym for the converted file's name.
  string pe_extension = pe_file.substr(pe_file.length() - 4);
  // strcasecmp is called _stricmp here.
  if (_stricmp(pe_extension.c_str(), ".exe") != 0 &&
      _stricmp(pe_extension.c_str(), ".dll") != 0) {
    fprintf(stderr, "LocateAndConvertPEFile: "
            "no .dll/.exe extension for %s %s %s %s\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pe_file.c_str());
    return LOCATE_FAILURE;
  }

  *converted_symbol_file = pe_file.substr(0, pe_file.length() - 4) + ".sym";

  FILE* converted_output = NULL;
#if _MSC_VER >= 1400  // MSVC 2005/8
  errno_t err;
  if ((err = fopen_s(&converted_output, converted_symbol_file->c_str(), "w"))
      != 0) {
#else  // _MSC_VER >= 1400
  // fopen_s and errno_t were introduced in MSVC8. Use fopen for earlier
  // environments. Don't use fopen with MSVC8 and later, because it's
  // deprecated. fopen does not provide reliable error codes, so just use
  // -1 in the event of a failure.
  int err;
  if (!(converted_output = fopen(converted_symbol_file->c_str(), "w"))) {
    err = -1;
#endif  // _MSC_VER >= 1400
    fprintf(stderr, "LocateAndConvertPEFile: "
            "fopen_s: error %d for %s %s %s %s\n",
            err,
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            converted_symbol_file->c_str());
    return LOCATE_FAILURE;
  }
  AutoDeleter sym_deleter(*converted_symbol_file);

  wstring pe_file_w;
  if (!WindowsStringUtils::safe_mbstowcs(pe_file, &pe_file_w)) {
    fprintf(stderr,
            "LocateAndConvertPEFile: "
            "WindowsStringUtils::safe_mbstowcs failed for %s\n",
            pe_file.c_str());
    return LOCATE_FAILURE;
  }
  PESourceLineWriter writer(pe_file_w);
  PDBModuleInfo module_info;
  if (!writer.GetModuleInfo(&module_info)) {
    fprintf(stderr, "LocateAndConvertPEFile: "
            "PESourceLineWriter::GetModuleInfo failed for %s %s %s %s\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pe_file.c_str());
    return LOCATE_FAILURE;
  }
  if (module_info.cpu.compare(L"x86_64") != 0) {
    // This module is not x64 so we cannot generate Breakpad symbols from the
    // PE alone. Don't delete PE-- no need to retry download.
    pe_deleter.Release();
    return LOCATE_FAILURE;
  }

  bool success = writer.WriteSymbols(converted_output);
  fclose(converted_output);

  if (!success) {
    // NOTE(review): message says WriteMap but the call above is
    // WriteSymbols — apparently a historical name; confirm before renaming.
    fprintf(stderr, "LocateAndConvertPEFile: "
            "PESourceLineWriter::WriteMap failed for %s %s %s %s\n",
            missing.debug_file.c_str(),
            missing.debug_identifier.c_str(),
            missing.version.c_str(),
            pe_file.c_str());
    return LOCATE_FAILURE;
  }

  if (keep_pe_file) {
    pe_deleter.Release();
  }

  sym_deleter.Release();

  return LOCATE_SUCCESS;
}

}  // namespace google_breakpad
import {Pack} from "./pack"; import {NumberUtil} from "../../helpers/number.util"; export class NormalPack extends Pack { commonAmount: number; uncommonAmount: number; constructor() { super(); const commonAmount = NumberUtil.randomBetween(1, 8); this.commonAmount = commonAmount; this.uncommonAmount = 8 - commonAmount; } get packRate() { return { COMMON: this.commonAmount, UNCOMMON: this.uncommonAmount, HOLO: 1, RARES: 1 } } }
/** * @brief enter in low power mode * @note called by cellular service task automaton * @param none * @retval error code */ void CSP_DataIdleManagment(void) { PRINT_CELLULAR_SERVICE("++++++++++++++++ CSP_DataIdleManagment\n\r") switch (csp_dc_power_config.power_mode) { case DC_POWER_RUN_REAL_TIME: CST_set_state(CST_MODEM_POWER_DATA_IDLE_STATE); break; case DC_POWER_RUN_INTERACTIVE_0: case DC_POWER_RUN_INTERACTIVE_1: case DC_POWER_RUN_INTERACTIVE_2: case DC_POWER_RUN_INTERACTIVE_3: CST_set_state(CST_MODEM_POWER_DATA_IDLE_STATE); CSP_SleepRequest(csp_dc_power_config.sleep_request_timeout); break; case DC_POWER_IDLE: CST_set_state(CST_MODEM_POWER_DATA_IDLE_STATE); CSP_SleepRequest(csp_dc_power_config.sleep_request_timeout); break; case DC_POWER_IDLE_LP: CST_set_state(CST_MODEM_POWER_DATA_IDLE_STATE); CSP_SleepRequest(csp_dc_power_config.sleep_request_timeout); break; case DC_POWER_LP: CST_set_state(CST_MODEM_POWER_DATA_IDLE_STATE); CSP_SleepRequest(csp_dc_power_config.sleep_request_timeout); break; case DC_POWER_ULP: CST_set_state(CST_MODEM_POWER_DATA_IDLE_STATE); CSP_SleepRequest(csp_dc_power_config.sleep_request_timeout); break; default: __NOP(); break; } }