/**
 * Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/OpenGL/extensions/MESA/MESA_program_binary_formats.txt">MESA_program_binary_formats</a> extension.
 *
 * <p>The {@code get_program_binary} extensions require a {@code GLenum} {@code binaryFormat}. This extension documents that format for use with Mesa.</p>
 */
public final class MESAProgramBinaryFormats {

    /**
     * For {@code ARB_get_program_binary}, {@code GL_PROGRAM_BINARY_FORMAT_MESA} may be returned from {@link GLES30#glGetProgramBinary GetProgramBinary}
     * calls in the {@code binaryFormat} parameter and when retrieving the value of {@link GLES30#GL_PROGRAM_BINARY_FORMATS PROGRAM_BINARY_FORMATS}.
     *
     * <p>For {@link OESGetProgramBinary OES_get_program_binary}, {@code GL_PROGRAM_BINARY_FORMAT_MESA} may be returned from
     * {@link OESGetProgramBinary#glGetProgramBinaryOES GetProgramBinaryOES} calls in the {@code binaryFormat} parameter and when retrieving the value of
     * {@link OESGetProgramBinary#GL_PROGRAM_BINARY_FORMATS_OES PROGRAM_BINARY_FORMATS_OES}.</p>
     */
    public static final int GL_PROGRAM_BINARY_FORMAT_MESA = 0x875F;

    private MESAProgramBinaryFormats() {}
}
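A loader that caches program binaries would typically confirm at startup that the driver still advertises this format before reusing a cached blob. Below is a minimal sketch of that probe in Python; gl_get_integers is a hypothetical stand-in for whatever glGetIntegerv binding your GL wrapper provides, and the two query enums are the standard ones from ARB_get_program_binary.

GL_PROGRAM_BINARY_FORMAT_MESA = 0x875F  # constant defined above
GL_NUM_PROGRAM_BINARY_FORMATS = 0x87FE  # standard GL query enums
GL_PROGRAM_BINARY_FORMATS = 0x87FF

def mesa_binary_format_supported(gl_get_integers):
    # gl_get_integers(pname, count) is a hypothetical helper that wraps
    # glGetIntegerv and returns `count` integers for the given pname.
    num = gl_get_integers(GL_NUM_PROGRAM_BINARY_FORMATS, 1)[0]
    if num == 0:
        return False
    formats = gl_get_integers(GL_PROGRAM_BINARY_FORMATS, num)
    return GL_PROGRAM_BINARY_FORMAT_MESA in formats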
-- | Transaction metadata module Blockfrost.Types.Cardano.Metadata ( TxMeta (..) , TxMetaJSON (..) , TxMetaCBOR (..) ) where import Data.Aeson (Value, object, (.=)) import Data.Text (Text) import Deriving.Aeson import Servant.Docs (ToSample (..), samples) import Blockfrost.Types.Shared -- | Transaction metadata label in use data TxMeta = TxMeta { _txMetaLabel :: Text -- ^ Metadata label , _txMetaCip10 :: Maybe Text -- ^ CIP10 defined description , _txMetaCount :: Quantity -- ^ The count of metadata entries with a specific label } deriving stock (Show, Eq, Generic) deriving (FromJSON, ToJSON) via CustomJSON '[FieldLabelModifier '[StripPrefix "_txMeta", CamelToSnake]] TxMeta instance ToSample TxMeta where toSamples = pure $ samples [ TxMeta "1990" Nothing 1 , TxMeta "1967" (Just "nut.link metadata oracles registry") 3 , TxMeta "1968" (Just "nut.link metadata oracles data points") 16321 ] -- | Transaction metadata content in JSON data TxMetaJSON = TxMetaJSON { _txMetaJSONTxHash :: TxHash -- ^ Transaction hash that contains the specific metadata , _txMetaJSONJSONMetadata :: Maybe Value -- ^ Content of the JSON metadata } deriving stock (Show, Eq, Generic) deriving (FromJSON, ToJSON) via CustomJSON '[FieldLabelModifier '[StripPrefix "_txMetaJSON", CamelToSnake]] TxMetaJSON instance ToSample TxMetaJSON where toSamples = let oracleMeta val = object [ "ADAUSD" .= [ object [ "value" .= (val :: Text) , "source" .= ("ergoOracles" :: Text) ] ] ] in pure $ samples [ TxMetaJSON "257d75c8ddb0434e9b63e29ebb6241add2b835a307aa33aedba2effe09ed4ec8" (Just $ oracleMeta "0.10409800535729975") , TxMetaJSON "e865f2cc01ca7381cf98dcdc4de07a5e8674b8ea16e6a18e3ed60c186fde2b9c" (Just $ oracleMeta "0.15409850555139935") , TxMetaJSON "4237501da3cfdd53ade91e8911e764bd0699d88fd43b12f44a1f459b89bc91be" Nothing ] -- | Transaction metadata content in CBOR data TxMetaCBOR = TxMetaCBOR { _txMetaCBORTxHash :: TxHash -- ^ Transaction hash that contains the specific metadata , _txMetaCBORMetadata :: Maybe Text -- ^ Content of the CBOR metadata } deriving stock (Show, Eq, Generic) deriving (FromJSON, ToJSON) via CustomJSON '[FieldLabelModifier '[StripPrefix "_txMetaCBOR", CamelToSnake]] TxMetaCBOR instance ToSample TxMetaCBOR where toSamples = pure $ samples [ TxMetaCBOR "257d75c8ddb0434e9b63e29ebb6241add2b835a307aa33aedba2effe09ed4ec8" Nothing , TxMetaCBOR "e865f2cc01ca7381cf98dcdc4de07a5e8674b8ea16e6a18e3ed60c186fde2b9c" Nothing , TxMetaCBOR "4237501da3cfdd53ade91e8911e764bd0699d88fd43b12f44a1f459b89bc91be" (Just "a100a16b436f6d62696e6174696f6e8601010101010c") ]
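The TxMetaCBOR sample above is plain hex-encoded CBOR, so it can be inspected with any CBOR decoder; for instance, with the third-party cbor2 package in Python:

import cbor2  # third-party CBOR decoder, `pip install cbor2`

raw = bytes.fromhex("a100a16b436f6d62696e6174696f6e8601010101010c")
print(cbor2.loads(raw))
# {0: {'Combination': [1, 1, 1, 1, 1, 12]}}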
n = int(input())
v_list = [int(i) for i in input().split()]
c_list = [int(i) for i in input().split()]

# Profit of each item is its value minus its cost.
profits = [v - c for v, c in zip(v_list, c_list)]
profits.sort(reverse=True)

# Greedily take items while they still add positive profit.
total = 0
for p in profits:
    if p > 0:
        total += p
    else:
        break
print(total)
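A quick hand-check of the greedy logic with made-up numbers:

# Hypothetical input:
#   4
#   5 3 7 1      (values)
#   2 4 1 3      (costs)
# Profits are [3, -1, 6, -2]; sorted descending -> [6, 3, -1, -2].
# Summing the positive prefix gives 6 + 3 = 9, which is printed.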
We're learning more about the third suspect in the murder of a Millbrae man. The third suspect is Olivier Adella. He's a little known figure in what's turned out to be a high profile homicide case.

Adella, who's a mixed martial arts fighter and has had some success in the ring, was arrested last Friday at his apartment in Burlingame.

The two other suspects - the victim's ex-girlfriend Tiffany Li and her current boyfriend Kayveh Bayat - were taken in the day before by a SWAT team at Li's Hillsborough mansion.

Prosecutors have not said what Adella's alleged connection is to the murder of Keith Green. He's a mystery man; big and buffed, seen in photos with his wife. Adella also goes by the name of Olivier Bredenstein. On Facebook, he says they're the new parents of a baby boy.

Adella works out of a 24 Hour Fitness club in Millbrae. People there did not want to go on camera, but said he was nice and friendly. He lists himself as a limo operator. His business office turned out to be a PO box in San Bruno with mail still inside. His bio says he's from the Ivory Coast and served with the French Foreign Legion.

The other two suspects, Li and Bayat, were in a relationship. Green's former attorney told ABC7 News that Green and Li broke up last year.

"It all began with Keith's discovery that Tiffany had an affair with his friend Kavi, that he caught him," said attorney Mitri Hanania.

ABC7 News obtained a document, a mutual separation agreement that Li had asked Green to sign in October. She offered him $4,000 a month plus a lump sum of $20,000 and a Range Rover. In return, she wanted Green to move out of the mansion, which her wealthy Chinese mother had bought her, and resolve any financial claims he might have had. A source says Li got angry when he refused to sign.

Green went missing in April. His body was found two weeks ago in Sonoma County with a bullet through his neck.
from classBasisView import BasisView


class LView(BasisView):

    TID = "L"
    SDESC = "LViz"
    ordN = 0
    title_str = "List View"
    geo = False
    typesI = "r"

    @classmethod
    def suitableView(tcl, geo=False, what=None, tabT=None):
        return tabT is None or tabT in tcl.typesI

    def __init__(self, parent, vid, more=None):
        self.initVars(parent, vid, more)
        self.reds = {}
        self.srids = []
        self.initView()

    def getReds(self):
        # The actual queries, not copies: for testing etc., not for modification.
        return self.reds

    def refresh(self):
        self.autoShowSplitsBoxes()
        self.updateMap()
        if self.isIntab():
            self._SetSize()

    def setCurrent(self, reds_map):
        self.reds = dict(reds_map)
        self.srids = [rid for (rid, red) in reds_map]
/* * Copyright 2006-2008 The FLWOR Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ //#include "stdafx.h" #include "debugger/socket.h" #include <sstream> #if defined WIN32 || defined WINCE #include <winsock2.h> typedef int socklen_t; typedef char raw_type; #ifdef WIN32 static bool initialized = false; #endif #else #include <sys/types.h> #include <sys/socket.h> #include <netdb.h> #include <arpa/inet.h> #include <unistd.h> #include <netinet/in.h> #include <cstring> #include <cstdlib> typedef void raw_type; #endif #include <errno.h> #include "zorbautils/lock.h" namespace zorba { // Function to fill in address structure given an address and port static void fillAddr(const std::string &address, unsigned short port, sockaddr_in &addr) { // we have to lock calls to gethostbyname because // it's not thread-safe static zorba::Lock theLock; memset(&addr, 0, sizeof(addr)); // Zero out address structure addr.sin_family = AF_INET; // Internet address theLock.rlock(); hostent *host; // Resolve name if ((host = gethostbyname(address.c_str())) == NULL) { // strerror() will not work for gethostbyname() and hstrerror() // is supposedly obsolete theLock.unlock(); throw DebuggerSocketException("Failed to resolve name (gethostbyname())"); } addr.sin_addr.s_addr = *((unsigned long *) host->h_addr_list[0]); addr.sin_port = htons(port); // Assign port in network byte order theLock.unlock(); } // Socket Code Socket::Socket(int aType, int aProtocol) { #ifdef WIN32 if (!initialized) { WORD wVersionRequested; WSADATA wsaData; wVersionRequested = MAKEWORD(2, 0); // Request WinSock v2.0 if (WSAStartup(wVersionRequested, &wsaData) != 0) { // Load WinSock DLL throw DebuggerSocketException("Unable to load WinSock DLL"); } initialized = true; } #endif // Make a new socket theDescriptor = socket(PF_INET, aType, aProtocol); if (theDescriptor == INVALID_SOCKET) { throw DebuggerSocketException("Socket creation failed (socket())", true); } } Socket::Socket(SOCKET aDescriptor) { this->theDescriptor = aDescriptor; } Socket::~Socket() { close(); } void Socket::close() { #ifdef WIN32 ::closesocket(theDescriptor); #else ::close(theDescriptor); #endif } std::string Socket::getLocalAddress() { sockaddr_in addr; unsigned int addr_len = sizeof(addr); if (getsockname(theDescriptor, (sockaddr *) &addr, (socklen_t *) &addr_len) < 0) { throw DebuggerSocketException("Fetch of local address failed (getsockname())", true); } return inet_ntoa(addr.sin_addr); } unsigned short Socket::getLocalPort() { sockaddr_in addr; unsigned int addr_len = sizeof(addr); if (getsockname(theDescriptor, (sockaddr *) &addr, (socklen_t *) &addr_len) < 0) { throw DebuggerSocketException("Fetch of local port failed (getsockname())", true); } return ntohs(addr.sin_port); } void Socket::setLocalPort(unsigned short localPort) { // Bind the socket to its port sockaddr_in localAddr; memset(&localAddr, 0, sizeof(localAddr)); localAddr.sin_family = AF_INET; localAddr.sin_addr.s_addr = htonl(INADDR_ANY); localAddr.sin_port = htons(localPort); int opt = 1; 
setsockopt(theDescriptor, SOL_SOCKET,SO_REUSEADDR, (char *)&opt, (socklen_t)sizeof(opt)); if (bind(theDescriptor, (sockaddr *) &localAddr, sizeof(sockaddr_in)) < 0) { std::stringstream lMsg; lMsg << "Set of local port failed: " << localPort; throw DebuggerSocketException(lMsg.str(), true); } } void Socket::setLocalAddressAndPort(const std::string &localAddress, unsigned short localPort) { // Get the address of the requested host sockaddr_in localAddr; fillAddr(localAddress, localPort, localAddr); int opt = 1; setsockopt(theDescriptor, SOL_SOCKET,SO_REUSEADDR, (char *)&opt, (socklen_t)sizeof(opt)); if (bind(theDescriptor, (sockaddr *) &localAddr, sizeof(sockaddr_in)) < 0) { throw DebuggerSocketException("Set of local address and port failed (bind())", true); } } void Socket::cleanUp() { #ifdef WIN32 if (WSACleanup() != 0) { throw DebuggerSocketException("WSACleanup() failed"); } #endif } unsigned short Socket::resolveService(const std::string &service, const std::string &protocol) { struct servent *serv; /* Structure containing service information */ if ((serv = getservbyname(service.c_str(), protocol.c_str())) == NULL) return atoi(service.c_str()); /* Service is port number */ else return ntohs(serv->s_port); /* Found port (network byte order) by name */ } // CommunicatingSocket Code CommunicatingSocket::CommunicatingSocket(int type, int protocol): Socket(type, protocol) { } CommunicatingSocket::CommunicatingSocket(SOCKET newConnSD) : Socket(newConnSD) { } void CommunicatingSocket::connect(const std::string &foreignAddress, unsigned short foreignPort) { // Get the address of the requested host sockaddr_in destAddr; fillAddr(foreignAddress, foreignPort, destAddr); // Try to connect to the given port if (::connect(theDescriptor, (sockaddr *) &destAddr, sizeof(destAddr)) < 0) { std::stringstream lMsg; lMsg << "Connection @" << foreignAddress << ":" << foreignPort << " failed."; throw DebuggerSocketException( lMsg.str(), true); } } void CommunicatingSocket::send(const void *buffer, int bufferLen) { if (::send(theDescriptor, (raw_type *) buffer, bufferLen, 0) < 0) { throw DebuggerSocketException("Send failed (send())", true); } } int CommunicatingSocket::recv(void *buffer, int bufferLen) { int rtn; if ((rtn = ::recv(theDescriptor, (raw_type *) buffer, bufferLen, 0)) < 0) { throw DebuggerSocketException("Received failed (recv())", true); } return rtn; } std::string CommunicatingSocket::getForeignAddress() { sockaddr_in addr; unsigned int addr_len = sizeof(addr); if (getpeername(theDescriptor, (sockaddr *) &addr,(socklen_t *) &addr_len) < 0) { throw DebuggerSocketException("Fetch of foreign address failed (getpeername())", true); } return inet_ntoa(addr.sin_addr); } unsigned short CommunicatingSocket::getForeignPort() { sockaddr_in addr; unsigned int addr_len = sizeof(addr); if (getpeername(theDescriptor, (sockaddr *) &addr, (socklen_t *) &addr_len) < 0) { throw DebuggerSocketException("Fetch of foreign port failed (getpeername())", true); } return ntohs(addr.sin_port); } // TCPSocket Code TCPSocket::TCPSocket(): CommunicatingSocket(SOCK_STREAM, IPPROTO_TCP) { } TCPSocket::TCPSocket(const std::string &foreignAddress, unsigned short foreignPort): CommunicatingSocket(SOCK_STREAM, IPPROTO_TCP) { connect(foreignAddress, foreignPort); } TCPSocket::TCPSocket(SOCKET newConnSD) : CommunicatingSocket(newConnSD) { } // TCPServerSocket Code TCPServerSocket::TCPServerSocket(unsigned short localPort, int queueLen): Socket(SOCK_STREAM, IPPROTO_TCP) { setLocalPort(localPort); setListen(queueLen); } 
TCPServerSocket::TCPServerSocket(const std::string &localAddress, unsigned short localPort, int queueLen): Socket(SOCK_STREAM, IPPROTO_TCP) { setLocalAddressAndPort(localAddress, localPort); setListen(queueLen); } TCPSocket *TCPServerSocket::accept() { SOCKET newConnSD = ::accept(theDescriptor, NULL, 0); if (newConnSD == INVALID_SOCKET) { throw DebuggerSocketException("Accept failed (accept())", true); } return new TCPSocket(newConnSD); } void TCPServerSocket::setListen(int queueLen) { if (listen(theDescriptor, queueLen) < 0) { throw DebuggerSocketException("Set listening socket failed (listen())", true); } } }//end of namespace
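For comparison, the bind-with-SO_REUSEADDR / listen / accept / recv flow that these classes wrap takes only a few lines with Python's standard socket module. A minimal echo-server sketch (the port number is hypothetical):

import socket

with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as srv:
    # Same option TCPServerSocket sets before bind().
    srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    srv.bind(("", 28028))              # hypothetical debugger port
    srv.listen(5)                      # Socket::setListen()
    conn, addr = srv.accept()          # TCPServerSocket::accept()
    with conn:
        data = conn.recv(4096)         # CommunicatingSocket::recv()
        if data:
            conn.sendall(data)         # CommunicatingSocket::send()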
/**
 * Checks the modification for containers without assigned CIDs and adds them.
 * This changes values inside the modification, so it *must be called before
 * the modification is applied to the prism object*.
 *
 * Theoretically, the changes may affect the prism object after the fact, but if any
 * cloning is involved this may not be true, so preferably use this *before* applying
 * the modification.
 */
public void processModification(ItemDelta<?, ?> modification) throws SchemaException {
    if (modification.isReplace()) {
        freeIdsFromReplacedContainer(modification);
    }
    if (modification.isAdd()) {
        identifyReplacedContainers(modification);
    }
    try {
        processModificationValues(modification.getValuesToAdd());
        processModificationValues(modification.getValuesToReplace());
    } catch (DuplicateContainerIdException e) {
        throw new SchemaException(
                "CID " + maxUsedId + " is used repeatedly in the object: " + object);
    }
    assignMissingContainerIds();
}
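Conceptually, the method walks container values, hands out fresh CIDs to values that lack one, and fails on duplicates. A language-agnostic sketch of that idea in Python (an illustration, not the midPoint API; the dict-based value shape is assumed):

def assign_missing_container_ids(values, max_used_id):
    # values: container values as dicts with an optional "id" key.
    seen = set()
    for value in values:
        cid = value.get("id")
        if cid is None:
            max_used_id += 1           # hand out the next free CID
            value["id"] = max_used_id
        elif cid in seen:
            raise ValueError("CID %s is used repeatedly in the object" % cid)
        seen.add(value["id"])
    return max_used_id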
<reponame>proxium/script.module.lambdascrapers # -*- coding: utf-8 -*- ''' Covenant Add-on This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import urllib,urlparse,json from resources.lib.modules import control from resources.lib.modules import cleantitle class source: def __init__(self): self.priority = 1 self.language = ['en', 'de', 'fr', 'ko', 'pl', 'pt', 'ru'] self.domains = [] def movie(self, imdb, title, localtitle, aliases, year): try: return urllib.urlencode({'imdb': imdb, 'title': title, 'localtitle': localtitle,'year': year}) except: return def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year): try: return urllib.urlencode({'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'localtvshowtitle': localtvshowtitle, 'year': year}) except: return def episode(self, url, imdb, tvdb, title, premiered, season, episode): try: if url is None: return url = urlparse.parse_qs(url) url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url]) url.update({'premiered': premiered, 'season': season, 'episode': episode}) return urllib.urlencode(url) except: return def sources(self, url, hostDict, hostprDict): sources = [] try: if url is None: return sources data = urlparse.parse_qs(url) data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data]) content_type = 'episode' if 'tvshowtitle' in data else 'movie' years = (data['year'], str(int(data['year'])+1), str(int(data['year'])-1)) if content_type == 'movie': title = cleantitle.get(data['title']) localtitle = cleantitle.get(data['localtitle']) ids = [data['imdb']] r = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovies", "params": {"filter":{"or": [{"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}]}, "properties": ["imdbnumber", "title", "originaltitle", "file"]}, "id": 1}' % years) r = unicode(r, 'utf-8', errors='ignore') r = json.loads(r)['result']['movies'] r = [i for i in r if str(i['imdbnumber']) in ids or title in [cleantitle.get(i['title'].encode('utf-8')), cleantitle.get(i['originaltitle'].encode('utf-8'))]] r = [i for i in r if not i['file'].encode('utf-8').endswith('.strm')][0] r = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovieDetails", "params": {"properties": ["streamdetails", "file"], "movieid": %s }, "id": 1}' % str(r['movieid'])) r = unicode(r, 'utf-8', errors='ignore') r = json.loads(r)['result']['moviedetails'] elif content_type == 'episode': title = cleantitle.get(data['tvshowtitle']) localtitle = cleantitle.get(data['localtvshowtitle']) season, episode = data['season'], data['episode'] ids = [data['imdb'], data['tvdb']] r = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShows", "params": {"filter":{"or": [{"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}, {"field": "year", "operator": "is", "value": "%s"}]}, "properties": 
["imdbnumber", "title"]}, "id": 1}' % years) r = unicode(r, 'utf-8', errors='ignore') r = json.loads(r)['result']['tvshows'] r = [i for i in r if str(i['imdbnumber']) in ids or title in [cleantitle.get(i['title'].encode('utf-8')), cleantitle.get(i['originaltitle'].encode('utf-8'))]][0] r = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodes", "params": {"filter":{"and": [{"field": "season", "operator": "is", "value": "%s"}, {"field": "episode", "operator": "is", "value": "%s"}]}, "properties": ["file"], "tvshowid": %s }, "id": 1}' % (str(season), str(episode), str(r['tvshowid']))) r = unicode(r, 'utf-8', errors='ignore') r = json.loads(r)['result']['episodes'] r = [i for i in r if not i['file'].encode('utf-8').endswith('.strm')][0] r = control.jsonrpc('{"jsonrpc": "2.0", "method": "VideoLibrary.GetEpisodeDetails", "params": {"properties": ["streamdetails", "file"], "episodeid": %s }, "id": 1}' % str(r['episodeid'])) r = unicode(r, 'utf-8', errors='ignore') r = json.loads(r)['result']['episodedetails'] url = r['file'].encode('utf-8') try: quality = int(r['streamdetails']['video'][0]['width']) except: quality = -1 if quality >= 2160: quality = '4K' if quality >= 1440: quality = '1440p' if quality >= 1080: quality = '1080p' if 720 <= quality < 1080: quality = 'HD' if quality < 720: quality = 'SD' info = [] try: f = control.openFile(url) ; s = f.size() ; f.close() s = '%.2f GB' % (float(s)/1024/1024/1024) info.append(s) except: pass try: e = urlparse.urlparse(url).path.split('.')[-1].upper() info.append(e) except: pass info = ' | '.join(info) info = info.encode('utf-8') sources.append({'source': '0', 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'local': True, 'direct': True, 'debridonly': False}) return sources except: return sources def resolve(self, url): return url
One of the strengths of Final Cut Pro X is its comprehensive organization system. From tagging and grouping clips to reconnecting offline media, it’s a pretty impressive setup. When I took the FCPX certification course, a large part of the curriculum was dedicated to early-stage organization, and I spent a lot of time getting the hang of it. Once you do have the hang of it, there’s really no excuse for not organizing your stuff properly at the first stage of an editing workflow. Unless, of course, you are an imperfect human being. It happens. Sometimes you are on a ridiculous deadline and you have footage coming in from 6 different places at 6 different times. Maybe you’re working on a project that combines footage from archived or otherwise unrelated shoots. There are countless reasons why you might find yourself editing a project that includes media from multiple drives, cards, photo libraries, whatever. If this is the case, do not lose hope; you can still consolidate it all in one place, after the fact.

Print designers have it easy. They are human, too; a photo may be on the desktop or a logo in their Dropbox folder. Fortunately, programs like Quark and InDesign have simple one-click methods for packaging projects in a single, dedicated folder. Web designers may have it even easier; not only do most web authoring programs have similar functions, these guys are working with images of 150kb and such.

Final Cut Pro has never had such a simple method for consolidating all your assets after the fact. “Media Management” in FCP7 was awkward and buggy. And yes, FCPX does have a system in place, but it’s far from intuitive. Maybe you can make sense of Apple’s online support, but I don’t think it is either sufficient or clear. So I’m here to help out, drawing from my own experience. I hope this is useful to others who have been, until now, utterly confounded. Here’s how the file consolidation process works in real life.

The Scenario: You started out fine. You shot your footage, converted it, labeled it, and dumped it all on a Firewire or Thunderbolt drive. You did that, right? You imported it properly into FCPX, leaving your ProRes footage in its existing location so as not to clog up that annoying “Movies” folder on your system drive. Well done and good on ya’. But then your client came along with a new hard drive full of new ProRes footage and they needed you to include it in a new version and deliver it by the end of the day. You don’t have time to move all the media to your drive, so you just plug theirs in and start cutting. Let’s make this even uglier: you also have several new photos on your desktop and several new clips on your local hard drive, and they all need to be in your edit, too. Of course, ya jerk, you should have imported and organized everything properly up front, but you were short on time and, like I said, you’re human. At the end of the day, you’ve sent off the new draft, but you still have the problem of your media living in different neighborhoods. Now you need to consolidate it all in one place.

Step One—“Duplicating” is just “Re-Referencing”: First things first: leave everything plugged in! With all your relevant drives/media connected and your project open and free of errors, go back to the project library panel at the lower left corner of the screen. Control-click on your project and from the drop-down menu click “Duplicate Project”.
When FCPX asks you how you want to do this, choose your final destination drive and select the middle option (“Duplicate Project and Referenced Events”). This process can take a little while. If you get the spinning pinwheel of doom, don’t panic; this is not the FCPX beta. If it says that FCPX is “preparing to duplicate” your project, it probably is. Don’t force quit or go searching for preferences to trash, just go get a coffee and relax. Eventually, a new project will appear in your project library. When you click on it, it will take you to your new edit where everything seems to be in place. But it isn’t.

Step Two—Actually MOVE Your Media: What FCPX did in the last step was create a new project and event library on your destination drive, just as you asked it to. But the media is still not really there. If you start unplugging drives and pulling out cards, you’ll find half your project is offline again and you will start tearing your hair out. To confirm this in Finder, look at the new event library on the destination drive. You’ll see that your “original media” is just a bunch of reference files. They are aliases (maybe 10kb apiece) that still direct FCPX to the original location of your footage.

So this part is important: within FCPX, you now need to select the new event on your destination drive. Go to “File > Organize Event Files”. You will get another warning. Click “Continue”. Now FCPX starts doing the real work of copying the actual media to your destination drive. This process can really take a long time depending on what you’re working with. So get another cup of coffee. Screw it, forego the coffee and take a nap. If you want to see the process in action, go back in Finder to that “Original Media” folder on your destination drive, and you’ll see the alias files, one by one, becoming real media files. This is what you wanted all along.

What’s Happening: The Finder screenshots below should give you an idea of what’s going on behind the scenes. Before “organization”, aliases were created and your new project was re-referenced. During organization, the aliases—one by one—became real media files. They start getting bigger. Following organization, all your media is there, right where you want it.

Step Three—Test It: Quit FCPX. Unplug your client’s drive. Drag the footage out of your “Movies” folder and put it somewhere else. Eject your SD cards. In other words, manually disconnect any media you can think of except for the destination drive where you are hoping everything now lives. Restart FCPX and, in your event library, click on the new project on the destination drive. If your project is intact with no disconnected media, you are good to go. Empty the trash or whatever you gotta do.

A Couple Warnings: Of course, I’m not saying you should delete everything in a hurry. Make sure you’re not trashing media you haven’t used yet. Remember: FCPX only copied the footage that is already in use in your edit. Yes, there are ways to do this all from within Finder, and then use FCPX’s “reconnect media” function. But this is a headache and never worked that well for me. Files get renamed or missed altogether. In my experience, FCPX does not want you to use Finder because Apple is a tyrannical monster that aims to take our power and souls from us. The method above is actually the way you’re supposed to do it, only Apple doesn’t explain it well at all in their documentation. Admittedly, I have had to do this on more than one occasion.
I’m working on a fun little short now from a motorbike trip me and my buddy Gary did around the southern suburbs of Saigon, Vietnam. Believe me, the footage comes from everywhere (GoPros, iPhones, DSLR, etc.). Now I want to take that project to a pub, watch the futbol, and finish up this goofy little edit. The above process is exactly what I did, using a small—but fast—little G-Force firewire drive. All good. And it will happen again, I’m sure.

Let me know if this helped you as an editor. Of course, if you have a better way of consolidating and cleaning up a convoluted FCPX edit, I’m all ears. Please share it. And do be nice.
from flax import linen as nn import logging import jax import jax.numpy as jnp import itertools import functools from typing import Tuple, Callable, List, Optional, Iterable, Any from flax.struct import dataclass from evojax.task.base import TaskState from evojax.policy.base import PolicyNetwork from evojax.policy.base import PolicyState from evojax.util import create_logger from evojax.util import get_params_format_fn #@dataclass #class LSTMState: # h: jnp.array # c: jnp.array @dataclass class LayerState: #lstm_state: LSTMState lstm_h: jnp.array lstm_c: jnp.array fwd_msg: jnp.ndarray bwd_msg: jnp.ndarray @dataclass class SymlaPolicyState: layerState:LayerState keys:jnp.array class VSMLRNN(nn.Module): num_micro_ticks:int msg_size:int output_idx:int layer_norm:bool reduce:str output_fn:str def forward_rnn(self,inc_fwd_msg: jnp.ndarray, inc_bwd_msg: jnp.ndarray,fwd_msg:jnp.ndarray,bwd_msg:jnp.ndarray,reward:jnp.ndarray, h:jnp.array,c:jnp.array): carry=(h,c) inputs = jnp.concatenate([inc_fwd_msg,inc_bwd_msg,fwd_msg, bwd_msg,reward], axis=-1) carry,outputs= self._lstm(carry,inputs) h,c=carry fwd_msg = self._fwd_messenger(outputs) bwd_msg = self._bwd_messenger(outputs) # replace layer norm if self.layer_norm: fwd_msg = self._fwd_layer_norm(fwd_msg) bwd_msg = self._bwd_layer_norm(bwd_msg) return h,c,fwd_msg, bwd_msg def setup(self): self._lstm = nn.recurrent.LSTMCell() self._fwd_messenger = nn.Dense(self.msg_size) self._bwd_messenger = nn.Dense(self.msg_size) if self.layer_norm: self._fwd_layer_norm = nn.LayerNorm((-1,), use_scale=True, use_bias=True) self._bwd_layer_norm = nn.LayerNorm((-1,), use_scale=True, use_bias=True) dense_vsml= jax.vmap(self.forward_rnn, in_axes=( 0,None,None,0,None,0,0)) self.dense_vsml = jax.vmap(dense_vsml, in_axes=(None,0,0,None,None,0,0)) if(self.reduce=="mean"): self.reduce_fn=jnp.mean def __call__(self, layer_state: LayerState, reward: jnp.ndarray,last_action: jnp.ndarray, inp: jnp.ndarray): inp=jnp.expand_dims(inp,axis=-1) last_action=jnp.expand_dims(last_action,axis=-1) incoming_fwd_msg = jnp.pad(inp,((0,0),(0,self.msg_size - 1))) incoming_bwd_msg = jnp.pad(last_action, ((0, 0), (0, self.msg_size - 1))) ls=layer_state lstm_h,lstm_c, fwd_msg, bwd_msg = (ls.lstm_h,ls.lstm_c, ls.fwd_msg, ls.bwd_msg) for _ in range(self.num_micro_ticks): lstm_h,lstm_c,fwd_msg, bwd_msg = self.dense_vsml( incoming_fwd_msg,incoming_bwd_msg, fwd_msg, bwd_msg, reward,lstm_h,lstm_c) fwd_msg = self.reduce_fn(fwd_msg, axis=1) bwd_msg = self.reduce_fn(bwd_msg, axis=0) layer_state=LayerState(lstm_h=lstm_h,lstm_c=lstm_c,fwd_msg=fwd_msg,bwd_msg=bwd_msg) out = fwd_msg[:, self.output_idx] if self.output_fn == 'tanh': out = nn.tanh(out) elif self.output_fn == 'softmax': out = nn.softmax(out, axis=-1) else: if(self.output_fn!='categorical'): raise ValueError( 'Unsupported output activation: {}'.format(self.out_fn)) return layer_state, out class SymLA_Policy(PolicyNetwork): def __init__(self,input_dim: int, msg_dim: int, hidden_dim:int, output_dim: int, num_micro_ticks: int, output_act_fn: str ="tanh", logger: logging.Logger=None): if logger is None: self._logger = create_logger(name='SymLAPolicy') else: self._logger = logger model=VSMLRNN(num_micro_ticks=num_micro_ticks,msg_size=msg_dim,output_idx=0,output_fn=output_act_fn,reduce="mean",layer_norm=False) self.hidden_dim=hidden_dim self.msg_dim=msg_dim self.input_dim=input_dim self.output_dim=output_dim self._forward_fn = (jax.vmap(model.apply)) #init h= jnp.zeros((self.output_dim,self.input_dim,self.hidden_dim)) c= 
jnp.zeros((self.output_dim,self.input_dim,self.hidden_dim)) fwd_msg=jnp.zeros((self.output_dim,self.msg_dim)) bwd_msg=jnp.zeros((self.input_dim,self.msg_dim)) layer_state=LayerState(lstm_h=h,lstm_c=c,fwd_msg=fwd_msg,bwd_msg=bwd_msg) reward=jnp.zeros((1)) last_action=jnp.zeros((output_dim)) inp=jnp.zeros((input_dim)) self.params = model.init(jax.random.PRNGKey(0),layer_state=layer_state,reward=reward,last_action=last_action,inp=inp) self.num_params, format_params_fn = get_params_format_fn(self.params) self._logger.info('SymLAPolicy.num_params = {}'.format(self.num_params)) self._format_params_fn = (jax.vmap(format_params_fn)) def reset(self, states: TaskState) -> PolicyState: """Reset the policy. Args: TaskState - Initial observations. Returns: PolicyState. Policy internal states. """ keys = jax.random.split(jax.random.PRNGKey(0), states.obs.shape[0]) h= jnp.zeros((states.obs.shape[0],self.output_dim,self.input_dim,self.hidden_dim)) c= jnp.zeros((states.obs.shape[0],self.output_dim,self.input_dim,self.hidden_dim)) fwd_msg=jnp.zeros((states.obs.shape[0],self.output_dim,self.msg_dim)) bwd_msg=jnp.zeros((states.obs.shape[0],self.input_dim,self.msg_dim)) layer_state=LayerState(lstm_h=h,lstm_c=c,fwd_msg=fwd_msg,bwd_msg=bwd_msg) return SymlaPolicyState(layerState=layer_state,keys=keys) def get_actions(self,t_states: TaskState,params: jnp.ndarray,p_states: PolicyState): params = self._format_params_fn(params) layer_state,out=self._forward_fn(params,p_states.layerState, t_states.reward,t_states.last_action,t_states.obs) return out, SymlaPolicyState(keys=p_states.keys,layerState=layer_state)
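A smoke test for the policy might look like the following. This is a hypothetical sketch: the dimensions are arbitrary, and SimpleNamespace stands in for the evojax TaskState, carrying only the obs, reward, and last_action fields that reset and get_actions actually read.

from types import SimpleNamespace
import jax.numpy as jnp

policy = SymLA_Policy(input_dim=4, msg_dim=8, hidden_dim=16,
                      output_dim=2, num_micro_ticks=2)

batch = 3
states = SimpleNamespace(
    obs=jnp.zeros((batch, 4)),
    reward=jnp.zeros((batch, 1)),
    last_action=jnp.zeros((batch, 2)),
)
p_state = policy.reset(states)

# One flat parameter vector per population member, as evolved params.
params = jnp.zeros((batch, policy.num_params))
actions, p_state = policy.get_actions(states, params, p_state)
print(actions.shape)  # expected: (3, 2)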
package empire

import (
	"io"

	"github.com/remind101/empire/pkg/service"
	"golang.org/x/net/context"
)

type ProcessRunOpts struct {
	Command string

	// If provided, input will be read from this.
	Input io.Reader

	// If provided, output will be written to this.
	Output io.Writer

	// Extra environment variables to set.
	Env map[string]string
}

type runnerService struct {
	store   *store
	manager service.Manager
}

func (r *runnerService) Run(ctx context.Context, app *App, opts ProcessRunOpts) error {
	release, err := r.store.ReleasesFirst(ReleasesQuery{App: app})
	if err != nil {
		return err
	}

	a := newServiceApp(release)
	p := newServiceProcess(release, NewProcess("run", Command(opts.Command)))

	for k, v := range opts.Env {
		p.Env[k] = v
	}

	return r.manager.Run(ctx, a, p, opts.Input, opts.Output)
}
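Run spawns a one-off process from the latest release, merges extra environment variables over the process env, and attaches the caller's input/output streams. That shape maps loosely onto something like this in Python (a rough analogue, not Empire's API):

import os
import subprocess

def run_one_off(command, extra_env, stdin=None, stdout=None):
    # Merge extra variables over the base environment, as Run() merges
    # opts.Env into the process env, then attach the caller's streams.
    env = dict(os.environ)
    env.update(extra_env)
    return subprocess.run(command, shell=True, env=env,
                          stdin=stdin, stdout=stdout)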
//
//  BDCloudAVARFaceBeautyFilter.h
//  BDCloudAVFaceSticker
//
//  Created by baidu on 2019/2/25.
//  Copyright © 2019 Baidu. All rights reserved.
//

#import <GPUImage/GPUImageFilter.h>

NS_ASSUME_NONNULL_BEGIN

@interface BDCloudAVARFaceBeautyFilter : GPUImageFilter

/** Skin-beautifying level */
@property(nonatomic, assign) CGFloat beautyLevel;

/** Skin-smoothing level */
@property(nonatomic, assign) CGFloat skinLevel;

/** Eye-enlarging level */
@property(nonatomic, assign) CGFloat adjustEyeLevel;

/** Face-slimming level */
@property(nonatomic, assign) CGFloat adjustFaceLevel;

/** Color filter level */
@property(nonatomic, assign) CGFloat adjustNormalLevel;

/**
 Set a face sticker.

 @param stickerPath sticker path.
 @param type sticker type.
 */
- (void)setSticker:(NSString *)stickerPath type:(NSString *)type;

/**
 Switch the color filter.

 @param filterId filter id.
 */
- (void)switchFilter:(NSString *)filterId;

@end

NS_ASSUME_NONNULL_END
President Obama on Monday will meet with financial regulators to discuss progress on the implementation of the Dodd-Frank reform bill, the White House announced.

In his first day back in Washington after a weeklong vacation in Martha's Vineyard, Obama will meet in the West Wing with directors from a slew of federal agencies tasked with regulating the nation's financial system. Representatives from the Consumer Financial Protection Bureau, Federal Housing Finance Agency, Federal Reserve system, Commodity Futures Trading Commission (CFTC), and the Federal Deposit Insurance Corporation (FDIC) will be among those in attendance. The Treasury Department said that Secretary Jack Lew would also attend the closed-press meeting.

The gathering comes just over three years after Obama signed the Dodd-Frank reform bill into law, and amid a renewed push by the administration to fully implement the measure's regulations. Of the roughly 400 new industry rules to be written under the 3,200-page bill, only 39 percent have been completed, according to CNBC. Lew told the network last month that fully implementing the law would be a top priority of the administration in the coming months. "By the end of this year, the core elements of the Dodd-Frank Act will be substantially in place," Lew said.

Obama also reportedly spent time during his vacation weighing whom to tap as the next head of the Federal Reserve, with speculation about who will replace Ben Bernanke swirling around Washington. Congressional Democrats have voiced support for current Fed Vice Chairwoman Janet Yellen, while the president has acknowledged also considering former Treasury Secretary Larry Summers for the job. In a closed-door meeting with Senate Democrats last month, Obama defended Summers from criticism over remarks he made about women while serving as president of Harvard and for his role in the deregulatory push under President Clinton. Summers has come under particular fire for his role blocking the CFTC from publicly examining derivatives trading.

In a press conference shortly before leaving for vacation, Obama said his nomination would be "definitely one of the most important economic decisions that I'll make in the remainder of my presidency" and called both Yellen and Summers "highly qualified candidates."

"I want a Fed chairman who's able to look at those issues and have a perspective that keeps an eye on inflation, makes sure that we're not seeing artificial bubbles in place, but also recognizing, you know what, a big part of my job right now is to make sure the economy is growing quickly and robustly, and is sustained and durable, so that people who work hard in this country are able to find a job," Obama said.

Monday's meeting with financial regulators also comes as the president is making a push on the economy, with campaign-style events around the country to tout new proposals. In a speech earlier this month in Phoenix, Obama called for a reduced federal role in the mortgage market. The president proposed winding down Fannie Mae and Freddie Mac, a move that would dramatically reshape how borrowing for home purchases would operate.
package io.github.itstaylz.hexlib.storage.files;

import org.bukkit.configuration.file.YamlConfiguration;

import java.io.File;
import java.io.IOException;

public class YamlFile extends FileBase {

    private YamlConfiguration config;

    public YamlFile(File file) {
        super(file);
        reloadConfig();
    }

    public void reloadConfig() {
        this.config = YamlConfiguration.loadConfiguration(getFile());
    }

    public void save() {
        try {
            this.config.save(getFile());
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public boolean contains(String path) {
        return this.config.contains(path);
    }

    public void set(String path, Object value) {
        this.config.set(path, value);
    }

    public void setIfNull(String path, Object value) {
        if (!contains(path))
            set(path, value);
    }

    public Object get(String path) {
        return this.config.get(path);
    }

    public <T> T get(String path, Class<T> returnTypeClass) {
        return (T) get(path);
    }

    public <T> T getOrDefault(String path, T defaultValue) {
        if (contains(path))
            return (T) get(path);
        return defaultValue;
    }

    public YamlConfiguration getConfig() {
        return config;
    }
}
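The setIfNull/getOrDefault convenience pattern translates directly to other config stacks. For comparison, a small sketch with PyYAML in Python (the file name and dotted-path syntax are assumptions of this sketch):

import yaml  # PyYAML

def set_if_null(doc, path, value):
    # Dotted-path cousin of YamlFile.setIfNull: write only when absent.
    *parents, leaf = path.split(".")
    node = doc
    for key in parents:
        node = node.setdefault(key, {})
    node.setdefault(leaf, value)

with open("config.yml") as f:           # hypothetical file
    doc = yaml.safe_load(f) or {}
set_if_null(doc, "settings.locale", "en")
with open("config.yml", "w") as f:
    yaml.safe_dump(doc, f)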
package cmds

import (
	"go/ast"

	"github.com/buypal/oapi-go/internal/oapi/resolver"
	"github.com/buypal/oapi-go/internal/oapi/spec"
	"github.com/pkg/errors"
	"golang.org/x/tools/go/packages"
)

// Scanner allows scanning Go code for commands.
// Commands are specific comments that alter the
// behaviour of the scanner.
type Scanner struct {
	Commands Map
}

// NewScanner creates a new scanner.
func NewScanner() *Scanner {
	return &Scanner{
		Commands: make(Map),
	}
}

// Scan will scan a package and store its info.
func (r *Scanner) Scan(pkg *packages.Package) (err error) {
	groups := []*ast.CommentGroup{}
	comments := []string{}

	for _, s := range pkg.Syntax {
		groups = append(groups, s.Comments...)
	}

	for _, g := range groups {
		comments = append(comments, ParseCommentGroup(g)...)
	}

	var cc List
	for _, c := range comments {
		x, err := Parse(pkg, c)
		if err != nil {
			return errors.Wrapf(err, "failed to parse openapi syntax")
		}
		cc = append(cc, x)
	}

	r.Commands[pkg.PkgPath] = cc
	return
}

// ExportedComponents will provide exported components in the form
// of resolver.Exports.
func (r *Scanner) ExportedComponents() (exports resolver.Exports, err error) {
	var cc List
	for _, cmd := range r.Commands {
		cc = append(cc, cmd...)
	}

	for _, cmd := range cc {
		switch x := cmd.(type) {
		case CmdSchema:
			if _, ok := exports.Get(x.Ptr); ok {
				err = errors.Errorf("schema of type %q already registered", x.Name)
				return
			}
			entity := resolver.Entity{
				Entity: spec.SchemaKind,
				Name:   x.Name,
			}
			exp := resolver.Pointer{
				Pointer: x.Ptr,
				Entity:  entity,
			}
			exports = append(exports, exp)
		}
	}
	return
}
IDIOPATHIC SCLEROCHOROIDAL CALCIFICATIONS: A CASE REPORT.

AIM: Sclerochoroidal calcifications (SCHC) are an uncommon, benign ocular condition that occurs in elderly patients. SCHC usually manifest as multiple placoid yellow lesions in the midperipheral fundus, most often in the upper temporal quadrant. They are asymptomatic and often discovered during routine eye examinations in a patient with good visual acuity and visual field. According to etiology, SCHC are divided into idiopathic, metastatic and dystrophic.

CASE REPORTS: This is a case report of two patients with idiopathic SCHC who underwent basic eye examinations, fundus photography, optical coherence tomography, ultrasonography, fluorescein angiography, fundus autofluorescence and laboratory screening, and, in the second case, also a CT head scan.

CONCLUSION: The aim of this publication is to point out the typical features of SCHC and their distinction from the more serious conditions that they may resemble.
/**
 * For an option opt, recreate the command-line option, appending it to
 * opt->value which must be an argv_array. This is useful when we need to pass
 * the command-line option, which can be specified multiple times, to another
 * command.
 */
int parse_opt_passthru_argv(const struct option *opt, const char *arg, int unset)
{
	static struct strbuf sb = STRBUF_INIT;
	struct argv_array *opt_value = opt->value;

	if (recreate_opt(&sb, opt, arg, unset) < 0)
		return -1;

	argv_array_push(opt_value, sb.buf);

	return 0;
}
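The underlying idea, re-serializing an already-parsed, repeatable option so it can be forwarded verbatim to a child command, is easy to mirror outside C. A generic Python sketch (option names and formatting are illustrative, not git's):

def passthru_argv(name, values, unset=False):
    # Recreate "--name=value" tokens (or the negated form) for forwarding.
    if unset:
        return ["--no-" + name]
    return ["--%s=%s" % (name, v) for v in values]

# passthru_argv("config", ["a=1", "b=2"]) -> ['--config=a=1', '--config=b=2']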
/** * The MediaAPI is a set of API calls that will deal with getting information * out of the {@link IMediaFile} and {@link IMediaFolder} objects. * <p/> * In time, we should have completely wrapped the SageTV {@link MediaFileAPI}. * The reason is that we want to abstract the fact that we are dealing with sage * tv media files, since we may not always be dealing with SageTV media files. * For example, if a trailers application is written, they may provider a vfs * implementation for there {@link IMediaFile} and {@link IMediaFolder} * implementation and they would still work with these apis and be navigatable * and playable via the UI * * @author seans */ @API(group = "media") public class MediaAPI { private final Logger log = Logger.getLogger(MediaAPI.class); private static class MetadataResolver implements IVariableResolver<IMediaFile> { private Map<String, SageProperty> props = MetadataUtil.getProperties(IMetadata.class); @Override public String resolve(IMediaFile file, String varName) { if (file == null || file.getMediaObject() == null || varName == null) { return null; } IMetadata md = file.getMetadata(); SageProperty key = props.get(varName); if (key == null) { Loggers.LOG.warn("MetadataResolver: Not a valid property name " + varName); return null; } String s = md.get(key); // sometimes the Title is blank for, so we'll use the EpisodeName if (StringUtils.isEmpty(s) && "EpisodeName".equals(varName)) { s = md.get(props.get("Title")); } return s; } } private MetadataResolver metadataResolver = new MetadataResolver(); /** * Given the media file, return a formatted title. The title is formatted * according to several different criteria. A title can have a title mask if * it's a Recording, TV Show, Video or Multi-CD video. * <p/> * Title Masks are processed by the {@link TextReplacement} tool and masks * can contain formatting instructions, based on {@link MessageFormat}, * {@link SimpleDateFormat}, or {@link DecimalFormat}. 
* <p/> * Media Masks can be configured using the {@link MediaConfiguration} fields * * @param mediaFile {@link IMediaFile} or native SageTV media file object * @return */ public String GetFormattedTitle(Object mediaFile) { IMediaFile mf = GetMediaFile(mediaFile); if (mf == null) return null; String title = null; try { MediaType mt = null; MediaConfiguration cfg = GroupProxy.get(MediaConfiguration.class); String mask = null; if (mf.isType(MediaResourceType.RECORDING.value()) || mf.isType(MediaResourceType.EPG_AIRING.value())) { if (MetadataUtil.isRecordedMovie(mf)) { mask = cfg.getMovieTitleMask(); mt = MediaType.MOVIE; } else { mask = cfg.getRecordingTitleMask(); mt = MediaType.TV; } } else if (mf.isType(MediaResourceType.TV.value())) { if (mf.getMetadata().getDiscNumber() > 0) { mask = cfg.getTvTitleMaskMultiCD(); } else { mask = cfg.getTvTitleMask(); } mt = MediaType.TV; } else { if (mf.getMetadata().getDiscNumber() > 0) { mask = cfg.getMovieTitleMaskMultiCD(); } else { mask = cfg.getMovieTitleMask(); } mt = MediaType.MOVIE; } if (mask == null) { log.warn("Failed to get a title mask for " + mediaFile); return sagex.phoenix.util.StringUtils.fixTitle(mf.getTitle()); } if (mt == MediaType.MOVIE) { if (mf.getMetadata().getYear() <= 1800) { log.warn("Invalid year for Movie, so ignoring Title Mask for " + mediaFile); if (!StringUtils.isEmpty(mf.getMetadata().getEpisodeName())) { return sagex.phoenix.util.StringUtils.fixTitle(mf.getMetadata().getEpisodeName()); } else { // just return the title return sagex.phoenix.util.StringUtils.fixTitle(mf.getTitle()); } } } if (mt == MediaType.TV) { if (mf.getMetadata().getSeasonNumber() <= 0) { log.warn("Invalid Season for TV, so ignoring Title Mask for " + mediaFile); // use the recording mask, because the recording mask uses // title // and episode name mask = cfg.getRecordingTitleMask(); // return mf.getTitle(); } } title = TextReplacement.replaceVariables(mask, mf, metadataResolver); } catch (Throwable t) { log.warn("GetFormattedTitle failed for " + mediaFile, t); title = mf.getTitle(); } return sagex.phoenix.util.StringUtils.fixTitle(title); } /** * Given a mediaFile (or any type) return a native Sage MediaFile object. If * the input is a sage media object, then it is returned. If it's not, then * the object is converted into a sage media file, and then returned. * * @param mediaFile * @return sage's native media object, or null if this is not a native * sagetv mediafile */ public Object GetSageMediaFile(Object mediaFile) { if (mediaFile == null) return null; if (mediaFile instanceof SageMediaFile) { return ((SageMediaFile) mediaFile).getMediaObject(); } else if (mediaFile instanceof ViewItem) { return GetSageMediaFile(((ViewItem) mediaFile).getDecoratedItem()); } else if (sagex.api.MediaFileAPI.IsMediaFileObject(mediaFile) || sagex.api.AiringAPI.IsAiringObject(mediaFile)) { return mediaFile; } else if (mediaFile instanceof IMediaFile) { File f = PathUtils.getFirstFile((IMediaFile) mediaFile); if (f != null) { return MediaFileAPI.GetMediaFileForFilePath(f); } } else if (mediaFile instanceof File) { return MediaFileAPI.GetMediaFileForFilePath((File) mediaFile); } else if (mediaFile instanceof Integer) { Object o = MediaFileAPI.GetMediaFileForID((Integer) mediaFile); if (o == null) { o = AiringAPI.GetAiringForID((Integer) mediaFile); } return o; } if (log.isDebugEnabled()) { log.warn("GetSageMediaFile() failed for: " + mediaFile); } return null; } /** * Returns the FileSystem File object for this media file. 
In the event that * there are multiple files, then only the first one is returned. * * @param mediaFile Sage MediaFile of VFS MediaFile * @return File if one exists. */ public File getFileSystemMediaFile(Object mediaFile) { if (mediaFile instanceof IMediaFile) { return PathUtils.getFirstFile((IMediaFile) mediaFile); } else if (MediaFileAPI.IsMediaFileObject(mediaFile)) { return MediaFileAPI.GetFileForSegment(mediaFile, 0); } else if (mediaFile instanceof File) { return (File) mediaFile; } log.warn("Failed to get File System File for media object: " + mediaFile); return null; } /** * Given the mediafile return a {@link IMediaResource}. The media resource * may be a file or folder. If the object is not a {@link IMediaResource} * then it is converted into a {@link IMediaResource} and then returned. * * @param mediaFile * @return */ public IMediaResource GetMediaResource(Object mediaFile) { if (mediaFile instanceof IMediaResource) { return (IMediaResource) mediaFile; } else if (sagex.api.MediaFileAPI.IsMediaFileObject(mediaFile) || sagex.api.AiringAPI.IsAiringObject(mediaFile)) { return (new SageMediaFile(null, mediaFile)); } return null; } /** * Given the object return it as a {@link IMediaFile} object. If the object * cannot be converted to a {@link IMediaFile}, then null is returned. * * @param mediaFile * @return */ public IMediaFile GetMediaFile(Object mediaFile) { if (mediaFile instanceof IMediaFile) { return (IMediaFile) mediaFile; } else if (sagex.api.MediaFileAPI.IsMediaFileObject(mediaFile)) { return (new SageMediaFile(null, mediaFile)); } else if (AiringAPI.IsAiringObject(mediaFile)) { return (new SageMediaFile(null, mediaFile)); } else if (mediaFile instanceof Integer) { Object mf = MediaFileAPI.GetMediaFileForID((Integer) mediaFile); if (mf != null) { return new SageMediaFile(null, mf); } } if (mediaFile != null) { log.debug("Failed to Create IMediaFile object from " + mediaFile); } return null; } /** * returns the Album for the Given Media File * * @param album MediaFile Object * @return */ public IAlbumInfo GetAlbum(Object album) { if (album == null) return null; if (album instanceof IAlbumInfo) { return (IAlbumInfo) album; } if (album instanceof IMediaFile) { return ((IMediaFile) album).getAlbumInfo(); } return null; } /** * return true if the media type is one of the ones listed in the * {@link MediaResourceType} constants * * @param type {@link MediaResourceType} contant as a String * @return true if the media has the given type */ public boolean IsMediaType(Object file, String type) { if (file == null) return false; MediaResourceType rt = MediaResourceType.toMediaResourceType(type); if (rt == null) return false; IMediaResource r = GetMediaResource(file); if (r != null) { return r.isType(rt.value()); } return false; } /** * returns true if the media is an Online Video type * * @param file * @return */ public boolean IsOnlineVideo(Object file) { return IsMediaType(file, MediaResourceType.ONLINE.name()); } /** * Returns true if the media file is a Dummy node, ie, cannot be played, * just informational placeholder. * * @param file * @return */ public boolean IsDummyVideo(Object file) { return IsMediaType(file, MediaResourceType.DUMMY.name()); } /** * Returns true if the media file is a Missing TV item, ie, cannot be played, * just a placeholder for one or more missing TV Episodes. 
* * @param file * @return */ public boolean IsMissingTV(Object file) { return IsMediaType(file, MediaResourceType.MISSINGTV.name()); } /** * returns true if the given media file is a Playon File * * @param file * @return */ public boolean IsPlayonVideo(Object file) { File seg = phoenix.media.GetFileSystemMediaFile(file); if (seg == null) return false; File f = new File(seg.getParentFile(), seg.getName() + ".playon"); return f != null && f.exists() && f.length() > 0; } public ICastMember GetCastMember(Object cm) { if (cm == null) return null; if (cm instanceof ICastMember) { return ((ICastMember) cm); } log.warn("GetCastMember(): Invalid Object Type: " + cm); return null; } /** * Resets the custom metadata for the given media file object. This is * useful when you fanart metadata is wrong and you want to forget it. * * @param mediaFile */ public void ClearCustomMetadata(Object mediaFile) { IMediaFile mf = phoenix.media.GetMediaFile(mediaFile); mf.accept(new ClearCustomMetadataFieldsVisitor(), NullProgressMonitor.INSTANCE, IMediaResource.DEEP_UNLIMITED); } /** * Given a MediaFile return the SeriesInfo, if it's been assigned. * * @param mediaFile * @return */ public ISeriesInfo GetSeriesInfo(IMediaFile mediaFile) { if (mediaFile == null) return null; boolean update = false; IMetadata md = mediaFile.getMetadata(); Object seriesInfo = null; int id = md.getSeriesInfoID(); if (id > 0) { seriesInfo = SeriesInfoAPI.GetSeriesInfoForID(String.valueOf(id)); if (seriesInfo != null) return new SageSeriesInfo(seriesInfo); } // we don't have a series info yet, so udpate it when we do update = true; // Ask sagetv for it seriesInfo = ShowAPI.GetShowSeriesInfo(phoenix.media.GetSageMediaFile(mediaFile)); if (seriesInfo == null) { // still no series info, check the provider id id = TVSeriesUtil.createNewSeriesInfoId(md.getMediaProviderID(), md.getMediaProviderDataID()); seriesInfo = SeriesInfoAPI.GetSeriesInfoForID(String.valueOf(id)); } if (seriesInfo != null) { if (update) { md.setSeriesInfoID(NumberUtils.toInt(SeriesInfoAPI.GetSeriesID(seriesInfo))); } return new SageSeriesInfo(seriesInfo); } // nothing found return null; } /** * return the duration of this mediaitem * * @return */ public long GetDuration(IMediaFile mf) { if (mf == null) return 0; return AiringAPI.GetAiringDuration(mf.getMediaObject()); } /** * Return the native file or url of the given mediafile. * * @param file * @return file/url or empty String */ public String GetNativeFile(IMediaFile file, String defaultFile) { if (file == null) return defaultFile; String val = null; if (file instanceof DecoratedMediaFile) { return GetNativeFile(((DecoratedMediaFile) file).getDecoratedItem(), defaultFile); } else if (file instanceof HasPlayableUrl) { val = ((HasPlayableUrl) file).getUrl(); } else { File f = PathUtils.getFirstFile(file); if (f != null) { val = f.getAbsolutePath(); } } if (val == null) { return defaultFile; } return val; } /** * Returns the Native File/Url, but only a path containing the last maxlen * characters. 
* * @param file * @param defaultFile * @param maxlen * @return */ public String GetNativeFile(IMediaFile file, String defaultFile, int maxlen) { String f = GetNativeFile(file, defaultFile); if (f != null && f.length() > maxlen) { return f.substring(f.length() - maxlen); } return f; } /** * Returns all child files from the given folder and it's subfolders * * @param folder * @return */ public List GetAllChildren(IMediaFolder folder) { return GetAllChildren(folder, Integer.MAX_VALUE, MediaResourceType.FILE.name()); } /** * Returns all child files from the given folder and it's subfolders but * only for a max number of items * * @param folder * @param max max number of items to return * @return */ public List GetAllChildren(IMediaFolder folder, int max) { return GetAllChildren(folder, max, MediaResourceType.FILE.name()); } /** * Returns a list of all children matching the {@link MediaResourceType} for * a max number of results. * * @param folder {@link IMediaFolder} to scan * @param max max number of results * @param type {@link MediaResourceType} * @return list of resources */ public List GetAllChildren(IMediaFolder folder, int max, String type) { if (folder == null) return null; // don't process online videos if (folder.isType(MediaResourceType.ONLINE.value())) { return folder.getChildren(); } // collect all files CollectorResourceVisitor crv = new CollectorResourceVisitor(max, MediaResourceType.toMediaResourceType(type)); folder.accept(crv, new BasicProgressMonitor(), IMediaResource.DEEP_UNLIMITED); return crv.getCollection(); } /** * Returns the path relative to the root folder, whereas GetPath() return * the complete path including the root folder name * * @param res * @param relativeToRoot * @return */ public String GetPath(IMediaResource res, boolean relativeToRoot) { String path = res.getPath(); if (path == null || !relativeToRoot) { return path; } // skip first char, since it is always a / int pos = path.indexOf('/', 1); if (pos == -1) return null; // is it's ourself, so we are not relative to ourself return path.substring(pos); } /** * Safely gets the count of items in a Video Folder. For online videos this * will NOT force the children to be loaded, and the size will return 0 if * the files have not been loaded * * @param folder * @return */ public int GetCount(IMediaFolder folder) { if (folder == null) return 0; if (folder instanceof OnlineViewFolder) { return ((OnlineViewFolder) folder).count(); } if (phoenix.umb.IsOnlineFolder(folder)) { return 0; } return folder.getChildren().size(); } }
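The title-mask machinery above boils down to two steps: pick a mask by media type, then substitute metadata fields into it. A generic Python illustration of the substitution step (the mask syntax and field values here are hypothetical, not the phoenix TextReplacement format):

from string import Template

metadata = {"Title": "Some Show", "SeasonNumber": 2,
            "EpisodeNumber": 5, "EpisodeName": "Pilot"}  # made-up values

mask = Template("$Title - S${SeasonNumber}E${EpisodeNumber} - $EpisodeName")
print(mask.safe_substitute(metadata))
# Some Show - S2E5 - Pilot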
import styled from "styled-components";

import { Colors } from "../../styles/colors";

interface ITextCont {
  bgColor?: string;
  padding?: string;
}

export const TextCont = styled.div<ITextCont>`
  background-color: ${props => (props.bgColor ? props.bgColor : Colors.white)};
  padding: ${props => (props.padding ? props.padding : 0)};
`;
#ifndef VOXBLOX_GSM_CONVERSIONS_H_
#define VOXBLOX_GSM_CONVERSIONS_H_

#include <string>
#include <vector>

#include <Eigen/Core>
#include <Eigen/Geometry>
#include <geometry_msgs/Transform.h>
#include <geometry_msgs/TransformStamped.h>
#include <glog/logging.h>
#include <pcl/point_types.h>
#include <visualization_msgs/Marker.h>
#include <voxblox/core/common.h>
#include <voxblox/io/sdf_ply.h>
// Assumed header location for the vpp_msgs::BoundingBox message used below.
#include <vpp_msgs/BoundingBox.h>

namespace voxblox {
namespace voxblox_gsm {

inline void fillAlignedBoundingBoxMsg(Eigen::Vector3f bbox_translation,
                                      Eigen::Quaternionf bbox_quaternion,
                                      Eigen::Vector3f bbox_size,
                                      vpp_msgs::BoundingBox* bounding_box_msg) {
  CHECK_NOTNULL(bounding_box_msg);

  bounding_box_msg->pose.position.x = bbox_translation(0);
  bounding_box_msg->pose.position.y = bbox_translation(1);
  bounding_box_msg->pose.position.z = bbox_translation(2);

  bounding_box_msg->pose.orientation.x = bbox_quaternion.x();
  bounding_box_msg->pose.orientation.y = bbox_quaternion.y();
  bounding_box_msg->pose.orientation.z = bbox_quaternion.z();
  bounding_box_msg->pose.orientation.w = bbox_quaternion.w();

  bounding_box_msg->dimensions.x = bbox_size(0);
  bounding_box_msg->dimensions.y = bbox_size(1);
  bounding_box_msg->dimensions.z = bbox_size(2);
}

inline void fillBoundingBoxMarkerMsg(std::string world_frame, uint32_t id,
                                     Eigen::Vector3f bbox_translation,
                                     Eigen::Quaternionf bbox_quaternion,
                                     Eigen::Vector3f bbox_size,
                                     visualization_msgs::Marker* bbox_marker) {
  CHECK_NOTNULL(bbox_marker);

  bbox_marker->header.frame_id = world_frame;
  bbox_marker->header.stamp = ros::Time();
  bbox_marker->id = id;
  bbox_marker->type = visualization_msgs::Marker::CUBE;
  bbox_marker->action = visualization_msgs::Marker::ADD;

  bbox_marker->pose.position.x = bbox_translation(0);
  bbox_marker->pose.position.y = bbox_translation(1);
  bbox_marker->pose.position.z = bbox_translation(2);

  bbox_marker->pose.orientation.x = bbox_quaternion.x();
  bbox_marker->pose.orientation.y = bbox_quaternion.y();
  bbox_marker->pose.orientation.z = bbox_quaternion.z();
  bbox_marker->pose.orientation.w = bbox_quaternion.w();

  bbox_marker->scale.x = bbox_size(0);
  bbox_marker->scale.y = bbox_size(1);
  bbox_marker->scale.z = bbox_size(2);

  bbox_marker->color.a = 0.3;
  bbox_marker->color.r = 0.0;
  bbox_marker->color.g = 1.0;
  bbox_marker->color.b = 0.0;

  bbox_marker->lifetime = ros::Duration();
}

inline void fillBoundingBoxTfMsg(std::string world_frame,
                                 std::string child_frame,
                                 Eigen::Vector3f bbox_translation,
                                 Eigen::Quaternionf bbox_quaternion,
                                 geometry_msgs::TransformStamped* bbox_tf) {
  // Null check added for consistency with the other fill functions.
  CHECK_NOTNULL(bbox_tf);

  bbox_tf->header.stamp = ros::Time();
  bbox_tf->header.frame_id = world_frame;
  bbox_tf->child_frame_id = child_frame;

  bbox_tf->transform.translation.x = bbox_translation(0);
  bbox_tf->transform.translation.y = bbox_translation(1);
  bbox_tf->transform.translation.z = bbox_translation(2);

  bbox_tf->transform.rotation.x = bbox_quaternion.x();
  bbox_tf->transform.rotation.y = bbox_quaternion.y();
  bbox_tf->transform.rotation.z = bbox_quaternion.z();
  bbox_tf->transform.rotation.w = bbox_quaternion.w();
}

inline void convertVoxelGridToPointCloud(
    const voxblox::Layer<voxblox::TsdfVoxel>& tsdf_voxels,
    const MeshIntegratorConfig& mesh_config,
    pcl::PointCloud<pcl::PointSurfel>* surfel_cloud) {
  CHECK_NOTNULL(surfel_cloud);

  static constexpr bool kConnectedMesh = false;
  voxblox::Mesh mesh;
  io::convertLayerToMesh(tsdf_voxels, mesh_config, &mesh, kConnectedMesh);

  surfel_cloud->reserve(mesh.vertices.size());

  size_t vert_idx = 0u;
  for (const voxblox::Point& vert : mesh.vertices) {
    pcl::PointSurfel point;
    point.x = vert(0);
    point.y = vert(1);
    point.z = vert(2);

    if (mesh.hasColors()) {
      const voxblox::Color& color = mesh.colors[vert_idx];
      point.r = static_cast<int>(color.r);
      point.g = static_cast<int>(color.g);
      point.b = static_cast<int>(color.b);
    }

    if (mesh.hasNormals()) {
      const voxblox::Point& normal = mesh.normals[vert_idx];
      point.normal_x = normal(0);
      point.normal_y = normal(1);
      point.normal_z = normal(2);
    } else {
      LOG(FATAL) << "Mesh doesn't have normals.";
    }

    surfel_cloud->push_back(point);
    ++vert_idx;
  }

  surfel_cloud->is_dense = true;
  surfel_cloud->width = surfel_cloud->points.size();
  surfel_cloud->height = 1u;
}

}  // namespace voxblox_gsm
}  // namespace voxblox
#endif  // VOXBLOX_GSM_CONVERSIONS_H_
package WarmUp;

public class Main {

    /*
     * Complete the staircase function below.
     */
    static void staircase(int n) {
        // Print n rows of a right-aligned staircase: row i gets (i + 1)
        // trailing '#' characters, left-padded with spaces.
        for (int i = 0; i < n; i++) {
            for (int y = 0; y < n; y++) {
                if (y > n - i - 2) {
                    System.out.print("#");
                } else {
                    System.out.print(" ");
                }
            }
            System.out.println();
        }
    }

    public static void main(String[] args) {
        staircase(6);
    }
}
/**
 * Activity that holds the selected or clicked ImageView to view or edit it.
 */
public class ViewAndEditImageActivity extends AppCompatActivity {

    private ImageView image;
    private String[] filesPaths;
    private Bitmap bmp;
    File imageFile;
    Integer position;
    File[] files;
    private String resultText;
    private Uri fileUri;
    FirebaseFirestore db;
    FirebaseAuth mFirebaseAuth;
    private StorageReference mStorageRef;
    private StorageTask mUploadTask;
    public LocationClient mLocationClient = null;
    private MyLocationListener myListener = new MyLocationListener();
    private List<StoreBean> storeBeans;
    private StoreBean storeBean;
    List<String> stores;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_view_and_edit_image);
        image = (ImageView) findViewById(R.id.imageView);
        db = FirebaseFirestore.getInstance();
        mFirebaseAuth = FirebaseAuth.getInstance();

        // get position from the calling activity
        Intent intent = getIntent();
        position = intent.getIntExtra("position", 0);

        // get all the image files stored
        File mFile = this.getExternalFilesDir(null);
        files = mFile.listFiles();
        filesPaths = new String[files.length];
        for (int i = 0; i < files.length; i++) {
            filesPaths[i] = files[i].getAbsolutePath();
        }
        try {
            fileUri = Uri.fromFile(files[files.length - 1]);
        } catch (Exception e) {
            e.printStackTrace();
        }

        // Reference to the Firebase Storage
        mStorageRef = FirebaseStorage.getInstance().getReference("uploads");

        if (position == -1) {
            try {
                ((Button) findViewById(R.id.btnSaveStore)).setVisibility(View.GONE);
                detectLabel();
                // get the bmp of the image to be viewed
                bmp = BitmapFactory.decodeFile(filesPaths[files.length - 2]);
                imageFile = files[files.length - 2];
            } catch (Exception e) {
                e.printStackTrace();
            }
        } else {
            ((Button) findViewById(R.id.btnSaveStore)).setVisibility(View.VISIBLE);
            getData();
            bmp = BitmapFactory.decodeFile(filesPaths[position]);
            imageFile = files[position];
        }

        // Set the image to the ImageView
        image.setImageBitmap(bmp);

        initFirestore();
        storeBeans = new ArrayList<>();
        storeBean = new StoreBean();
        mLocationClient = new LocationClient(getApplicationContext());
        mLocationClient.registerLocationListener(myListener);
        initLocation();
        mLocationClient.start();

        // get the stores in the database to help auto-fill the store TextView
        stores = new ArrayList<>();
        db.collection("users").document("storeData").collection("data").get()
                .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() {
                    @Override
                    public void onComplete(@NonNull Task<QuerySnapshot> task) {
                        if (task.isSuccessful()) {
                            for (QueryDocumentSnapshot document : task.getResult()) {
                                stores.add(document.getId());
                            }
                            ArrayAdapter<String> adapter = new ArrayAdapter<String>(ViewAndEditImageActivity.this,
                                    android.R.layout.select_dialog_singlechoice, stores);
                            AutoCompleteTextView acTextView = (AutoCompleteTextView) findViewById(R.id.editViewStore);
                            // Set the number of characters the user must type before the drop down list is shown
                            acTextView.setThreshold(1);
                            // Set the adapter
                            acTextView.setAdapter(adapter);
                        } else {
                            Log.w(null, "Error getting documents.", task.getException());
                        }
                    }
                });
    }

    @Override
    public void onBackPressed() {
        ClosePreviewActivity(null);
    }

    /**
     * Get all the product details for this image from the store.
     */
    public void getData() {
        db.collection("users").document(mFirebaseAuth.getCurrentUser().getUid()).collection("product")
                .get()
                .addOnCompleteListener(new OnCompleteListener<QuerySnapshot>() {
                    @Override
                    public void onComplete(@NonNull Task<QuerySnapshot> task) {
                        if (task.isSuccessful()) {
                            for (QueryDocumentSnapshot document : task.getResult()) {
                                Log.d(null, document.getId() + " => " + document.getData());
                                Map<String, Object> data = document.getData();
                                if (data.get("imageFilePath") != null) {
                                    if (imageFile.getAbsolutePath().equals(data.get("imageFilePath"))) {
                                        ((EditText) findViewById(R.id.editViewName)).setText(data.get("name").toString());
                                        ((EditText) findViewById(R.id.editViewPrice)).setText(data.get("price").toString());
                                        ((EditText) findViewById(R.id.editViewSize)).setText(data.get("size").toString());
                                        ((EditText) findViewById(R.id.editViewStore)).setText(data.get("store").toString());
                                    }
                                }
                            }
                        } else {
                            Log.w(null, "Error getting documents.", task.getException());
                        }
                    }
                });
    }

    /**
     * Read the OCR text from the label picture.
     */
    public void detectLabel() {
        FirebaseVisionImage image;
        try {
            image = FirebaseVisionImage.fromFilePath(this, fileUri);
            FirebaseVisionTextRecognizer textRecognizer = FirebaseVision.getInstance().getOnDeviceTextRecognizer();
            textRecognizer.processImage(image)
                    .addOnSuccessListener(new OnSuccessListener<FirebaseVisionText>() {
                        @Override
                        public void onSuccess(FirebaseVisionText result) {
                            Log.d("Label detected ", "success");
                            onSuccessfulDetection(result);
                        }
                    })
                    .addOnFailureListener(new OnFailureListener() {
                        @Override
                        public void onFailure(@NonNull Exception e) {
                            Log.getStackTraceString(e);
                        }
                    });
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Called when the OCR text has been detected.
     *
     * @param result the recognized text
     */
    public void onSuccessfulDetection(FirebaseVisionText result) {
        resultText = result.getText();
        Log.d("Finished! Results: ", resultText);
        fillProductDetails(resultText);
    }

    /**
     * Sort the details read by OCR into categories and display them.
     *
     * @param extras the raw OCR text
     */
    private void fillProductDetails(String extras) {
        // process label text
        String[] lines = extras.split("\\n");
        String itemDescription = "";
        for (String line : lines) {
            try {
                if (line.contains("$")) {
                    EditText editItem = (EditText) findViewById(R.id.editViewPrice);
                    editItem.setText(line);
                } else if (line.matches("S|M|L|XL|XS|XXS|XXL|XXXL")) { // letter sizes
                    EditText editItem = (EditText) findViewById(R.id.editViewSize);
                    editItem.setText(line);
                }
                // words
                else if (line.matches("[^\\d\\W]{2,}")) {
                    itemDescription += line;
                }
                // integer size, according to Australian sizing standards
                else if (Integer.parseInt(line) > 2 && Integer.parseInt(line) < 40) {
                    EditText editItem = (EditText) findViewById(R.id.editViewSize);
                    editItem.setText(line);
                } else if (line.toLowerCase().contains("size")) {
                    EditText editItem = (EditText) findViewById(R.id.editViewSize);
                    String[] size = line.split("[ :]+"); // split on spaces/colons
                    for (String obj : size) {
                        try {
                            if (obj.equalsIgnoreCase("size")) {
                                continue;
                            } else if (Integer.parseInt(obj) > 2 && Integer.parseInt(obj) < 40) {
                                editItem.setText(obj);
                            } else if (obj.matches("S|M|L|XL|XS|XXS|XXL|XXXL")) {
                                editItem.setText(obj);
                            }
                        } catch (NumberFormatException e) {
                            continue;
                        }
                    }
                }
                // set the item description to all other strings that are words
                EditText editItem = (EditText) findViewById(R.id.editViewName);
                editItem.setText(itemDescription);
            } catch (NumberFormatException e) {
                continue;
            }
        }
    }

    /**
     * Close the current activity and return to the main activity.
     */
    public void ClosePreviewActivity(View v) {
        if (position == -1) {
            imageFile.delete();
            files[filesPaths.length - 1].delete();
        }
        Intent i = new Intent(this, MainActivity.class);
        i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(i);
    }

    /**
     * Save the image with its details.
     */
    public void SaveImage(View v) {
        // delete label
        if (position == -1) {
            files[filesPaths.length - 1].delete();
        }

        ProductDetails item = new ProductDetails();
        item.name = ((EditText) findViewById(R.id.editViewName)).getText().toString();
        item.price = ((EditText) findViewById(R.id.editViewPrice)).getText().toString();
        item.size = ((EditText) findViewById(R.id.editViewSize)).getText().toString();
        item.storeName = ((EditText) findViewById(R.id.editViewStore)).getText().toString();
        item.imageFilePath = imageFile.getAbsolutePath();
        savetoFireStore(item);
        uploadFile();

        SharedPreferences sp = this.getSharedPreferences("SP_StoreBean_List", Activity.MODE_PRIVATE);
        String peopleListJson = sp.getString("KEY_StoreBean_LIST_DATA", "");
        if (!peopleListJson.isEmpty()) { // guard against an empty stored list
            Gson gson = new Gson();
            storeBeans = gson.fromJson(peopleListJson, new TypeToken<List<StoreBean>>() {}.getType());
        }
        storeBean.setStoreName(item.name);
        storeBean.setLatitude(latitude);
        storeBean.setLongitude(longitude);
        storeBeans.add(storeBean);
        Gson gson = new Gson();
        String jsonStr = gson.toJson(storeBeans);
        SharedPreferences.Editor editor = sp.edit();
        editor.putString("KEY_StoreBean_LIST_DATA", jsonStr);
        editor.commit();

        Intent i = new Intent(this, MainActivity.class);
        i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        startActivity(i);
    }

    /**
     * Get the link to the online store of the product.
     *
     * @param view
     */
    public void LinkOnlineStore(View view) {
        String store = ((AutoCompleteTextView) findViewById(R.id.editViewStore)).getText().toString();
        if (store.equals("")) {
            Toast.makeText(this, "Add Store name", Toast.LENGTH_SHORT).show();
        } else {
            db.collection("users").document("storeData").collection("data").document(store).get()
                    .addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() {
                        @Override
                        public void onComplete(@NonNull Task<DocumentSnapshot> task) {
                            if (task.isSuccessful()) {
                                DocumentSnapshot document = task.getResult();
                                Map<String, Object> data = document.getData();
                                if (data != null) {
                                    String url = data.get("link").toString();
                                    Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
                                    startActivity(browserIntent);
                                } else {
                                    Toast.makeText(ViewAndEditImageActivity.this, "Store Link not Available", Toast.LENGTH_SHORT).show();
                                }
                            }
                        }
                    });
        }
    }

    FirebaseFirestore mFireStore;

    private void initFirestore() {
        mFireStore = FirebaseFirestore.getInstance();
    }

    /**
     * Save the product details to the Firestore database.
     *
     * @param item
     */
    private void savetoFireStore(ProductDetails item) {
        Map<String, Object> data = new HashMap<>();
        data.put("store", item.storeName);
        data.put("imageFilePath", item.imageFilePath);
        data.put("size", item.size);
        data.put("price", item.price);
        data.put("name", item.name);

        // Push the data to Firestore
        mFireStore.collection("users").document(mFirebaseAuth.getCurrentUser().getUid()).collection("product")
                .document(imageFile.getName()).set(data)
                .addOnSuccessListener(new OnSuccessListener<Void>() {
                    @Override
                    public void onSuccess(Void aVoid) {
                        Log.d(null, "DocumentSnapshot successfully written!");
                    }
                })
                .addOnFailureListener(new OnFailureListener() {
                    @Override
                    public void onFailure(@NonNull Exception e) {
                        Log.w(null, "Error writing document", e);
                    }
                });
    }

    /**
     * Upload the image file to Firebase Storage.
     */
    private void uploadFile() {
        Uri mImageUri = Uri.fromFile(imageFile);
        if (mImageUri != null) {
            StorageReference fileReference = mStorageRef.child(mFirebaseAuth.getCurrentUser().getUid() + "/" + imageFile.getName());

            // Store the image in Firebase Storage
            mUploadTask = fileReference.putFile(mImageUri)
                    .addOnSuccessListener(new OnSuccessListener<UploadTask.TaskSnapshot>() {
                        @Override
                        public void onSuccess(UploadTask.TaskSnapshot taskSnapshot) {
                            Handler handler = new Handler();
                            handler.postDelayed(new Runnable() {
                                @Override
                                public void run() {
                                }
                            }, 500);
                            Toast.makeText(ViewAndEditImageActivity.this, "Upload successful", Toast.LENGTH_LONG).show();
                        }
                    })
                    .addOnFailureListener(new OnFailureListener() {
                        @Override
                        public void onFailure(@NonNull Exception e) {
                            Toast.makeText(ViewAndEditImageActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                        }
                    })
                    .addOnProgressListener(new OnProgressListener<UploadTask.TaskSnapshot>() {
                        @Override
                        public void onProgress(UploadTask.TaskSnapshot taskSnapshot) {
                        }
                    });

            // Get the product details from the view
            ProductDetails item = new ProductDetails();
            item.name = ((EditText) findViewById(R.id.editViewName)).getText().toString();
            item.price = ((EditText) findViewById(R.id.editViewPrice)).getText().toString();
            item.size = ((EditText) findViewById(R.id.editViewSize)).getText().toString();
            item.storeName = ((EditText) findViewById(R.id.editViewStore)).getText().toString();
            item.imageFilePath = imageFile.getAbsolutePath();

            // Create a new product with the details
            Map<String, Object> user = new HashMap<>();
            user.put("name", item.name);
            user.put("price", item.price);
            user.put("size", item.size);
            user.put("storeName", item.storeName);
        } else {
            Toast.makeText(this, "No file selected", Toast.LENGTH_SHORT).show();
        }
    }

    private void initLocation() {
        LocationClientOption option = new LocationClientOption();
        option.setLocationMode(LocationClientOption.LocationMode.Hight_Accuracy);
        option.setCoorType("bd09ll");
        option.setScanSpan(1000);
        option.setOpenGps(true);
        option.setLocationNotify(true);
        // optional; location runs as a service inside the SDK, so decide whether
        // it dies with the process (setIgnoreKillProcess(true) keeps it alive)
        option.setIgnoreKillProcess(false);
        // optional; set whether to collect crash details, default false
        option.SetIgnoreCacheException(false);
        option.setWifiCacheTimeOut(5 * 60 * 1000);
        option.setEnableSimulateGps(false);
        mLocationClient.setLocOption(option);
    }

    double latitude;
    double longitude;

    public class MyLocationListener extends BDAbstractLocationListener {
        @Override
        public void onReceiveLocation(BDLocation location) {
            latitude = location.getLatitude();   // get the latitude
            longitude = location.getLongitude(); // get the longitude
            float radius = location.getRadius(); // get the accuracy of the location
            String coorType = location.getCoorType();
            int errorCode = location.getLocType();
            mLocationClient.stop();
        }
    }
}
/** getCurrentTimeZone method for Linux implementation of OS Provider */
Boolean OperatingSystem::getCurrentTimeZone(Sint16& currentTimeZone)
{
    struct tm buf;
    time_t now;

#if defined(PEGASUS_PLATFORM_LINUX_GENERIC_GNU)
    now = time(NULL);
    localtime_r(&now, &buf);
    // tm_gmtoff is seconds east of UTC; report the offset in minutes
    currentTimeZone = (buf.tm_gmtoff / 60);
    return true;
#else
    return false;
#endif
}
import {
	EntitySubscriberInterface,
	EventSubscriber,
	InsertEvent
} from "typeorm";
import { Employee } from "./employee.entity";
import { getUserDummyImage } from "./../core/utils";

@EventSubscriber()
export class EmployeeSubscriber implements EntitySubscriberInterface<Employee> {
	/**
	 * Indicates that this subscriber only listens to Employee events.
	 */
	listenTo() {
		return Employee;
	}

	/**
	 * Called after entity is loaded.
	 */
	afterLoad(entity: Employee) {
		if (entity.user) {
			entity.fullName = entity.user.name;
		}
	}

	/**
	 * Called before employee insertion.
	 */
	beforeInsert(event: InsertEvent<Employee>) {
		if (event.entity) {
			const { entity } = event;
			/**
			 * Use a dummy image avatar if no image was uploaded for the employee.
			 * Guard against a missing user relation, as afterLoad already does.
			 */
			if (entity.user && !entity.user.imageUrl) {
				entity.user.imageUrl = getUserDummyImage(entity.user);
			}
		}
	}
}
package org.hisp.dhis.de.state;

import org.hisp.dhis.datavalue.DataValue;

/**
 * Interface for saving DataValues in a stateful way.
 * Implementing classes must supply the remaining necessary properties in some
 * fashion, for example through a bean or some session storage.
 *
 * The relevant properties are:
 * OrganisationUnit to store the DataValue for.
 * Period to store the DataValue for.
 * The storedBy property, who stored this DataValue.
 * The timestamp property, when this DataValue was last updated.
 *
 * @author <NAME>
 * @version $Id$
 */
public interface StatefulDataValueSaver
{
    /**
     * Save a value for the given DataElement, and other relevant properties.
     *
     * @param dataElementId Id of the DataElement to save for.
     * @param optionComboId Id of the category option combo to save for.
     * @param value String value to save.
     * @return the saved DataValue.
     */
    DataValue saveValue( int dataElementId, int optionComboId, String value );
}
from django.db import models
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model


class About(models.Model):
    user = models.ForeignKey(get_user_model())
    blurb = models.TextField(help_text="Appears on the homepage. Please use <a href=\"http://en.wikipedia.org/wiki/Markdown\">Markdown styling</a> for links.")
    body = models.TextField()
    photo = models.ImageField(upload_to="about/photo/", blank=True)
    resume = models.FileField(upload_to="about/resume/", blank=True, help_text="Please upload a PDF.")

    class Meta:
        ordering = ["user"]

    def __unicode__(self):
        return u"%s" % self.user.get_full_name()

    def get_absolute_url(self):
        return reverse("about_list")
Greener Ullmann-Type Coupling of Aryl Halides for Preparing Biaryls Using Reusable Pd/ZrO2 Catalyst

Biaryls with excellent yields can be prepared by the Ullmann-type coupling of aryl halides in the presence of potassium carbonate (as a base) and dimethylformamide (as a solvent), at 140 °C, using a reusable Pd (2.5 wt%)/ZrO2 catalyst. The product yield of 4-iodoanisole coupling is strongly influenced by the catalyst preparation method, solvent, and base.
package spatial

import (
	"bytes"
	"encoding/binary"
)

type MultiPointData []PointData

func (mpd *MultiPointData) decodeFrom(data *bytes.Reader) error {
	var length uint32
	if err := binary.Read(data, byteOrder, &length); nil != err {
		return err
	}

	*mpd = make([]PointData, length)
	var p Point
	for i := uint32(0); i < length; i++ {
		if err := p.decodeFrom(data, false); nil != err {
			return err
		}
		(*mpd)[i] = p.Data
	}
	return nil
}

func (mpd *MultiPointData) encodeTo(data *bytes.Buffer) {
	length := uint32(len(*mpd))
	binary.Write(data, byteOrder, length)
	for i := uint32(0); i < length; i++ {
		// Method expression invoked with a nil receiver; this assumes
		// encodeHeaderTo never dereferences its receiver when encodeSrid
		// is false.
		(*baseGeometry).encodeHeaderTo(nil, data, false, GEOMETRY_TYPE_POINT)
		(*mpd)[i].encodeTo(data)
	}
}

type MultiPoint struct {
	baseGeometry
	Data MultiPointData
}

func NewMultiPoint(srid Srid) *MultiPoint {
	return &MultiPoint{baseGeometry: baseGeometry{srid: srid}}
}

func (mp *MultiPoint) Decode(data []byte) error {
	return mp.decodeFrom(bytes.NewReader(data), true)
}

func (mp *MultiPoint) Encode() []byte {
	data := newEncodeBuffer()
	mp.encodeTo(data, true)
	return data.Bytes()
}

func (mp *MultiPoint) decodeFrom(data *bytes.Reader, decodeSrid bool) error {
	if _, err := mp.decodeHeaderFrom(data, decodeSrid, GEOMETRY_TYPE_MULTI_POINT); nil != err {
		return err
	}
	return mp.Data.decodeFrom(data)
}

func (mp *MultiPoint) encodeTo(data *bytes.Buffer, encodeSrid bool) {
	mp.encodeHeaderTo(data, encodeSrid, GEOMETRY_TYPE_MULTI_POINT)
	mp.Data.encodeTo(data)
}
def fix(self, sequence):
    # Repair invalid BIO tag sequences in place: an I-<label> tag that is not
    # preceded by a run opened with B-<label> is demoted to B-<label>.
    for idx, tag in enumerate(sequence):
        tag = self.rev_tagset[tag]
        if tag.startswith("I-"):
            parts = tag.split("-")
            label = parts[1]
            flag = False
            # scan backwards for the B- tag that opens this entity span
            for i in range(idx - 1, -1, -1):
                prev = self.rev_tagset[sequence[i]].split("-")
                if prev[0] == "B" and prev[1] == label:
                    flag = True
                    break
                if prev[0] == "O":
                    break
                if prev[0] != "O" and prev[1] != label:
                    break
            if not flag:
                sequence[idx] = self.tagset["B-%s" % label]
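A minimal standalone sketch of the same BIO-repair idea, with a hypothetical toy tagset standing in for the class's tagset/rev_tagset mappings:

# Toy tagset (hypothetical); the real code keeps these mappings on the class.
tagset = {"O": 0, "B-PER": 1, "I-PER": 2}
rev_tagset = {v: k for k, v in tagset.items()}

def fix(sequence):
    for idx, tag_id in enumerate(sequence):
        tag = rev_tagset[tag_id]
        if tag.startswith("I-"):
            label = tag.split("-")[1]
            ok = False
            for i in range(idx - 1, -1, -1):  # scan back for the opening B- tag
                prev = rev_tagset[sequence[i]].split("-")
                if prev[0] == "B" and prev[1] == label:
                    ok = True
                    break
                if prev[0] == "O" or prev[1] != label:
                    break
            if not ok:
                sequence[idx] = tagset["B-%s" % label]  # demote orphan I- to B-
    return sequence

# An I-PER with no opening B-PER becomes B-PER: [O, I-PER, I-PER] -> [O, B-PER, I-PER]
print(fix([0, 2, 2]))  # [0, 1, 2]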
Combined bioinformatics analysis reveals gene expression and DNA methylation patterns in osteoarthritis

Osteoarthritis (OA) is a common type of arthritis, which may cause pain and disability. Alterations in gene expression and DNA methylation have been proven to be associated with the development of OA. The aim of the present study was to identify potential therapeutic targets and associated processes for OA via the combined analysis of gene expression and DNA methylation datasets. The gene expression and DNA methylation profiles were obtained from the Gene Expression Omnibus, and differentially expressed genes (DEGs) and differentially methylated sites (DMSs) were identified in the present study, using R programming software. The enriched functions of DEGs and DMSs were obtained via the Database for Annotation, Visualization and Integrated Discovery. Finally, cross analysis of DEGs and DMSs was performed to identify genes that exhibited differential expression and methylation simultaneously. The protein-protein interaction (PPI) network of overlaps between DEGs and DMSs was obtained using the Human Protein Reference Database; the topological properties of PPI network overlaps were additionally obtained. Hub genes in the PPI network were further confirmed via reverse transcription-quantitative polymerase chain reaction (RT-qPCR). The results of the present study revealed that the majority of DEGs and DMSs were upregulated and hypomethylated in patients with OA, respectively. DEGs and DMSs were primarily involved in inflammatory, immune and gene expression regulation-associated processes and pathways. Cross analysis revealed 30 genes that exhibited differential expression and methylation in OA simultaneously. Topological analysis of the PPI network revealed that numerous genes, including G protein subunit α1 (GNAI1), runt related transcription factor 2 (RUNX2) and integrin subunit β2 (ITGB2), may be involved in the development of OA. Additionally, RT-qPCR analysis of GNAI1, RUNX2 and ITGB2 provided further confirmation. Numerous known and novel therapeutic targets were obtained via network analysis. The results of the present study may be beneficial for the diagnosis and treatment of OA.

Introduction

Osteoarthritis (OA) is the most common type of arthritis and a leading cause of pain and disability, which places a great burden on healthcare economies and reduces quality of life (1)(2)(3). OA involves the degeneration of numerous tissues, including subchondral bone, ligaments, muscle, tendons, and the meniscus and synovium (1). Numerous factors may affect OA progression, including age, gender, obesity, genetics and joint injury (4); however, how these factors affect the development of OA requires further investigation, and no effective method has been developed for the relief of pain. In addition, molecular biology studies have identified numerous biomarkers and biological processes that contribute to OA, including the erosion of the extracellular matrix (5), the expression of chemokines (chemokine C-C ligands 9 and 5, and interleukin-8) (6) and the upregulation of inflammatory genes (7). Further investigation into the molecular events associated with cartilage degeneration is required. Over the past decades, the development of high-throughput technologies has resulted in the accumulation of large amounts of omics data for various complex diseases.

For OA, gene expression profiling via microarray or high-throughput sequencing has become a promising method for the analysis of the mechanisms underlying its initiation and progression (8). For example, Rasheed et al (9) performed an integrated study of microRNA (miRNA) expression profiles in OA chondrocytes and OA-associated genes, and identified numerous miRNAs associated with the development of OA. Sun et al (10) reported several potential biomarkers for OA via differential expression and network analysis based on gene microarray datasets. Microarray analysis in the study of Loeser et al (11) indicated the link between age-associated differences in gene expression and the development of OA. In addition, epigenetic modifications serve important roles in gene expression regulation, and DNA methylation is one of the most common types of epigenetic modification. Recently, an increasing number of studies have focused on the associations between methylation status and the progression of OA (12,13). In contrast to cancer, in which CpG sites are frequently hypermethylated, the majority of studies investigating OA reported a higher frequency of hypomethylation (14,15). DNA methylation may also affect the allelic imbalance of specific single nucleotide polymorphisms, and thus the development of OA (16). Combined analysis of gene expression and DNA methylation profiles may contribute to the screening of potential biomarkers of OA, early diagnosis and treatment; to the best of our knowledge, an investigation into this is yet to be performed.

In the present study, combined analysis of publicly accessible gene expression and DNA methylation microarray datasets of OA was conducted. Functional enrichment and network analysis was performed for the identification of potential biomarkers. Numerous known and novel targets were obtained and their involvement in OA was further confirmed via reverse transcription-quantitative polymerase chain reaction (RT-qPCR) analysis.

Materials and methods

Microarray datasets. The publicly accessible data were all obtained from the Gene Expression Omnibus (GEO, www.ncbi.nlm.nih.gov/geo). Gene expression profiles deposited by Klinger et al (17), accession no. GSE43923, containing six samples (three osteophytic cartilage and three corresponding articular cartilage samples from the knee joints of patients with OA) were employed in the present study. The genome-wide expression profiles were quantified using the commercial gene microarray GPL570 Affymetrix Human Genome U133 Plus 2.0 Array (Affymetrix; Thermo Fisher Scientific, Inc., Waltham, MA, USA). The DNA methylation profiles (GSE73626) (18) of five hip OA, six knee OA and seven hip healthy cartilage samples were detected via Illumina HumanMethylation450 BeadChip assay (Illumina, Inc., San Diego, CA, USA), which contains >480,000 methylation sites, covering 99% of RefSeq (https://www.ncbi.nlm.nih.gov/refseq/) genes and 96% of University of California, Santa Cruz (http://genome.ucsc.edu/)-defined CpG sites with an average of 17 CpG sites/gene across different genomic regions, including the promoter, 5' untranslated region (UTR), first exon, gene body, intergenic and 3'UTR.

Microarray data analysis. The present study conducted differential expression analysis for osteophytic and articular cartilage samples from patients with OA. The raw CEL data were imported into R version 3.2.2 (http://www.R-project.org/) and normalized via the affy package (19); subsequently, the limma package (20) was used for the screening of differentially expressed genes (DEGs) with the criteria of fold change >1.5 and false discovery rate (FDR)<0.05. For the methylation dataset, site-level analysis was performed based on the Illumina Methylation Analyzer package (Illumina, Inc.) (21) to obtain the differentially methylated CpG sites (DMSs) between hip/knee OA cartilage and healthy cartilage samples, with thresholds of Δβ value >0.2 and FDR<0.05. DMSs were mapped to the corresponding genes (DMGs) and genomic regions based on the full annotation file of the microarray and, following this, cross analysis was performed via the 'intersect' function of R version 3.2.2 (http://www.R-project.org/) using DEGs and DMGs to reveal overlapping genes. In addition, differences between distributions of DMSs relative to CpG islands and genes were compared using the χ2 test.

Functional clustering analysis. Investigation into the functions of enriched DEGs and DMGs may improve understanding of their involvement in OA. In the present study, functional clustering analysis of DEGs and DMGs based on the Database for Annotation, Visualization and Integrated Discovery (DAVID; david.abcc.ncifcrf.gov) (22) was conducted. Clusters with an enrichment score >1, and Gene Ontology (GO) terms and Kyoto Encyclopedia of Genes and Genomes (KEGG; www.genome.jp/kegg) pathways with P<0.05 were retained in the present study.

Protein-protein interaction network analysis. Genes are likely to function together rather than alone in complex diseases; hub nodes in the network may represent key biomarkers. In the present study, protein-protein interaction (PPI) network analysis was performed to investigate the overlaps between DEGs and DMGs based on the Human Protein Reference Database (HPRD; www.hprd.org) (23). The network was visualized using Cytoscape 3.6.0 software (http://www.cytoscape.org/), and the topological property of every gene was additionally analyzed for the assessment of their importance.

RT-qPCR. Normal and OA tissues were obtained from the articular cartilage of 26 females and 20 males with a median age of 56.35 years (43.58-69.12 years) between April 2012 and April 2015 in Zibo Central Hospital (Zibo, China). Patients exhibiting temporomandibular joint pain that were not suffering from any form of rheumatic disease or cancer were included in the present study. The study was approved by the ethics committee of Zibo Central Hospital. Written informed consent was obtained from all patients. Total RNA was extracted from OA and normal tissues (50-100 mg) using an RNeasy Mini kit (Qiagen GmbH, Hilden, Germany) and quantified with a NanoDrop system (Thermo Fisher Scientific, Inc.), and subsequently subjected to RT-qPCR using EasyScript Reverse Transcriptase kit (Promega Corporation, Madison, WI, USA). The temperature protocol used for RT was as follows: 95˚C for 10 min, 55˚C for 1 min and 68˚C for 10 min. The 7500 Real-Time PCR system (Applied Biosystems; Thermo Fisher Scientific, Inc.) was used for qPCR. Reactions were conducted in triplicate in each reaction tube using AceQ qPCR SYBR Green Master Mix (Vazyme Biotech Co., Ltd., Nanjing, China). The temperature protocol used for qPCR was as follows: 94˚C for 5 min; followed by 40 cycles of 95˚C for 10 sec and 60˚C for 30 sec; followed by 95˚C for 15 sec, 60˚C for 60 sec and 95˚C for 15 sec. Data were analyzed via the 2^-ΔΔCq method using GAPDH as internal control (24).
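To make the screening arithmetic above concrete, here is a minimal sketch in Python/pandas of the DEG thresholding and the 2^-ΔΔCq calculation; the column names and Cq values are illustrative assumptions, not the study's actual data:

import pandas as pd

# Hypothetical differential-expression table (column names are assumptions).
de = pd.DataFrame({
    "gene": ["GNAI1", "RUNX2", "ITGB2", "ACTB"],
    "fold_change": [2.1, 1.8, 1.6, 1.1],  # osteophytic vs. articular
    "fdr": [0.01, 0.03, 0.04, 0.60],
})
# DEG criteria used above: fold change > 1.5 and FDR < 0.05
degs = de[(de["fold_change"] > 1.5) & (de["fdr"] < 0.05)]
print(degs["gene"].tolist())  # ['GNAI1', 'RUNX2', 'ITGB2']

# Relative expression by the 2^-ddCq method, GAPDH as internal control:
# dCq = Cq(target) - Cq(GAPDH); ddCq = dCq(OA) - dCq(control)
def rel_expression(cq_tgt_oa, cq_ref_oa, cq_tgt_ctl, cq_ref_ctl):
    ddcq = (cq_tgt_oa - cq_ref_oa) - (cq_tgt_ctl - cq_ref_ctl)
    return 2 ** (-ddcq)

print(rel_expression(24.0, 18.0, 26.0, 18.5))  # ~2.83-fold higher in OA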
Primer sequences were: G protein subunit α1 (GNAI1) forward ...

Statistical analysis. R version 3.2.2 (http://www.R-project.org/) was used for all of the statistical analysis. The relative mRNA expression levels in the RT-qPCR analysis were presented as the mean ± standard deviation of the three replicates. RT-qPCR data were analyzed using a Student's t-test. P<0.05 was considered to indicate a statistically significant difference.

Results

Gene expression profile analysis. The raw microarray data were normalized and used for the following differential expression analysis. As a result, a total of 466 genes were detected to be DEGs in osteophytic cartilage samples compared with articular samples, which contained 49 downregulated and 417 upregulated genes (Fig. 1A). The two-way supervised clustering indicated notable differences between osteophytic and articular cartilage samples from patients with OA, with blue and red coloring indicating low and high expression levels, respectively (Fig. 1B). The full list of DEGs is provided in Table I.

DMSs. Comparisons were performed between DMSs in hip/knee cartilage samples and healthy cartilage samples. As presented in Fig. 2A, the β values of OA hip compared with healthy hip tissue, and OA knee compared with healthy knee tissue, of all CpG sites in the microarray were obtained. For CpG sites satisfying the criterion of P<0.05, the frequencies of hypomethylated sites were increased compared with hypermethylated sites in OA hip and knee samples (Fig. 2B). Additionally, the number of hypomethylated sites with P<0.05 and Δβ>0.2 was increased compared with hypermethylated sites (Fig. 2C). This was consistent with the results of the DEG analysis (the number of upregulated genes was increased compared with downregulated genes), as hypermethylation of the promoter may result in the downregulation of the corresponding gene. To improve understanding of the functional significance of DMSs, the functional locations of DMSs were investigated. As presented in Fig. 3A, the majority of the DMSs were reported to be in intergenic, gene body and promoter regions. Additionally, four neighborhood locations were defined in the Illumina HumanMethylation450 BeadChip assay: 31% CpG islands, 23% shores (0-2 kb from canonical CpG islands), 10% shelves (2-4 kb from canonical) and open sea (rest of the sequence). Consistent with the BeadChip assay, the majority of the hypo- and hypermethylated sites in hip and knee cartilage samples were detected in open sea, followed by the north and south shores (upstream and downstream shores). To investigate the associations of functional and neighborhood locations with differential methylation status, a χ2 test was performed for the data presented in Fig. 3. The results indicated P<5x10^-7 in all of the cases, demonstrating that functional and neighborhood locations are associated with differential methylation status. One-way clustering of DMSs of hip and knee cartilage samples is presented in Fig. 4.

Functional clusters. Functional clustering analysis in DAVID resulted in three functional clusters for DEGs and DMGs. DEGs were primarily involved in the GO terms and KEGG pathways that were associated with 'cell structure', 'inflammatory and immune response', 'substance synthetic' and 'metabolic'. 'Guanosine 5'-triphosphate (GTP)ase activity', 'gene expression regulation' and 'inflammatory and immune response' were reported to be significantly enriched in DMGs (data not shown).

Network analysis. Network topological properties are important representations of gene roles in specific biological processes and diseases. In the present study, 30 overlaps were obtained among DMGs of hip and knee cartilage samples and DEGs; 20 of these overlaps were reported to interact with other genes from the HPRD. PPI networks are presented in Fig. 5. GNAI1 directly interacted with 50 genes, which was markedly higher compared with the degree of the other 19 overlaps in the network, potentially indicating its important roles in the development of OA. Table I includes the five previously identified biomarkers of OA and their corresponding PMID nos.

RT-qPCR analysis. A total of three hub genes, GNAI1, RUNX2 and integrin subunit β2 (ITGB2), were subjected to RT-qPCR analysis for the quantification of their relative abundance in OA and control samples. The results of the RT-qPCR analysis were consistent with the results of the gene microarray analysis; the relative mRNA expression levels of GNAI1, RUNX2 and ITGB2 in OA samples were significantly increased compared with the control samples (Fig. 6).
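The hub screening in the network analysis reduces to ranking nodes by degree; a minimal sketch with networkx, using a toy edge list rather than the actual HPRD interaction set:

import networkx as nx

# Toy PPI edges (illustrative only; the study used HPRD interactions).
edges = [("GNAI1", "A"), ("GNAI1", "B"), ("GNAI1", "C"),
         ("RUNX2", "A"), ("ITGB2", "B")]
g = nx.Graph(edges)

# Rank candidate hubs by node degree, the topological property used above.
hubs = sorted(g.degree, key=lambda kv: kv[1], reverse=True)
print(hubs[0])  # ('GNAI1', 3), the highest-degree node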
Furthermore, numerous genes were observed to directly interact with other genes in the PPI network, including RUNX2, bleomycin hydrolase and gremlin 1 DAN family BMP antagonist (GREM1). The majority of these genes have been demonstrated to be closely associated with the progression of OA; for example, RUNX2 polymorphisms may affect temporomandibular joint OA in females (35) and may influence the expression of other genes in OA (36). The mRNA expression levels of GREM1 are correlated with OA and may be regulated by OA-associated factors (37); in addition, GREM1 is a key regulator of synoviocyte hyperplasia and invasiveness (38). Valverde-Franco et al (39) reported that ephrin-B2 may be essential for normal bone growth, and its absence may lead to knee and hip OA. In addition, a number of genes with a high degree in the PPI network have not been proven to be associated with the development of OA. For example, GNAI1, also known as Gi, is a protein that can hydrolyze GTP and interact with other proteins, and T cell differentiation may alter its structure (40). Additionally, the altered expression of GNAI1 was observed to be associated with the progression of inflammatory and immune diseases (41,42), and it may be considered a novel biomarker for OA.

Certain limitations of the present study were noted. Only three genes were analyzed via RT-qPCR, and future studies are required to investigate more genes. Furthermore, osteophytes may be considered to contribute to regeneration in OA joints; further studies investigating genome-wide expression differences between OA and healthy tissues are required to extend the present analysis.

In conclusion, the present study provided a pipeline for the combined analysis of gene expression and DNA methylation datasets. In addition, numerous known and potential novel markers were proposed in the present study, which may contribute to diagnosis and treatment targets for OA; however, further investigation is required for confirmation of the functions exhibited by these markers.
import Control.Concurrent
import Control.Exception
import System.IO
import System.Posix
import System.Posix.IO

main = do
  (pout1, pin1) <- createPipe
  (pout2, _) <- createPipe
  pid <- forkProcess $ do
    hdl <- fdToHandle pin1
    hSetBuffering hdl LineBuffering
    handle (\UserInterrupt{} -> hPutStrLn hdl "caught") $ do
      hPutStrLn hdl "registered"
      hdl2 <- fdToHandle pout2
      putStrLn =<< hGetLine hdl2
  hdl <- fdToHandle pout1
  hSetBuffering hdl LineBuffering
  "registered" <- hGetLine hdl
  signalProcess sigINT pid
  putStrLn =<< hGetLine hdl
<reponame>LucaNicosia/suzieq import os import re import sys from typing import List import logging from logging.handlers import RotatingFileHandler from datetime import datetime import fcntl from importlib.util import find_spec from itertools import groupby from ipaddress import ip_network import errno import json import yaml from dateutil.relativedelta import relativedelta from tzlocal import get_localzone from pytz import all_timezones from dateparser import parse import pandas as pd from suzieq.version import SUZIEQ_VERSION logger = logging.getLogger(__name__) MAX_MTU = 9216 # MISSING_SPEED: the interface doesn't provide a speed and I have to complain # NO_SPEED: the interface doesn't provide a speed but I don't care # (for example virtual interfaces) # MISSING_SPEED_IF_TYPES: list of interface-types that will have MISSING_SPEED # if the speed is invalid. # Types which are not in this list will have NO_SPEED MISSING_SPEED = -1 NO_SPEED = 0 MISSING_SPEED_IF_TYPES = ['ethernet', 'bond', 'bond_slave'] SUPPORTED_POLLER_TRANSPORTS = ['ssh', 'https'] SUPPORTED_ENGINES = ['pandas', 'rest'] def validate_sq_config(cfg): """Validate Suzieq config file Parameters: ----------- cfg: yaml object, YAML encoding of the config file Returns: -------- status: None if all is good or error string """ if not isinstance(cfg, dict): return "FATAL: Invalid config file format" ddir = cfg.get("data-directory", None) if not ddir: return "FATAL: No data directory for output files specified" if not os.path.isdir(ddir): os.makedirs(ddir, exist_ok=True) if (not os.path.isdir(ddir) or not (os.access(ddir, os.R_OK | os.W_OK | os.EX_OK))): return f'FATAL: Data directory {ddir} is not an acceesible dir' # Locate the service and schema directories svcdir = cfg.get('service-directory', None) if (not (svcdir and os.path.isdir(ddir) and os.access(svcdir, os.R_OK | os.W_OK | os.EX_OK))): sqdir = get_sq_install_dir() svcdir = f'{sqdir}/config' if os.access(svcdir, os.R_OK | os.EX_OK): cfg['service-directory'] = svcdir else: svcdir = None if not svcdir: return 'FATAL: No service directory found' schemadir = cfg.get('schema-directory', None) if not (schemadir and os.access(schemadir, os.R_OK | os.EX_OK)): schemadir = f'{svcdir}/schema' if os.access(schemadir, os.R_OK | os.EX_OK): cfg['schema-directory'] = schemadir else: schemadir = None if not schemadir: return 'FATAL: No schema directory found' # Move older format logging level and period to appropriate new location if 'poller' not in cfg: cfg['poller'] = {} for knob in ['logging-level', 'period']: if knob in cfg: cfg['poller'][knob] = cfg[knob] if 'rest' not in cfg: cfg['rest'] = {} for knob in ['API_KEY', 'rest_certfile', 'rest_keyfile']: if knob in cfg: cfg['rest'][knob] = cfg[knob] # Verify timezone if present is valid def_tz = get_localzone().zone reader = cfg.get('analyzer', {}) if reader and isinstance(reader, dict): usertz = reader.get('timezone', '') if usertz and usertz not in all_timezones: return f'Invalid timezone: {usertz}' elif not usertz: reader['timezone'] = def_tz else: cfg['analyzer'] = {'timezone': def_tz} return None def load_sq_config(validate=True, config_file=None): """Load (and validate) basic suzieq config""" # Order of looking up suzieq config: # Current directory # ${HOME}/.suzieq/ cfgfile = None cfg = {} cfgfile = sq_get_config_file(config_file) if cfgfile: try: with open(cfgfile, "r") as f: cfg = yaml.safe_load(f.read()) except Exception as e: # pylint: disable=broad-except print(f'ERROR: Unable to open config file {cfgfile}: {e.args[1]}') 
sys.exit(1) if not cfg: print(f'ERROR: Empty config file {cfgfile}') sys.exit(1) if validate: error_str = validate_sq_config(cfg) if error_str: print(f'ERROR: Invalid config file: {config_file}') print(error_str) sys.exit(1) if not cfg: print("suzieq requires a configuration file either in " "./suzieq-cfg.yml or ~/.suzieq/suzieq-cfg.yml") sys.exit(1) return cfg def sq_get_config_file(config_file): """Get the path to the suzieq config file""" if config_file: cfgfile = config_file elif os.path.exists("./suzieq-cfg.yml"): cfgfile = "./suzieq-cfg.yml" elif os.path.exists(os.getenv("HOME") + "/.suzieq/suzieq-cfg.yml"): cfgfile = os.getenv("HOME") + "/.suzieq/suzieq-cfg.yml" else: cfgfile = None return cfgfile def get_latest_files(folder, start="", end="", view="latest") -> list: '''Get list of relevant parquet files from folder''' lsd = [] if start: ssecs = pd.to_datetime( start, infer_datetime_format=True).timestamp() * 1000 else: ssecs = 0 if end: esecs = pd.to_datetime( end, infer_datetime_format=True).timestamp() * 1000 else: esecs = 0 ts_dirs = False pq_files = False for root, dirs, files in os.walk(folder): flst = None if dirs and dirs[0].startswith("timestamp") and not pq_files: flst = get_latest_ts_dirs(dirs, ssecs, esecs, view) ts_dirs = True elif files and not ts_dirs: flst = get_latest_pq_files(files, root, ssecs, esecs, view) pq_files = True if flst: lsd.append(os.path.join(root, flst[-1])) return lsd def get_latest_ts_dirs(dirs, ssecs, esecs, view): '''Get latest timestamp directories in a folder''' newdirs = None if not ssecs and not esecs: dirs.sort(key=lambda x: int(x.split("=")[1])) newdirs = dirs elif ssecs and not esecs: newdirs = list(filter(lambda x: int(x.split("=")[1]) > ssecs, dirs)) if not newdirs and view != "changes": # FInd the entry most adjacent to this one newdirs = list(filter(lambda x: int( x.split("=")[1]) < ssecs, dirs)) elif esecs and not ssecs: newdirs = list(filter(lambda x: int(x.split("=")[1]) < esecs, dirs)) else: newdirs = list( filter( lambda x: int(x.split("=")[1]) < esecs and int( x.split("=")[1]) > ssecs, dirs, ) ) if not newdirs and view != "changes": # FInd the entry most adjacent to this one newdirs = list(filter(lambda x: int( x.split("=")[1]) < ssecs, dirs)) return newdirs def get_latest_pq_files(files, root, ssecs, esecs, view): '''Get the latest parquet files given a fileset/start & end times & view''' newfiles = None if not ssecs and not esecs: files.sort(key=lambda x: os.path.getctime("%s/%s" % (root, x))) newfiles = files elif ssecs and not esecs: newfiles = list( filter(lambda x: os.path.getctime( "%s/%s" % (root, x)) > ssecs, files) ) if not newfiles and view != "changes": # FInd the entry most adjacent to this one newfiles = list( filter( lambda x: os.path.getctime( "{}/{}".format(root, x)) < ssecs, files ) ) elif esecs and not ssecs: newfiles = list( filter(lambda x: os.path.getctime( "%s/%s" % (root, x)) < esecs, files) ) else: newfiles = list( filter( lambda x: os.path.getctime("%s/%s" % (root, x)) < esecs and os.path.getctime("%s/%s" % (root, x)) > ssecs, files, ) ) if not newfiles and view != "changes": # Find the entry most adjacent to this one newfiles = list( filter(lambda x: os.path.getctime( "%s/%s" % (root, x)) < ssecs, files) ) return newfiles def calc_avg(oldval, newval): '''Calculate average of old and new''' if not oldval: return newval return float((oldval+newval)/2) def get_timestamp_from_cisco_time(in_data, timestamp): """Get timestamp in ms from the Cisco-specific timestamp string Examples of Cisco timestamp str are 
P2DT14H45M16S, P1M17DT4H49M50S etc. """ if not in_data.startswith('P'): return 0 months = days = hours = mins = secs = 0 if 'T' in in_data: day, timestr = in_data[1:].split('T') else: day = in_data[1:] timestr = '' if 'Y' in day: years, day = day.split('Y') months = int(years)*12 if 'M' in day: mnt, day = day.split('M') months = months + int(mnt) if 'D' in day: days = int(day.split('D')[0]) if 'H' in timestr: hours, timestr = timestr.split('H') hours = int(hours) if 'M' in timestr: mins, timestr = timestr.split('M') mins = int(mins) if 'S' in timestr: secs = timestr.split('S')[0] secs = int(secs) delta = relativedelta(months=months, days=days, hours=hours, minutes=mins, seconds=secs) return int((datetime.fromtimestamp(timestamp)-delta).timestamp()*1000) def get_timestamp_from_junos_time(in_data, timestamp: int): """Get timestamp in ms from the Junos-specific timestamp string The expected input looks like: "attributes" : {"junos:seconds" : "0"}. We don't check for format because we're assuming the input would be blank if it wasn't the right format. The input can either be a dictionary or a JSON string. """ if not in_data: # Happens for logical interfaces such as gr-0/0/0 secs = 0 else: try: if isinstance(in_data, str): data = json.loads(in_data) else: data = in_data secs = int(data.get('junos:seconds', 0)) except Exception: # pylint: disable=broad-except logger.warning(f'Unable to convert junos secs from {in_data}') secs = 0 delta = relativedelta(seconds=int(secs)) return int((datetime.fromtimestamp(timestamp)-delta).timestamp()*1000) def convert_macaddr_format_to_colon(macaddr: str) -> str: """Convert NXOS/EOS . macaddr form to standard : format, lowecase :param macaddr: str, the macaddr string to convert :returns: the converted macaddr string or all 0s string if arg not str :rtype: str """ if isinstance(macaddr, str): if re.match(r'[0-9a-zA-Z]{4}.[0-9a-zA-Z]{4}.[0-9a-zA-Z]{4}', macaddr): return (':'.join([f'{x[:2]}:{x[2:]}' for x in macaddr.split('.')])).lower() else: return macaddr.lower() return '00:00:00:00:00:00' def validate_network(network: str) -> bool: """Validate network address Args: network: (str) the network id to validate Returns: bool: A boolean with the result of the validation """ try: if isinstance(network, str) and '/' in network: ip_network(network) return True return False except ValueError: return False def validate_macaddr(macaddr: str) -> bool: """Validate mac address Args: macaddr: (str) the macaddr string to validate Returns: bool: A boolean with the result of the validation """ if isinstance(macaddr, str): if re.fullmatch(r'([0-9a-fA-F]{4}.){2}[0-9a-fA-F]{4}', macaddr) or \ re.fullmatch(r'([0-9a-fA-F]{2}:){5}[0-9a-fA-F]{2}', macaddr): return True return False def convert_rangestring_to_list(rangestr: str) -> list: """Convert a range list such as '1, 2-5, 10, 12-20' to list """ tmplst = [] if not isinstance(rangestr, str): return tmplst try: for x in rangestr.split(','): x = x.strip().split('-') if x[0]: if len(x) == 2: intrange = list(range(int(x[0]), int(x[1])+1)) tmplst.extend(intrange) else: tmplst.append(int(x[0])) except Exception: # pylint: disable=broad-except logger.error(f"Range string parsing failed for {rangestr}") return [] return tmplst def convert_numlist_to_ranges(numList: List[int]) -> str: """Convert a given list of numbers into a range string Args: numList (List[int]): unsorted/sorted list of integers Returns: str: Range string such as '1-5, 10, 12-20' """ result = '' for _, b in groupby(enumerate(sorted(numList)), lambda pair: pair[1] - 
pair[0]): b = list(b) if len(b) > 1: result += f'{b[0][1]}-{b[-1][1]}, ' else: result += f'{b[0][1]}, ' return result[:-2] def build_query_str(skip_fields: list, schema, ignore_regex=True, **kwargs) -> str: """Build a pandas query string given key/val pairs """ query_str = '' prefix = '' def _build_query_str(fld, val, fldtype) -> str: """Builds the string from the provided user input""" if ((fldtype in ["long", "float"]) and not isinstance(val, str)): result = f'{fld} == {val}' elif val.startswith('!'): val = val[1:] if fldtype in ["long", "float"]: result = f'{fld} != {val}' else: result = f'{fld} != "{val}"' elif val.startswith(('<', '>')): result = val elif val.startswith('~'): val = val[1:] result = f'{fld}.str.match("{val}")' else: result = f'{fld} == "{val}"' return result for f, v in kwargs.items(): if not v or f in skip_fields or f in ["groupby"]: continue stype = schema.field(f).get('type', 'string') if isinstance(v, list) and len(v): subq = '' subcond = '' if ignore_regex and [x for x in v if isinstance(x, str) and x.startswith('~')]: continue for elem in v: subq += f'{subcond} {_build_query_str(f, elem, stype)} ' subcond = 'or' query_str += '{} ({})'.format(prefix, subq) prefix = "and" else: query_str += f'{prefix} {_build_query_str(f, v, stype)} ' prefix = "and" return query_str def poller_log_params(cfg: dict, is_controller=False, worker_id=0) -> tuple: """Get the log file, level and size for the given program from config It gets the base file name of the configuration file and appends a prefix which depends on the component of the poller Args: cfg (dict): The config dictionary is_controller (bool, optional): If the component is the controller. Defaults to False. worker_id (int, optional): The poller worker id. Defaults to 0. Returns: tuple: [description] """ def_logfile = '/tmp/sq-poller.log' logfile, loglevel, logsize, log_stdout = get_log_params( 'poller', cfg, def_logfile) file_name = logfile.split('.log')[0] if is_controller: file_name += '-controller.log' else: file_name += f'-{worker_id}.log' return file_name, loglevel, logsize, log_stdout def get_log_params(prog: str, cfg: dict, def_logfile: str) -> tuple: """Get the log file, level and size for the given program from config The logfile is supposed to be defined by a variable called logfile within the hierarchy of the config dictionary. Thus, the poller log file will be {'poller': {'logfile': '/tmp/sq-poller.log'}}, for example. :param prog: str, The name of the program. Valid values are poller, coaelscer, and rest. 
:param cfg: dict, The config dictionary :param def_logfile: str, The default log file to return :returns: log file name, log level, log size, and True/False for logging to stdout :rtype: str, str and int """ if cfg: logfile = cfg.get(prog, {}).get('logfile', def_logfile) loglevel = cfg.get(prog, {}).get('logging-level', 'WARNING') logsize = cfg.get(prog, {}).get('logsize', 10000000) log_stdout = cfg.get(prog, {}).get('log-stdout', False) else: logfile = def_logfile loglevel = 'WARNING' logsize = 10000000 log_stdout = False return logfile, loglevel, logsize, log_stdout def init_logger(logname: str, logfile: str, loglevel: str = 'WARNING', logsize: int = 10000000, use_stdout: bool = False) -> logging.Logger: """Initialize the logger :param logname: str, the name of the app that's logging :param logfile: str, the log file to use :param loglevel: str, the default log level to set the logger to :param use_stdout: str, log to stdout instead of or in addition to file """ fh = sh = None # this needs to be suzieq.poller, so that it is the root of all the # other pollers log = logging.getLogger(logname) log.setLevel(loglevel.upper()) if logfile: fh = RotatingFileHandler(logfile, maxBytes=logsize, backupCount=2) if use_stdout: sh = logging.StreamHandler(sys.stdout) formatter = logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s " "- %(message)s" ) if fh: fh.setFormatter(formatter) if sh: sh.setFormatter(formatter) # set root logger level, so that we set asyncssh log level # asynchssh sets it's level to the root level root = logging.getLogger() root.setLevel(loglevel.upper()) if fh: root.addHandler(fh) if sh: root.addHandler(sh) log.warning(f"log level {logging.getLevelName(log.level)}") return log def known_devtypes() -> list: """Returns the list of known dev types""" return(['cumulus', 'eos', 'iosxe', 'iosxr', 'ios', 'junos-mx', 'junos-qfx', 'junos-qfx10k', 'junos-ex', 'junos-es', 'linux', 'nxos', 'sonic', 'panos']) def humanize_timestamp(field: pd.Series, tz=None) -> pd.Series: '''Convert the UTC timestamp in Dataframe to local time. Use of pd.to_datetime will not work as it converts the timestamp to UTC. If the timestamp is already in UTC format, we get busted time. ''' if field.empty: return field if pd.core.dtypes.common.is_datetime_or_timedelta_dtype(field): return field if pd.core.dtypes.common.is_datetime64_any_dtype(field): return field tz = tz or get_localzone().zone return field.apply(lambda x: datetime.utcfromtimestamp((int(x)/1000))) \ .dt.tz_localize('UTC').dt.tz_convert(tz) def expand_nxos_ifname(ifname: str) -> str: '''Expand shortned ifnames in NXOS to their full values, if required''' if not ifname: return '' if ifname.startswith('Eth') and 'Ether' not in ifname: return ifname.replace('Eth', 'Ethernet') elif ifname.startswith('Po') and 'port' not in ifname: return ifname.replace('Po', 'port-channel') return ifname def expand_eos_ifname(ifname: str) -> str: '''Expand shortned ifnames in EOS to their full values, if required''' if not ifname: return '' if ifname.startswith('Eth') and 'Ether' not in ifname: return ifname.replace('Eth', 'Ethernet') elif ifname.startswith('Po') and 'Port' not in ifname: return ifname.replace('Po', 'Port-Channel') elif ifname.startswith('Vx') and 'Vxlan' not in ifname: return ifname.replace('Vx', 'Vxlan') return ifname def ensure_single_instance(filename: str, block: bool = False) -> int: """Check there's only a single active instance of a process using lockfile It optionally can block waiting for the resource the become available. 
    Use a pid file with advisory file locking to assure this.

    :returns: fd if lock was successful or 0
    :rtype: int
    """
    basedir = os.path.dirname(filename)
    if not os.path.exists(basedir):
        # Permission error or any other error will abort
        os.makedirs(basedir, exist_ok=True)

    fd = os.open(filename, os.O_RDWR | os.O_CREAT, 0o600)
    if fd:
        try:
            if block:
                fcntl.flock(fd, fcntl.LOCK_EX)
            else:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            os.truncate(fd, 0)
            os.write(fd, bytes(str(os.getpid()), 'utf-8'))
        except OSError as err:
            # Bind the exception so we inspect the instance's errno,
            # not the OSError class attribute
            if err.errno == errno.EBUSY:
                # Return the PID of the process that's locked the file
                bpid = os.read(fd, 10)
                os.close(fd)
                try:
                    fd = -int(bpid)
                except ValueError:
                    fd = 0
            else:
                os.close(fd)
                fd = 0

    return fd


def expand_ios_ifname(ifname: str) -> str:
    """Get expanded interface name for IOSXR/XE given its short form

    :param ifname: str, short form of IOSXR interface name
    :returns: Expanded version of short form interface name
    :rtype: str
    """

    ifmap = {'BE': 'Bundle-Ether',
             'BV': 'BVI',
             'Fi': 'FiftyGigE',
             'Fo': 'FortyGigE',
             'FH': 'FourHundredGigE',
             'Gi': 'GigabitEthernet',
             'Gig': 'GigabitEthernet',
             'Hu': 'HundredGigE',
             'Lo': 'Loopback',
             'Mg': 'MgmtEth',
             'Nu': 'Null',
             'Po': 'Port-channel',
             'TE': 'TenGigE',
             'Te': 'TenGigabitEthernet',
             'Ten': 'TenGigabitEthernet',
             'TF': 'TwentyFiveGigE',
             'TH': 'TwoHundredGigE',
             'tsec': 'tunnel-ipsec',
             'tmte': 'tunnel-mte',
             'tt': 'tunnel-te',
             'tp': 'tunnel-tp',
             'Vl': 'Vlan',
             'CPU': 'cpu',
             }
    pfx = re.match(r'[a-zA-Z]+', ifname)
    if pfx:
        pfxstr = pfx.group(0)
        if pfxstr in ifmap:
            return ifname.replace(pfxstr, ifmap[pfxstr])

    return ifname


def get_sq_install_dir() -> str:
    '''Return the absolute path of the suzieq installation dir'''
    spec = find_spec('suzieq')
    if spec:
        return os.path.dirname(spec.loader.path)
    else:
        return os.path.abspath('./')


def get_sleep_time(period: str) -> int:
    """Returns the duration in seconds to sleep given a period

    Checking if the period format matches a specified format MUST be
    done by the caller.

    :param period: str, the period of form <value><unit>, '15m', '1h' etc
    :returns: duration to sleep in seconds
    :rtype: int
    """
    _, unit, _ = re.split(r'(\D)', period)
    now = datetime.now()
    nextrun = parse(period, settings={'PREFER_DATES_FROM': 'future'})
    if unit == 'm':
        nextrun = nextrun.replace(second=0)
    elif unit == 'h':
        nextrun = nextrun.replace(minute=0, second=0)
    else:
        nextrun = nextrun.replace(hour=0, minute=0, second=0)

    return (nextrun-now).seconds


def print_version():
    '''Print the suzieq version and return'''
    print(SUZIEQ_VERSION)
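For orientation, here is a minimal usage sketch of the logging helpers above. The cfg dictionary is hand-built to the {'<prog>': {'logfile': ...}} shape the docstrings describe; its values are illustrative, not from a real deployment.

# Hypothetical config following the hierarchy documented in
# get_log_params(); all values here are illustrative only.
cfg = {
    'poller': {
        'logfile': '/tmp/sq-poller.log',
        'logging-level': 'INFO',
        'logsize': 5000000,
        'log-stdout': True,
    }
}

# Resolve the per-worker log parameters, then create the logger
logfile, loglevel, logsize, log_stdout = poller_log_params(cfg, worker_id=2)
logger = init_logger('suzieq.poller', logfile, loglevel, logsize, log_stdout)
logger.info('poller worker 2 logging initialized')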
import java.util.*; public class Main { public static void main(String[] args) throws Exception { Scanner sc = new Scanner(System.in); int H = sc.nextInt(); int W = sc.nextInt(); Point.setSize(W, H); int Ch = sc.nextInt(); int Cw = sc.nextInt(); int Dh = sc.nextInt(); int Dw = sc.nextInt(); Point start = new Point(Cw, Ch); Point goal = new Point(Dw, Dh); char map[][] = new char[H+1][W+1]; int step[][] = new int[H+1][W+1]; for(int s=1; s<=H; s++) { String tmp = sc.next(); for(int t=1; t<=W; t++) { map[s][t] = tmp.charAt(t-1); step[s][t] = Integer.MAX_VALUE; } } ArrayDeque<Point> dq = new ArrayDeque<>(); dq.add(start); step[Ch][Cw] = 0; Diff walks[] = {new Diff(-1,0), new Diff(0,-1), new Diff(1, 0), new Diff(0,1) }; Diff warps[] = { new Diff(-2, -2), new Diff(-1, -2), new Diff(0, -2), new Diff(1, -2), new Diff(2, -2), new Diff(-2, -1), new Diff(-1, -1), new Diff(1, -1), new Diff(2, -1), new Diff(-2, 0), new Diff(2, 0), new Diff(-2, 1), new Diff(-1, 1), new Diff(1, 1), new Diff(2, 1), new Diff(-2, 2), new Diff(-1, 2), new Diff(0, 2), new Diff(1, 2), new Diff(2, 2), }; while(dq.size()>0) { Point p = dq.removeFirst(); int currentStep = step[p.y][p.x]; for(Diff d1 : walks) { Point p_walk = p.getNeighbourIfExist(d1); if(p_walk==null) continue; if(map[p_walk.y][p_walk.x] == '#') continue; if(step[p_walk.y][p_walk.x]<= currentStep) continue; step[p_walk.y][p_walk.x] = currentStep; dq.addFirst(p_walk); } for(Diff d2 : warps) { Point p_warp = p.getNeighbourIfExist(d2); if(p_warp==null) continue; if(map[p_warp.y][p_warp.x] == '#') continue; if(step[p_warp.y][p_warp.x]<=currentStep + 1) continue; step[p_warp.y][p_warp.x] = currentStep + 1; dq.addLast(p_warp); } } // for(int s=1; s<=H; s++) { // for(int t=1; t<=W; t++) { // System.out.printf("%d,", step[s][t] == Integer.MAX_VALUE ? -1 : step[s][t]); // } // System.out.println(); // } System.out.println(Integer.MAX_VALUE == step[goal.y][goal.x] ? -1 : step[goal.y][goal.x]); } static class Point { static int XX; static int YY; static void setSize(int x, int y) { XX = x; YY = y; } int x; int y; int step; Point(int a, int b) { x=a; y=b; } Point getNeighbourIfExist(Diff d) { if(x + d.xx <= 0 || x + d.xx > XX) return null; if(y + d.yy <= 0 || y + d.yy > YY) return null; return new Point(x + d.xx, y + d.yy); } } static class Diff { int xx; int yy; Diff(int a, int b) { xx=a; yy=b; } } }
package io.vertx.ext.asyncsql.impl; import com.github.mauricio.async.db.Connection; import com.github.mauricio.async.db.QueryResult; import io.vertx.core.json.JsonArray; import io.vertx.ext.asyncsql.impl.pool.AsyncConnectionPool; import io.vertx.ext.sql.UpdateResult; import scala.concurrent.ExecutionContext; /** * @author <a href="mailto:<EMAIL>"><NAME></a>. */ public class PostgreSQLConnectionImpl extends AsyncSQLConnectionImpl { public PostgreSQLConnectionImpl(Connection conn, AsyncConnectionPool pool, ExecutionContext ec) { super(conn, pool, ec); } @Override protected String getStartTransactionStatement() { // TODO: consider the tx isolation level return "BEGIN"; } @Override protected UpdateResult queryResultToUpdateResult(QueryResult qr) { int affected = (int) qr.rowsAffected(); return new UpdateResult(affected, new JsonArray()); } }
I speak to Jacob Rees-Mogg down a crackling phone line. Despite the poor quality of the sound, his voice is unmistakeable: those rounded Edwardian vowels; the careful, deliberate delivery of phrases which fall slightly at the end, like a gramophone needing an extra turn of the crank. It is as though some enterprising audiologist had devised the perfectly reassuring voice and presented it, with great doses of warmth and humour, in this double-breasted package. A figure of intrigue (and not a little amusement) in the political world since first standing for Parliament in the safe Labour seat of Central Fife in 1997, Rees-Mogg has a side which few in the Westminster bubble see: that of the successful financier. The investment company of which he is the chairman, Somerset Capital Management, was founded by Rees-Mogg, Dominic Johnson and Edward Robertson in 2007 and currently has $7.6 billion under management, with offices in London and Singapore. The three men were colleagues at Lloyd George Management in Hong Kong before leaving to found SCM. Robertson and Rees-Mogg had been responsible for building the company’s emerging-market products, the sector which forms the basis of SCM’s investments. His career as an investor began early; indeed, one might say he was something of a prodigy. At the age when most boys are playing cricket or football, Rees-Mogg was playing the stock market. ‘I was left £50 when I was ten by a fairly distant cousin, which my father invested in GEC shares on my behalf,’ he says. ‘I became interested in the market and was given some more shares by my father, which is when I began looking to see how the shares were performing and learning how to read company reports, balance sheets and so on in order to gauge that.’ That’s unusual enough, but what happened next was truly remarkable. ‘I had these GEC shares and went to a shareholders’ meeting,’ he says. ‘I’d been looking over the company report and thought the dividend was too low, so I voted against it. Then I went to the Lonrho AGM when it was going to buy the Observer. I thought this was a bad idea, so asked a question at the end.’ He was 12 at the time. It was as a result of this intervention that Rees-Mogg was photographed at home reading the Financial Times, a typewriter in front of him and a teddy bear behind. That photo, above, has often been used to portray him as an upper-class nerd, but looked at more closely it reveals something more fundamental to his character: an intense seriousness of purpose. Serious about money: Jacob at 12, pictured at home reading the FT Rees-Mogg never thought of his youthful investments as the pastime of a rich, dilettantish boy, despite later portrayals. Investing was, and remains, a serious business, to be undertaken only after thorough research and with considerable caution. The ‘Investment Philosophy’ page of SCM’s website emphasises sustainable returns, long-term management, research, and capping strategies — all fundamentals of the Rees-Mogg investment philosophy and, he says, the reason for his success. A Cherwell article on Rees-Mogg published during his first year at Oxford alleges that he was a millionaire by the age of 16 and paid his own school fees out of the profit on his early investments. This is untrue, he says, but shows the degree to which Rees-Mogg’s financial acumen had impressed — or, in some cases, annoyed — his contemporaries from an early age.
Similar stories (all untrue) about his having employed fellow Etonians to polish his shoes and follow him with an umbrella on cross-country runs reinforce the impression that here was a man whose self-confidence was off the charts, even by Etonian standards. It also explains his seeming imperviousness to ridicule, despite much of it being mercilessly cruel. The story of his having canvassed Fife in a Bentley with his nanny used to be wheeled out by his detractors in an attempt to show how terribly grand and unworldly he was. (‘It was a Mercedes,’ he tells me. ‘A Bentley would be most unsuitable for canvassing.’) Now it is more often told with affection, even by Labour colleagues in the Commons, which demonstrates his knack of deflecting criticism and eliciting warmth from the most unlikely sources. It helps that he’s genuinely self-deprecating. When I put it to him that SCM has done well because of his guiding principles, he demurs. ‘Our success is all down to my partners,’ he says. Following a first in history from Trinity College, Oxford, during which he continued to invest on his own behalf, Rees-Mogg went to work for Rothschild’s. It was here he came under the influence of legendary Anglo-Estonian fund manager Baron Nils Taube, described in his Daily Telegraph obituary as ‘one of the best [City fund managers], having returned more than 15 per cent a year for the two funds he ran continuously between 1969 and 2006 — a remarkable record’. Taube had begun as an office junior at Kitkat & Aitken in 1948, rising through the ranks to become an analyst in 1951, a fund manager in 1969 and a senior partner in 1975. He was renowned for his caution. ‘We only invest in countries where they wear overcoats in the winter,’ he joked. These principles of circumspection and deep research chimed very much with Rees-Mogg’s inclinations and reinforced his idea that they were the key to long-term investment success. ‘I went into investments out of interest and thought I knew a little. The longer I’ve been involved, the less I realise I’d known… I’m very lucky not to have lost most of my money — I simply hadn’t the knowledge I do now. I’ve never put all my eggs in one basket and I’ve always been cautious.’ This is the theme to which Rees-Mogg returns time and again during our conversation: before investing, one must gather as much knowledge as possible about what one proposes to buy and never become overexposed in any one particular area. I put it to him that his financial success and the accompanying security have enabled him, still at a relatively young age, to devote his life to public service — that his wealth makes him incorruptible, unlike some of his colleagues. Here I glimpse for the first time the steeliness beneath the surface of his courtly good manners. ‘Many of my colleagues have blemish-free records,’ he says. ‘It’s a very small number who have engaged in that sort of thing. Outside interests are a good thing for Parliament: the major benefit is the expertise these people bring to the institution. One doesn’t need money to run for Parliament, but it is undoubtedly expensive. The costs are not just specific costs, there’s also the effect on careers — one misses out on promotions. It isn’t easy to quantify, but it’s a price most are willing to pay. Indeed, the price paid by some is very great.’ There are, of course, dissenting voices, who say that his reputation as a financial wunderkind is undeserved. 
According to one former colleague at Lloyd George Management in Hong Kong, ‘Jacob was a pedestrian fund manager, always more interested in politics than investment — he never outperformed the index. He was great friends with Chris Patten and was always in and out of Government House, working on his political reputation.’ The same colleague also questions whether Rees-Mogg really is the courtly, self-effacing gent he always appears to be. ‘I saw him accost a former manager to complain his seven-figure bonus wasn’t sufficient — in the line-up at his own wedding,’ he says. Rees-Mogg’s wife, the former Helena de Chair, brought her own money to the marriage. She is the daughter of Lady Juliet Tadgell, formerly Marchioness of Bristol, and ex-Tory MP, author and poet Somerset de Chair. Lady Juliet is heir to the Fitzwilliam fortune and has an estimated net worth of £45 million, all of which Helena stands to inherit as her only surviving child. It was under one of his mother-in-law’s six Van Dycks that Rees-Mogg proposed to Helena at Bourne Park, her mother’s stately home in Kent, which she also stands to inherit. This will add to the family’s already substantial property portfolio, which currently comprises a Mayfair townhouse, a Grade II-listed manor house in Rees-Mogg’s Somerset constituency and an interest in several other London properties. When Helena comes into her inheritance, the Rees-Moggs’ net worth will be in excess of £100 million, and possibly as high as £150 million. Meanwhile, though he is no longer actively involved in the investment side of SCM (Dominic Johnson took over from him as chief executive in 2010), Rees-Mogg still receives an average of £11,730 a month in his capacity as a partner, which together with his MP’s salary gives him an income of at least £216,000 a year. As our conversation draws to a close, I ask Rees-Mogg whether he would leave finance and pursue other interests if he lost his seat. ‘No! Finance is my profession and my great interest. Had I lost my seat in 2015 I would have gone back to SCM and continued my career.’ It has been overwhelmingly evident throughout the interview that the Honourable Jacob Rees-Mogg MP is a member of that rare and happy breed who have been able to turn their passion into a career — in his case, doubly so. Whatever his political future, it’s safe to assume that he will be keeping a weather eye on the markets for many years to come, if only to manage the Mogg millions.
// FromProto creates new object metadata from the given proto metadata func FromProto(meta metaapi.ObjectMeta) ObjectMeta { var revision Revision if meta.Revision != nil { revision = Revision(meta.Revision.Num) } var timestamp time.Timestamp if meta.Timestamp != nil { timestamp = time.NewTimestamp(*meta.Timestamp) } return ObjectMeta{ Revision: revision, Timestamp: timestamp, Tombstone: meta.Type == metaapi.ObjectMeta_TOMBSTONE, } }
package estatio.prototype.servlet;

import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import estatio.prototype.database.DatabaseManager;
import estatio.prototype.database.InvoiceColumnName;

// URL patterns must start with '/' to be valid servlet mappings
@WebServlet(name = "InvoiceServlet", urlPatterns = { "/invoice" }, loadOnStartup = 1)
public class InvoiceServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        DatabaseManager dbm = new DatabaseManager();
        Connection conn = dbm.connect();
        String query = "SELECT * FROM Invoice LIMIT 10";

        PrintWriter out = resp.getWriter();
        out.println("<html>");
        out.println("<head>");
        out.println("<h1>Invoice Page</h1>");
        out.println("<style>");
        out.println("table, th, td {border: 1px solid black;}");
        out.println("</style>");
        out.println("</head>");
        out.println("<body>");
        out.println("<table style=\"width:100%\">");
        out.println("<tr>");
        out.println("<th>Invoice ID</th>");
        out.println("<th>Renter Name</th>");
        out.println("<th>Payment Method</th>");
        out.println("<th>Status</th>");
        out.println("<th>Amount</th>");
        out.println("<th>Currency</th>");
        out.println("<th>Created Date</th>");
        out.println("<th>Updated Date</th>");
        out.println("<th>Create Payment Request</th>");
        out.println("</tr>");

        if (conn != null) {
            try {
                PreparedStatement ps = conn.prepareStatement(query);
                ResultSet rs = ps.executeQuery();
                while (rs.next()) {
                    out.println("<tr>");
                    // Data cells belong in <td>, not <th>
                    for (int i = 1; i <= InvoiceColumnName.values().length; i++) {
                        out.println("<td>" + rs.getString(i) + "</td>");
                    }
                    out.println("<td><form action=\"payment\" method=\"POST\" >");
                    out.println("<input type=\"submit\" value=\"Create Payment\" />");
                    for (int i = 1; i <= InvoiceColumnName.values().length; i++) {
                        // Emit name="..." value="..." attribute pairs without stray text
                        out.println("<input type=\"hidden\" name=\""
                                + InvoiceColumnName.getColumnName(i).toString()
                                + "\" value=\"" + rs.getString(i) + "\"/>");
                    }
                    out.println("</form></td>");
                    out.println("</tr>");
                }
                rs.close();
                ps.close();
                conn.close();
                System.out.println("Disconnected from database");
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        out.println("</table>");
        out.println("</body>");
        out.println("</html>");
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
    }
}
package models import ( "bytes" "fmt" "io/ioutil" "net/http" "time" ) type Request struct { Id string `json:"id"` Created int64 `json:"created"` Method string `json:"method"` Protocol string `json:"protocol"` Header http.Header `json:"header"` ContentLength int64 `json:"contentLength"` RemoteAddr string `json:"remoteAddr"` Host string `json:"host"` RequestURI string `json:"requestURI"` Body string `json:"body"` FormValue map[string][]string `json:"formValue"` FormFile []string `json:"formFile"` } func CreateRequest(req *http.Request, maxBodySize int) *Request { var ( bodyValue string formValue map[string][]string formFile []string ) if body, err := ioutil.ReadAll(req.Body); err == nil { if len(body) > 0 && maxBodySize != 0 { if maxBodySize == -1 || req.ContentLength < int64(maxBodySize) { bodyValue = string(body) } else { bodyValue = fmt.Sprintf( "%s\n TRUNCATED , %d of %d", string(body[0:maxBodySize]), maxBodySize, req.ContentLength) } } req.Body = ioutil.NopCloser(bytes.NewBuffer(body)) defer req.Body.Close() } req.ParseMultipartForm(0) if req.MultipartForm != nil { formValue = req.MultipartForm.Value for key := range req.MultipartForm.File { formFile = append(formFile, key) } } else { formValue = req.PostForm } request := Request{ Id: stringGen.Generate(8), Created: time.Now().Unix(), Method: req.Method, Protocol: req.Proto, Host: req.Host, Header: req.Header, ContentLength: req.ContentLength, RemoteAddr: req.RemoteAddr, RequestURI: req.RequestURI, FormValue: formValue, FormFile: formFile, Body: bodyValue, } return &request }
// Match - matches object name with resource pattern. func (r Resource) Match(resource string, conditionValues map[string][]string) bool { pattern := r.Pattern for _, key := range condition.CommonKeys { if rvalues, ok := conditionValues[key.Name()]; ok && rvalues[0] != "" { pattern = strings.Replace(pattern, key.VarName(), rvalues[0], -1) } } return wildcard.Match(pattern, resource) }
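To make the substitution step concrete, here is a small, self-contained sketch of what Match does with a condition key. The key name, the ${aws:username} placeholder and the bucket layout are illustrative assumptions, and path.Match stands in for the wildcard.Match helper used by the real package:

package main

import (
	"fmt"
	"path"
	"strings"
)

func main() {
	// Hypothetical resource pattern containing a condition-key placeholder
	pattern := "mybucket/${aws:username}/*"
	conditionValues := map[string][]string{"username": {"alice"}}

	// Substitute the first condition value into the pattern, as Match does
	if v, ok := conditionValues["username"]; ok && v[0] != "" {
		pattern = strings.Replace(pattern, "${aws:username}", v[0], -1)
	}

	// path.Match is a simplified stand-in for wildcard.Match
	matched, _ := path.Match(pattern, "mybucket/alice/report.csv")
	fmt.Println(matched) // true
}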
// ---------------------------------------------------------------*- Java -*- // File: ./examples/src/java/PlantLocation.java // -------------------------------------------------------------------------- // Licensed Materials - Property of IBM // // 5724-Y48 5724-Y49 5724-Y54 5724-Y55 5725-A06 5725-A29 // Copyright IBM Corporation 1990, 2017. All Rights Reserved. // // Note to U.S. Government Users Restricted Rights: // Use, duplication or disclosure restricted by GSA ADP Schedule // Contract with IBM Corp. // -------------------------------------------------------------------------- /* ------------------------------------------------------------ Problem Description ------------------- A ship-building company has a certain number of customers. Each customer is supplied by exactly one plant. In turn, a plant can supply several customers. The problem is to decide where to set up the plants in order to supply every customer while minimizing the cost of building each plant and the transportation cost of supplying the customers. For each possible plant location there is a fixed cost and a production capacity. Both take into account the country and the geographical conditions. For every customer, there is a demand and a transportation cost with respect to each plant location. While a first solution of this problem can be found easily by CP Optimizer, it can take quite some time to improve it to a very good one. We illustrate the warm start capabilities of CP Optimizer by giving a good starting point solution that CP Optimizer will try to improve. This solution could be one from an expert or the result of another optimization engine applied to the problem. In the solution we only give a value to the variables that determine which plant delivers a customer. This is sufficient to define a complete solution on all model variables. CP Optimizer first extends the solution to all variables and then starts to improve it. 
------------------------------------------------------------ */ import ilog.concert.*; import ilog.cp.*; import java.io.*; public class PlantLocation { private static class DataReader { private StreamTokenizer st; public DataReader(String filename) throws IOException { FileInputStream fstream = new FileInputStream(filename); Reader r = new BufferedReader(new InputStreamReader(fstream)); st = new StreamTokenizer(r); } public int next() throws IOException { st.nextToken(); return (int) st.nval; } } public static void main(String args[]) throws IOException, IloException { IloCP cp = new IloCP(); DataReader data = new DataReader("../../../examples/data/plant_location.data"); int nbCustomer = data.next(); int nbLocation = data.next(); int[][] cost = new int[nbCustomer][]; for (int c = 0; c < nbCustomer; c++) { cost[c] = new int[nbLocation]; for (int w = 0; w < nbLocation; w++) { cost[c][w] = data.next(); } } int[] demand = new int[nbCustomer]; for (int c = 0; c < nbCustomer; c++) demand[c] = data.next(); int[] fixedCost = new int[nbLocation]; for (int w = 0; w < nbLocation; w++) fixedCost[w] = data.next(); int[] capacity = new int[nbLocation]; for (int w = 0; w < nbLocation; w++) capacity[w] = data.next(); IloIntVar cust[] = new IloIntVar[nbCustomer]; for (int c = 0; c < nbCustomer; c++) cust[c] = cp.intVar(0, nbLocation - 1); IloIntVar[] open = new IloIntVar[nbLocation]; IloIntVar[] load = new IloIntVar[nbLocation]; for (int w = 0; w < nbLocation; w++) { open[w] = cp.intVar(0, 1); load[w] = cp.intVar(0, capacity[w]); cp.add(cp.eq(open[w], cp.gt(load[w], 0))); } cp.add(cp.pack(load, cust, demand)); IloNumExpr obj = cp.scalProd(fixedCost, open); for (int c = 0; c < nbCustomer; c++) { obj = cp.sum(obj, cp.element(cost[c], cust[c])); } cp.add(cp.minimize(obj)); int[] custValues = { 19, 0, 11, 8, 29, 9, 29, 28, 17, 15, 7, 9, 18, 15, 1, 17, 25, 18, 17, 27, 22, 1, 26, 3, 22, 2, 20, 27, 2, 16, 1, 16, 12, 28, 19, 2, 20, 14, 13, 27, 3, 9, 18, 0, 13, 19, 27, 14, 12, 1, 15, 14, 17, 0, 7, 12, 11, 0, 25, 16, 22, 13, 16, 8, 18, 27, 19, 23, 26, 13, 11, 11, 19, 22, 28, 26, 23, 3, 18, 23, 26, 14, 29, 18, 9, 7, 12, 27, 8, 20 }; IloSolution sol = cp.solution(); for (int c = 0; c < nbCustomer; c++) { sol.setValue(cust[c], custValues[c]); } cp.setStartingPoint(sol); cp.setParameter(IloCP.DoubleParam.TimeLimit, 10); cp.setParameter(IloCP.IntParam.LogPeriod, 10000); cp.solve(); } }
/** * Print the smile. The number of sample and range is controlled by static variables * @param smile the smile function */ private void printSmile(Function1D<Double, Double> smile) { System.out.println("Strike\tImplied Volatility"); double range = (UPPER_STRIKE - LOWER_STRIKE) / (NUM_SAMPLES - 1.0); for (int i = 0; i < NUM_SAMPLES; i++) { double k = LOWER_STRIKE + i * range; double vol = smile.evaluate(k); System.out.println(k + "\t" + vol); } }
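A hedged usage sketch of the method above: it assumes the enclosing class's LOWER_STRIKE, UPPER_STRIKE and NUM_SAMPLES statics, and that Function1D exposes the single evaluate method the body suggests; the flat 25% smile is purely illustrative.

// Illustrative only: a flat-volatility "smile" passed to printSmile
Function1D<Double, Double> flatSmile = new Function1D<Double, Double>() {
  @Override
  public Double evaluate(Double strike) {
    return 0.25; // constant implied volatility at every strike
  }
};
printSmile(flatSmile);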
import { NgModule } from '@angular/core'; import { CommonModule } from '@angular/common'; import { IconModule, NavbarModule, SectionModule } from '@swimlane/ngx-ui'; import { PrismModule } from '../../common/prism/prism.module'; import { NavbarPageRoutingModule } from './navbar-page-routing.module'; import { NavbarPageComponent } from './navbar-page.component'; import { NavbarChildComponent1 } from './child-component-1.component'; import { NavbarChildComponent2 } from './child-component-2.component'; @NgModule({ declarations: [NavbarPageComponent, NavbarChildComponent1, NavbarChildComponent2], imports: [CommonModule, PrismModule, SectionModule, NavbarModule, IconModule, NavbarPageRoutingModule] }) export class NavbarPageModule {}
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;

/**
 * @author Jeremy Aldrich
 */
public class P1 {

    private boolean startFlag = false;
    TreeNode<Term> node;

    public TreeNode<Term> readFile(String fileName, String docName) {
        String word = null;
        String wordPuncRemoved = null;
        String htmlRemoved = null;
        BST bst = new BST();

        try {
            Scanner read = new Scanner(new File(fileName));
            while (read.hasNext()) {
                String temp = null;
                String tempTwo = null;
                word = read.next();
                boolean flag = false;
                if (!word.isEmpty()) {
                    htmlRemoved = removeHTML(word).replaceAll("\\s+", "");
                    wordPuncRemoved = removePunctuation(htmlRemoved).toLowerCase();
                    // removePunctuation() can leave a space inside the token
                    // (e.g. for hyphenated words such as "pre-req"), so split
                    // it into two words here
                    for (int i = 0; i < wordPuncRemoved.length(); i++) {
                        if (wordPuncRemoved.charAt(i) == ' ' && i >= 1) {
                            flag = true;
                            temp = wordPuncRemoved.substring(0, i);
                            tempTwo = wordPuncRemoved.substring(i + 1, wordPuncRemoved.length());
                        }
                    }
                    if (flag) {
                        temp = temp.replaceAll("\\s+", "");
                        tempTwo = tempTwo.replaceAll("\\s+", "");
                        if (!temp.isEmpty())
                            bst.add(docName, temp);
                        if (!tempTwo.isEmpty())
                            bst.add(docName, tempTwo);
                        flag = false;
                    } else {
                        wordPuncRemoved = wordPuncRemoved.replaceAll("[\\s]*", "");
                        if (!wordPuncRemoved.isEmpty()) {
                            bst.add(docName, wordPuncRemoved);
                        }
                    }
                }
            }
            read.close();
        } catch (FileNotFoundException e) {
            System.err.println("Error: input file not found!");
        }
        node = bst.getNode();
        return node;
    }

    /*
     * Removes special characters, including punctuation. A space is
     * substituted so that hyphenated words such as "pre-req" can be
     * split into their parts.
     */
    private String removePunctuation(String word) {
        return word.replaceAll("[&@#$%^*()\\\"\\\\/$\\-\\!\\+\\=|(){},.;:!?\\%]+", " ");
    }

    /*
     * Removes the HTML tags starting with < and ending with >
     */
    private String removeHTML(String word) {
        String result = "";
        for (int i = 0; i < word.length(); i++) {
            char character = word.charAt(i);
            // once < is found we stop adding text until the matching >
            if (character == '<') {
                startFlag = true;
            }
            if (startFlag) {
                // end bracket > found so we can continue to add
                if (character == '>') {
                    startFlag = false;
                }
            } else {
                result += character;
            }
        }
        return result;
    }

    public static void main(String[] args) {
        if (args.length == 0) {
            System.err.println("Error: No args found!");
            return;
        }
        String fileName = args[0];
        String docName = "test";
        P1 p = new P1();
        p.readFile(fileName, docName);
    }
}
package com.dynatrace.diagnostics.plugins.jmx.monitor; import java.util.HashMap; import java.util.Iterator; import java.util.Properties; import java.util.ArrayList; import javax.management.MBeanServerConnection; import javax.management.ObjectName; import com.dynatrace.diagnostics.plugins.jmx.variableholder.ThreadWorkmanagerStats; import com.dynatrace.diagnostics.plugins.jmx.variableholder.ThreadWorkmanagerTimeHolder; import com.dynatrace.diagnostics.pdk.MonitorEnvironment; import com.dynatrace.diagnostics.plugins.jmx.WeblogicConstants; import java.util.logging.Logger; import java.util.concurrent.Callable; import weblogic.management.runtime.ExecuteThread; public class ThreadsWork implements Callable<ArrayList<ThreadWorkmanagerStats>>, WeblogicConstants { private ArrayList<ThreadWorkmanagerStats> test; public ThreadsWork(ArrayList<ThreadWorkmanagerStats> test){ this.test = test; } private static final Logger log = Logger.getLogger(ThreadsWork.class.getName()); private ArrayList<ThreadWorkmanagerStats> workList = new ArrayList<ThreadWorkmanagerStats>(); ThreadWorkmanagerTimeHolder ad = new ThreadWorkmanagerTimeHolder(); MBeanServerConnection connections; ObjectName destRT; MonitorEnvironment envs; String environments; public ThreadsWork(ObjectName destR, MBeanServerConnection connection, MonitorEnvironment env, String environment) { envs = env; destRT = destR; connections = connection; environments=environment; } @Override public ArrayList<ThreadWorkmanagerStats> call() throws Exception { try { HashMap<String, Properties> threadWorkManagerMap = new HashMap<String, Properties>(); String name = (String) connections.getAttribute(destRT, "Name"); ObjectName threadRT = (ObjectName) connections.getAttribute(destRT, "ThreadPoolRuntime"); ExecuteThread[] executeThreads2 = (ExecuteThread[]) connections.getAttribute(threadRT, "ExecuteThreads"); for (int jj = 0; jj < executeThreads2.length; jj++) { String currReq = executeThreads2[jj].getCurrentRequest(); if (currReq == null) { continue; } String appName = executeThreads2[jj].getApplicationName(); String modName = executeThreads2[jj].getModuleName(); String workManager = executeThreads2[jj].getWorkManagerName(); if (appName == null) { appName = "undefined"; } if (modName == null) { modName = "undefined"; } if (workManager == null || workManager.equals("")) { workManager = "default"; } if (threadWorkManagerMap.containsKey(workManager)) { Properties workManagerProps = threadWorkManagerMap.get(workManager); threadWorkManagerMap.put(workManager, workManagerProps); int threadCount = Integer.parseInt(workManagerProps.getProperty("threadCount")); threadCount++; workManagerProps.setProperty("threadCount", String.valueOf(threadCount)); } else { Properties workManagerProps = new Properties(); threadWorkManagerMap.put(workManager, workManagerProps); workManagerProps.setProperty("serverName", name); workManagerProps.setProperty("appName", appName); workManagerProps.setProperty("threadCount", "1"); workManagerProps.setProperty("modName", modName); } } for (Iterator<String> it = threadWorkManagerMap.keySet().iterator(); it.hasNext();) { ThreadWorkmanagerStats statsWork = new ThreadWorkmanagerStats(); String workManager1 = (String) it.next(); Properties workManagerProps = threadWorkManagerMap.get(workManager1); int threadCount = Integer.parseInt(workManagerProps.getProperty("threadCount")); statsWork.setserverName(name); statsWork.setEnviro(environments); statsWork.setTotalThreads(threadCount); statsWork.setWorkManager(workManager1); String appName1 = 
workManagerProps.getProperty("appName"); statsWork.setApplication(appName1); String modName = workManagerProps.getProperty("modName"); statsWork.setModName(modName); workList.add(statsWork); ad.addServerStatsList(workList); workList.clear(); } } catch (Exception e) { log.info("Workmanager Exception: " + e); } test = ad.getArrayList(); return test; } }
#include <stdio.h>
#include <stdlib.h>

int main()
{
    int n, m, i, j, *a, *b, temp, count = 0;

    scanf("%d", &n);
    a = (int *)malloc(n * sizeof(int));
    for (i = 0; i < n; i++)
        scanf("%d", &a[i]);

    scanf("%d", &m);
    b = (int *)malloc(m * sizeof(int));
    for (i = 0; i < m; i++)
        scanf("%d", &b[i]);

    /* sort both arrays in ascending order */
    for (i = 0; i < n - 1; i++)
        for (j = i + 1; j < n; j++) {
            if (a[j] < a[i]) {
                temp = a[i];
                a[i] = a[j];
                a[j] = temp;
            }
        }

    for (i = 0; i < m - 1; i++)
        for (j = i + 1; j < m; j++) {
            if (b[j] < b[i]) {
                temp = b[i];
                b[i] = b[j];
                b[j] = temp;
            }
        }

    /* greedily pair up elements that differ by at most 1; the loop must
       stop when either array is exhausted (&& rather than ||), otherwise
       a[i] or b[j] would be read out of bounds */
    i = 0;
    j = 0;
    while (i < n && j < m) {
        if (a[i] - b[j] == -1 || a[i] - b[j] == 0 || a[i] - b[j] == 1) {
            i++;
            j++;
            count++;
        } else if (a[i] - b[j] >= 2)
            j++;
        else
            i++;
    }

    printf("%d", count);
    free(a);
    free(b);
    return 0;
}
#include <stdio.h>

int main()
{
    /* b is initialized so it is never read undefined if no element
       updates max */
    int i, a, b = 0, k[101] = {0}, max = 0, sum = 0;

    scanf("%d", &a);
    for (i = 0; i < a; i++) {
        scanf("%d", &k[i]);
        /* remember the index of the largest element */
        if (max < k[i]) {
            max = k[i];
            b = i;
        }
    }

    /* sum all elements, counting the largest one at half value */
    for (i = 0; i < a; i++) {
        if (i == b) {
            sum = sum + (k[i] / 2);
        } else {
            sum = sum + k[i];
        }
    }

    printf("%d", sum);
    return 0;
}
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// Given a string containing only digits,
// restore it by returning all possible valid IP address combinations.
// A valid IP address consists of exactly four integers (each integer
// is between 0 and 255) separated by single points.
// Example:
// Input: "25525511135"
// Output: ["255.255.11.135", "255.255.111.35"]
// Solution: dfs (note: a segment starting with '0' is only valid as "0" itself)
func restoreIpAddresses(s string) []string {
	res := []string{}

	var dfs func(start int, parts []string)
	dfs = func(start int, parts []string) {
		if len(parts) == 4 && start >= len(s) {
			res = append(res, strings.Join(parts, "."))
		}
		if len(parts) == 4 || start >= len(s) {
			return
		}
		if s[start] == '0' {
			// a leading zero can only form the single-digit segment "0"
			dfs(start+1, append(parts, "0"))
		} else {
			for i := start; i < len(s); i++ {
				num, _ := strconv.Atoi(s[start : i+1])
				if num <= 255 {
					dfs(i+1, append(parts, strconv.Itoa(num)))
				} else {
					break
				}
			}
		}
	}

	dfs(0, []string{})
	return res
}

func main() {
	fmt.Println(restoreIpAddresses("0000"))
	fmt.Println(restoreIpAddresses("25525511135"))
	fmt.Println(restoreIpAddresses("255255255255"))
	fmt.Println(restoreIpAddresses("010010"))
}
// // This file is part of nuBScript Project // Copyright (c) <NAME> (<EMAIL>) // All rights reserved. // Licensed under the MIT License. // See COPYING file in the project root for full license information. // /* -------------------------------------------------------------------------- */ #include "nu_icstring.h" /* -------------------------------------------------------------------------- */ namespace nu { /* -------------------------------------------------------------------------- */ icstring_t& icstring_t::operator=(icstring_t&& s) noexcept { if (this != &s) _data = std::move(s._data); return *this; } /* -------------------------------------------------------------------------- */ size_t icstring_t::find(std::string searching_s) { std::string s = _data; std::transform(s.begin(), s.end(), s.begin(), ::tolower); std::transform( searching_s.begin(), searching_s.end(), searching_s.begin(), ::tolower); return s.find(searching_s); } /* -------------------------------------------------------------------------- */ bool icstring_t::operator<(const icstring_t& s) const noexcept { if (s._data.empty()) return false; if (_data.empty()) return true; return strcasecmp(_data.c_str(), s._data.c_str()) < 0; } /* -------------------------------------------------------------------------- */ } // namespace nu
Capacity, Capacity Drop, and Relation of Capacity to the Path Width in Bicycle Traffic Bicycle usage is encouraged in many cities because of its health and environmental benefits. As a result, bicycle traffic increases which leads to questions on the requirements of bicycle infrastructure. Design guidelines are available but the scientific substantiation is limited. This research contributes to understanding bicycle traffic flow by studying the aggregated movements of cyclists before and after the onset of congestion within the setting of a controlled bottleneck flow experiment. The paper quantitatively describes the relation between capacity and path width, provides a qualitative explanation of this relation by analyzing the cyclist configuration for different path widths, and studies the existence of a capacity drop in bicycle flow. Using slanted cumulative curves and regression analysis, the capacity of a bicycle path is found to increase linearly with increasing path width. A steady drop in flow rate is observed after the onset of congestion, indicating that the capacity drop phenomenon is observed in bicycle traffic. The results presented in this paper can help city planners to create bicycle infrastructure that can handle high cyclist demand.
# -*- coding: utf-8 -*-
"""CircleCore CLI."""
import importlib

# project module
from .cli_main import cli_main as cli_entry

for key in (
    'box', 'invitation', 'module', 'replication_link', 'replication_master', 'schema', 'user', 'cliutil', 'debug'
):
    mod = importlib.import_module('.{}'.format(key), __name__)
    group = getattr(mod, 'cli_{}'.format(key))
    cli_entry.add_command(group)
/** * Load and process document list. * * @param url * the url * @param processStrategy * the process strategy * @throws XmlAgentException * the xml agent exception */ private void loadAndProcessDocumentList(final String url, final ProcessDataStrategy<DocumentElement> processStrategy) throws XmlAgentException { final DocumentContainerElement dokumentLista = ((JAXBElement<DocumentContainerElement>) xmlAgent.unmarshallXml( riksdagenDocumentListMarshaller, url, HTTP_DOKUMENTLISTA_RIKSDAGEN_EXTERNAL_MODEL_CIA_HACK23_COM_IMPL, null, null)).getValue(); int resultSize = dokumentLista.getDokument().size(); processAll(dokumentLista.getDokument(), processStrategy); final BigInteger pages = dokumentLista.getTotalPages(); for (int i = 1; i < pages.intValue(); i++) { final DocumentContainerElement otherPagesdokumentLista = ((JAXBElement<DocumentContainerElement>) xmlAgent .unmarshallXml(riksdagenDocumentListMarshaller, url + PAGE_PROPERTY + i, HTTP_DOKUMENTLISTA_RIKSDAGEN_EXTERNAL_MODEL_CIA_HACK23_COM_IMPL, null, null)).getValue(); resultSize = resultSize + otherPagesdokumentLista.getDokument().size(); processAll(otherPagesdokumentLista.getDokument(), processStrategy); LOGGER.info(LOADING_DOCUMENTS, resultSize, dokumentLista.getHits()); } }
import { IApplicationSearchResult } from 'core/domain';
import { AbstractBaseResultRenderer, ITableColumnConfigEntry } from 'core/search/searchResult/AbstractBaseResultRenderer';

import './application.less';

export class ApplicationDisplayRenderer extends AbstractBaseResultRenderer<IApplicationSearchResult> {

  private static instance: ApplicationDisplayRenderer = new ApplicationDisplayRenderer();

  public static renderer() {
    return ApplicationDisplayRenderer.instance;
  }

  public getRendererClass(): string {
    return 'application';
  }

  public getKey(item: IApplicationSearchResult): string {
    return item.application;
  }

  public sortItems(items: IApplicationSearchResult[]): IApplicationSearchResult[] {
    return items.sort((a, b) => a.application.localeCompare(b.application));
  }

  public getColumnConfig(): ITableColumnConfigEntry<IApplicationSearchResult>[] {
    return [
      { key: 'application', label: 'Name', cellRenderer: this.hrefCellRenderer },
      { key: 'accounts', label: 'Account', cellRenderer: this.accountCellRenderer },
      { key: 'email', label: 'Owner Email', cellRenderer: this.defaultCellRender }
    ];
  }
}
/* delete an element from the front of the deque and return it;
   returns -1 if the deque is empty */
int delete_front()
{
    int item;
    if (dq.front == -1) /* empty deque */
        return -1;

    item = dq.arr[dq.front];
    if (dq.front == dq.rear) {
        /* the deque held a single element, so mark it empty */
        dq.front = -1;
        dq.rear = -1;
    } else {
        /* advance front circularly; 10 is the array capacity */
        dq.front = (dq.front + 1) % 10;
    }
    return item;
}
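The function above references a global dq that is not shown. The following sketch spells out the shape it assumes: a circular array of capacity 10 with front and rear indices, where -1 means empty. The struct and field names mirror the usage above but are assumptions.

/* Assumed definition of the global deque used by delete_front() */
#define CAPACITY 10

struct deque {
    int arr[CAPACITY];  /* circular storage */
    int front;          /* index of first element, -1 when empty */
    int rear;           /* index of last element, -1 when empty */
};

struct deque dq = { {0}, -1, -1 };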
/**
 * Created by dynamicheart on 5/14/2017.
 */
public class NoticeListAdapter extends ArrayAdapter<Post> {

    private int resourceId;

    public NoticeListAdapter(Context context, int textViewResourceId, List<Post> objects) {
        super(context, textViewResourceId, objects);
        resourceId = textViewResourceId;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        Post post = getItem(position);
        View view;
        ViewHolder viewHolder;
        if (convertView == null) {
            // inflate against the parent so layout params are resolved correctly
            view = LayoutInflater.from(getContext()).inflate(resourceId, parent, false);
            viewHolder = new ViewHolder();
            viewHolder.imageViewAvatar = (CircleImageView) view.findViewById(R.id.message_avatar);
            viewHolder.textUsername = (TextView) view.findViewById(R.id.message_username);
            viewHolder.imageViewPicture = (ImageView) view.findViewById(R.id.message_picture);
            view.setTag(viewHolder);
        } else {
            view = convertView;
            viewHolder = (ViewHolder) view.getTag();
        }

        Picasso.with(getContext()).load(post.getUser().getAvatar()).into(viewHolder.imageViewAvatar);
        viewHolder.textUsername.setText(post.getUser().getUsername());
        Picasso.with(getContext()).load(post.getImage()).into(viewHolder.imageViewPicture);

        view.setOnClickListener(v -> {
            Intent intent = new Intent(getContext(), MessageActivity.class);
            getContext().startActivity(intent);
        });
        return view;
    }

    class ViewHolder {
        CircleImageView imageViewAvatar;
        TextView textUsername;
        ImageView imageViewPicture;
    }
}
n, k = map(int, input().split())

# Only 240 - k minutes are usable; item i takes 5*i minutes.
# Count how many items fit, in order, within the remaining time.
time_left = 240 - k
total = 0
count = 0
for i in range(1, n + 1):
    total += 5 * i
    if total > time_left:
        break
    count += 1
print(count)
With an image of the U.S. Constitution as his background, NSA whistleblower Edward Snowden beamed in through a choppy Google Hangouts video feed to call on the technologists at the 2014 South by Southwest Interactive Festival in Austin, Texas, to help “fix” the problem of mass government surveillance through easy-to-use encryption technology. “I would say South by Southwest and the technology community – the people who are in Austin right now – they’re the folks who can really fix things, who can enforce our rights through technical standards even when Congress hasn’t yet gotten to the point of creating legislation to protect our rights in the same manner,” said Snowden in his opening remarks. He added that, in addition to a “policy response” to mass surveillance activities, “there’s also a technical response. And it’s the makers, it’s the thinkers, it’s the development community that can really crack those solutions to make sure we’re safe.” “You guys who are in the room now are all the firefighters,” he said. “And we need you to help fix this.” ACLU principal technologist Christopher Soghoian, who appeared onstage alongside Snowden’s legal adviser Ben Wizner, said developers’ lack of consideration for security and encryption is what has allowed widespread government surveillance to occur. “We need to make services secure out of the box,” said Soghoian. “And that’s going to require a re-think by developers – it’s going to require that developers think about security early on, rather than later on down the road.” Soghoian went on to add that, since the disclosures by Snowden, major technology companies have greatly improved their security offerings. Within the past eight months, for example, both Google and Yahoo have increased their use of SSL encryption by default for users. Despite these improvements, said Soghoian, the additional security still allows these companies to collect user data, which in turn allows the government to obtain user data from the companies – something that is not likely to change thanks to their advertising-based business models that rely on users’ information. “The irony that we’re using Google Hangouts to talk to Snowden has not been lost on me or our team here,” Soghoian said to applause. He also indicated that the video feed had been routed through “several” proxy servers for security purposes (Wizner earlier indicated the exact number was seven, a possible allusion to this meme), which led to the poor quality of the video stream. “This in fact I think reflects the state of play for many services,” said Soghoian. “You have to choose between a service that’s easy to use, reliable, and polished, or a tool that is highly secure and impossible for the average person to use.” Snowden said he thinks we’re seeing “a lot of progress” in terms of more accessible products with encryption at the forefront, but agreed that far more work needs to be done. By enabling more people to use encrypted communications, he said, it will simply become too expensive for the NSA and other government agencies to conduct mass surveillance at current levels. “The bottom line … is that encryption does work,” said Snowden, shooting down rumors that the NSA had cracked popular encryption standards. Still, he said, more advancement needs to be made to improve the encryption standards we have available. At the end of the event, Snowden said that he believes his disclosures have allowed the public to engage in a meaningful debate about privacy protections, and said he does not regret leaking classified information to the press.
“When it comes to the question, would I do this again? The answer is yes,” said Snowden. “Regardless of what happens to me, this is something we have the right to know.” Ed. note: This post was updated with new text after the conclusion of Snowden’s appearance at SXSW 2014.
import React from 'react'; import { CopyToClipboard } from 'react-copy-to-clipboard'; import { message, Tooltip } from 'antd'; import { CopyOutlined } from '@ant-design/icons'; import styles from './index.less'; export type Props = { text: string; }; const CopyText: React.FC<Props> = (props) => { const copyHandle = (): void => { message.success('Copy Success'); }; return ( <> <CopyToClipboard text={props.text} onCopy={copyHandle}> <Tooltip title="copy" key={'copy'}> {props.children ? ( props.children ) : ( <CopyOutlined className={styles.iconColor} /> )} </Tooltip> </CopyToClipboard> </> ); }; export default CopyText;
import { IMapping, IMappingItems } from "./interfaces/typemapper.interface";
import { Types } from "./enums/types.enum";
declare class Mapping<ISource, IDest> implements IMapping<ISource, IDest> {
    source: string;
    dest: string;
    items: IMappingItems[];
    constructor(source: string, dest: string);
    private readonly lastItem;
    map(source: (type: ISource) => any, destination: (type: IDest) => any): IMapping<ISource, IDest>;
    conditions: (check: (s: ISource, d?: IDest | undefined) => boolean) => IMapping<ISource, IDest>;
    is(type: Types): IMapping<ISource, IDest>;
}
export default Mapping;
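A speculative usage sketch of the declared fluent API. The Person/PersonDto shapes, the import path, and the existence of a Types.String member are assumptions for illustration only:

import Mapping from "./mapping"; // assumed module path
import { Types } from "./enums/types.enum";

// Hypothetical source and destination shapes
interface Person { firstName: string; age: number; }
interface PersonDto { name: string; isAdult: boolean; }

const mapping = new Mapping<Person, PersonDto>("Person", "PersonDto");

// map() pairs a source selector with a destination selector; is() tags
// the mapped item with a type, conditions() guards it with a predicate
mapping
  .map(src => src.firstName, dest => dest.name)
  .is(Types.String); // assumes the Types enum has a String member

mapping
  .map(src => src.age, dest => dest.isAdult)
  .conditions(src => src.age >= 18);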
Effect of 200 MeV Ag16+ swift heavy ion irradiation on structural and magnetic properties of M-type barium hexaferrite

M-type barium hexagonal ferrite (BaFe12O19) has been synthesized by the sol-gel auto-combustion method. The synthesized material was irradiated with 200 MeV Ag16+ ions using the 15UD Pelletron tandem accelerator, and the changes in the structure and surface morphology of the material were investigated. The pristine (as-synthesized) and irradiated samples were characterized using different experimental techniques such as x-ray diffraction (XRD), Fourier-transform infrared (FTIR) spectroscopy, transmission electron microscopy (TEM) and vibrating sample magnetometry (VSM). The strong absorption peaks between 580 and 440 cm−1 in the infrared spectrum and the XRD patterns confirmed the formation of the ferrite structure for both irradiated and pristine samples. The XRD peaks of the irradiated barium hexagonal ferrite were slightly broadened compared with those of the pristine samples. The crystallite size of the irradiated barium hexagonal ferrite was larger than that of the pristine material, consistent with the TEM images. Both saturation magnetization and coercivity decreased with irradiation.

Introduction

Over the last two decades, the effects of swift heavy ion irradiation on magnetic oxides and ferrites have been investigated to understand the modifications to their physical, magnetic and dielectric properties. It is an established phenomenon that irradiation of solids with energetic particle beams can create a wide variety of defect states. In some materials, swift heavy ion irradiation in the MeV range can create additional defects and drive phase transformations and anisotropic growth. Swift heavy ion irradiation is thus an important tool for manipulating the properties of materials, and it provides an alternative to photons for introducing electronic excitations into a material. Hexagonal ferrites have attracted the attention of researchers due to their technological applications in electronic and magnetic devices. Several research groups have tried to tune the magnetic anisotropy of M-type strontium hexaferrite crystals by swift heavy ion irradiation. Panchal et al reported the effect of swift heavy ion irradiation on the structural and magnetic properties of strontium hexaferrites, where the intensity and FWHM of all the peaks increased. M-type BaFe12O19 holds a special place due to its application in permanent magnets. Owing to its low cost, excellent chemical stability and corrosion resistance, M-type BaFe12O19 has also been studied for microwave communication, microwave anechoic chamber and anti-electromagnetic-radiation applications. For this purpose, we synthesized M-type barium hexagonal ferrite (BaFe12O19) and irradiated it with swift heavy ions to tune its structural and magnetic properties.

Experimental procedure

Barium nitrate Ba(NO3)2 (Merck, GR grade) and iron nitrate Fe(NO3)3·9H2O (Sigma Aldrich, >98% purity) were used as precursor materials. First, stoichiometric amounts of the materials were dissolved in distilled water to produce the target product, BaFe12O19.
Citric acid C6H8O7 (Merck, GR grade) was then added as combustion fuel, and an aqueous solution of NH4OH (Merck, GR grade) was added dropwise to maintain the pH at 7. The solution was then heated on a hot plate at 80-90°C to evaporate the remaining water. Finally, the solution turned into a viscous gel, which self-ignited and burnt. The resulting ash was crushed and preheated at 500°C, followed by final calcination at 950°C for 4 h. The prepared barium hexagonal ferrite was then irradiated with 200 MeV Ag16+ ions at a fluence of 1×10^13 ions cm−2 using the 15UD Pelletron Accelerator at the Inter University Accelerator Centre (IUAC), New Delhi, India. TRIM/SRIM calculations were used to estimate the electronic energy loss, nuclear energy loss and range of the 200 MeV Ag16+ ion beam. Both pristine and irradiated barium hexagonal ferrites were characterized using different experimental techniques such as x-ray diffraction (XRD), Fourier-transform infrared (FTIR) spectroscopy, transmission electron microscopy (TEM) and vibrating sample magnetometry (VSM). XRD patterns were recorded on a SEIFERT XRD 3000 PTS over the diffraction angle (2θ) range 20° to 80° using CuKα radiation (λ=1.5405 Å). FTIR spectra were taken at room temperature in the wavenumber range 4000 to 400 cm−1 using a Bruker Tensor-27 spectrometer. The particle size of the pristine and irradiated hexagonal ferrites was examined with a transmission electron microscope (Philips CM 200, USA). Field-dependent magnetization was recorded using a vibrating sample magnetometer (VSM: EG & G Princeton Applied Research, Model 4500) with a maximum field of 15 kOe.

Results and discussion

FTIR spectra of the pristine and irradiated barium hexaferrite samples are shown in figure 1. Two absorption bands at 580 and 440 cm−1 were observed in both pristine and irradiated samples. These bands correspond to vibrations of the metal-oxygen bonds. The intensity of these bands is found to decrease in the irradiated sample when compared to the pristine barium hexaferrite sample. The peaks in the FTIR spectrum arise from dipole moments excited by molecular vibrations. The decrease in intensity in the IR spectrum of the irradiated sample may be due to the shifting of some small ions to interstitial positions in the crystal lattice after irradiation. XRD patterns of both pristine and irradiated samples are shown in figure 2. Phase identification of the XRD patterns shows M-type barium hexagonal ferrite (space group P63/mmc) with small impurity peaks of Ba2Fe6O11 for both pristine and irradiated samples. It can be seen from figure 2 that the intensity of the orthorhombic Ba2Fe6O11 impurity phase (a=23.024 Å, b=5.181 Å, c=8.900 Å) increases slightly with irradiation compared to the main phase. This implies that the two phases are in equilibrium, with irradiation increasing the fraction of Ba2Fe6O11. The lattice parameters a=5.892 Å and c=23.183 Å agree with JCPDS file PDF#84-0757. It can be said that the basic hexagonal crystal structure remains almost the same after irradiation. Comparing the XRD peaks of the pristine and irradiated samples, the peak widths and intensities were altered slightly: the peaks of the pristine sample are more intense and sharper, with smaller width, than those of the irradiated sample.
Irradiation of the barium hexaferrite sample causes inelastic collisions of high-energy ions with the lattice and introduces either point defects or partial re-crystallization and track formation in the material, which alter the crystal lattice and the peak intensities. The broadened diffraction peaks in the XRD pattern of the irradiated barium hexaferrite sample clearly indicate that the mean particle size is in the nanometer range. The crystallite sizes were calculated using Scherrer's formula from the full width at half maximum (β), the x-ray wavelength (λ) and the Bragg angle θ. The lattice constants, unit-cell volume and crystallite size of both pristine and irradiated BaFe12O19 samples were calculated and the values are given in table 1. There is not much change in the lattice constants and unit-cell volume, but the crystallite size was found to be larger in the irradiated sample. The c/a ratio is found to be 3.933 in both pristine and irradiated barium hexaferrite samples and is in conformity with the value reported for the M-type hexagonal structure. The increase in crystallite size with heavy ion irradiation in the present investigation indicates stress-induced defects and distortion in the lattice. However, the nature of the substance gets altered at higher radiation exposure. TEM micrographs of the pristine and irradiated barium hexaferrite samples are shown in figure 3. The pristine sample shows more uniform grain clusters with a little agglomeration. However, it is difficult to determine the exact particle size in the irradiated barium hexaferrite samples. The TEM images show that clustering is greater in the irradiated samples. Swift ion irradiation creates more clusters in the irradiated BaFe12O19 sample due to the local heat generated during the irradiation process. In addition, the particle size of the irradiated barium hexaferrite sample is larger than that of the pristine sample. This observation is similar to that made for the effects of 200 MeV Ag15+ ion irradiation on the structural properties of nanocrystalline ferrites reported in the literature. The electronic energy loss in the nanoparticles occurred due to inelastic collisions of high-energy ions with the host atoms and molecules during swift heavy ion irradiation. This introduces either point/cluster-like defects/imperfections or partial amorphization, depending on the radiation dose and the amount of energy lost. Room-temperature hysteresis loops of the pristine and irradiated barium hexaferrite samples are shown in figure 4. The saturation magnetization (Ms) and coercivity (Hc) of the irradiated sample are slightly lower than those of the pristine sample. From figure 4, full saturation of the samples could not be attained at the maximum applied field. Therefore, the values of saturation magnetization were determined by extrapolating the plot of magnetization (M) against the inverse of the applied field (1/H), with Ms obtained at 1/H=0. As discussed previously, the irradiated hexaferrite samples exhibit clustering and agglomeration of grains. Mosleh et al adopted a co-precipitation route to prepare barium hexaferrite nanoparticles; their saturation magnetization varied from 46 to 42.2 emu g−1 as the annealing temperature was varied from 900 to 1200°C. However, the particle size in the present investigation is much smaller than those earlier reported values.
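For completeness, the two relations used in the analysis above can be written explicitly. The crystallite size D follows from Scherrer's formula,

D = Kλ / (β cos θ),

where the shape factor K ≈ 0.9 is the usual assumption (the source does not state the value used). The saturation magnetization follows from the high-field extrapolation of M against 1/H; assuming the standard law-of-approach form,

M(H) = Ms (1 − a/H),

with a a fitting constant, M tends to Ms as 1/H → 0, which is the intercept used above. The law-of-approach form is our assumption; the source states only the 1/H = 0 extrapolation.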
With swift heavy-ion irradiation of the barium hexaferrite, the magnetization decreases by nearly 33% relative to the pristine sample. That the irradiated barium hexaferrite sample shows a smaller magnetization than the pristine one implies swift heavy-ion-induced disorder. From the TEM observations of both the pristine and irradiated samples, the crystallite/particle size is of the order of nanometers and is almost the same in both. Generally, the single-domain state existing in nanocrystalline samples requires higher fields to orient the moments along the applied-field direction. The energetic swift heavy ions entering the sample alter the atomic ordering by displacing atoms from their regular lattice sites. Amorphous tracks formed during irradiation help to suppress the atomic and ferrimagnetic long-range ordering. Irradiation disturbs the Fe3+ 4f1 (tetrahedral) and 4f2 (octahedral) sites more effectively than the 12k sites. This reduces the Fe3+-O-Fe3+ superexchange interactions at both the octahedral and tetrahedral crystallographic sites and decreases the magnetization of the irradiated sample. The growth of the Ba2Fe6O11 impurity phase on irradiation might be another reason for the decreasing saturation magnetization.

The cause of magnetism in ferrites is the indirect exchange interaction between the lattice O2− and the magnetic ions. Generally, four building blocks, namely S, S*, R and R*, are used to construct the M-type BaFe12O19 hexaferrite: the S and S* blocks have a spinel structure with two oxygen layers, while the R and R* blocks have a hexagonal structure with three oxygen layers. The BaFe12O19 unit cell contains 38 O2−, 2 Ba2+ and 24 Fe3+ ions. Swift-ion irradiation produces more oxygen vacancies through the high-density electronic excitation process, and the corresponding deficiency of Fe3+ ions depends on that process. The uniaxial magnetic anisotropy of the BaFe12O19 hexaferrite is reduced by the oxygen vacancies created by swift-ion irradiation and, hence, the saturation magnetization decreases. For the same reason, the remanent magnetization also decreases in the sample. Thus, irradiation affects the magnetic character of the sample.

Conclusion

M-type barium hexagonal ferrite synthesized via the sol-gel auto-combustion technique was examined for the effect of a 200 MeV Ag16+ ion beam on its structural and magnetic nature. Coercivity decreases in the irradiated sample due to the decrease in crystallite and grain size on irradiation. The irradiated sample shows a lower magnetization value than the pristine sample due to ion-induced disorder and the weakening of the superexchange interactions.
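As an aside for readers who want to reproduce the crystallite-size estimate discussed above, here is a minimal sketch of the Scherrer calculation. It is an illustration, not part of the original work: the shape factor K = 0.9 is a commonly assumed value (the paper does not state one), the wavelength is the Cu-Kα value quoted in the experimental section, and the peak numbers fed in at the bottom are invented.

import math

def scherrer_size_nm(fwhm_2theta_deg: float, two_theta_deg: float,
                     wavelength_nm: float = 0.15405, k: float = 0.9) -> float:
    """Crystallite size D = K*lambda / (beta * cos(theta)).

    beta is the peak full-width at half-maximum converted to radians
    (2-theta scale); theta is the Bragg angle, i.e. half of 2-theta.
    K = 0.9 is an assumed shape factor, not a value given in the paper.
    """
    beta = math.radians(fwhm_2theta_deg)       # FWHM in radians
    theta = math.radians(two_theta_deg / 2.0)  # Bragg angle
    return k * wavelength_nm / (beta * math.cos(theta))

# Hypothetical numbers for illustration only: a 0.2-degree-wide peak at 2-theta = 32 degrees.
print(f"D = {scherrer_size_nm(0.2, 32.0):.1f} nm")  # ~41 nm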
class Query:
    """Base class for all queries."""

    def __init__(self, source):
        self.source = source

    def __call__(self, data):
        # Identity transform by default; subclasses override this.
        return data

    def __repr__(self):
        return str(self)

    def __str__(self):
        return f'<{self.__class__.__name__}: {repr(self.source)}>'

    def __eq__(self, other):
        return self.source == other.source

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self.source)
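A quick sketch of how the base class above is meant to be extended. The subclass name and filtering behaviour here are invented for illustration and are not part of the original module:

# Hypothetical subclass for illustration: keeps only items equal to `source`.
class EqualsQuery(Query):
    def __call__(self, data):
        return [item for item in data if item == self.source]

q = EqualsQuery("a")
print(q(["a", "b", "a"]))      # ['a', 'a']
print(q == EqualsQuery("a"))   # True: equality compares only `source`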
/**
 * Test an 'empty' module.
 */
public class EmptyModuleTest extends AbstractRuntimeTest {

    protected LilyRuntimeModel getRuntimeModel() throws Exception {
        File moduleDir = createModule("org.lilyproject.runtime.test.testmodules.emptymodule");

        LilyRuntimeModel model = new LilyRuntimeModel();
        ModuleDefinition module = new ModuleDefinition("foo", moduleDir, ModuleSourceType.EXPANDED_JAR);
        model.addModule(module);

        return model;
    }

    public void testEmptyModule() throws Exception {
        // Intentionally empty: the test passes if the runtime starts up with
        // the empty module without throwing during setup.
    }
}
# -*- coding: utf-8 -*-
# Read N scores and print the largest total that is not divisible by 10.
N = int(input())
s = [int(input()) for i in range(N)]
s.sort()
s_sum = sum(s)

# Find the smallest score that is not a multiple of 10 (inf if none exists).
inf = float("inf")
min_s_not10x = inf
for si in s:
    if si % 10 != 0:
        min_s_not10x = si
        break

if s_sum % 10 != 0:
    # The full sum already qualifies.
    print(s_sum)
else:
    if min_s_not10x == inf:
        # Every score is a multiple of 10, so every subset sum is too.
        print(0)
    else:
        # Drop the smallest non-multiple of 10 to break the divisibility.
        print(s_sum - min_s_not10x)
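A worked trace of the branches above, with invented inputs (illustration only, not part of the original submission):

# sum = 105 is not divisible by 10, so it is printed as-is.
assert 50 + 30 + 25 == 105 and 105 % 10 != 0

# sum = 110 is divisible by 10; dropping the smallest score that is not a
# multiple of 10 (here 5) gives the best printable total, 105.
scores = sorted([50, 30, 25, 5])
total = sum(scores)                                             # 110
smallest_non_multiple = next(x for x in scores if x % 10 != 0)  # 5
assert total % 10 == 0 and total - smallest_non_multiple == 105

# If every score is a multiple of 10, no valid total exists, so 0 is printed.
assert all(x % 10 == 0 for x in [10, 20])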
// ListUsers returns all the users matching the filters.
func (s *Server) ListUsers(ctx context.Context, req *usersservicepb.ListUsersRequest) (res *usersservicepb.ListUsersResponse, err error) {
	order, err := orderProtoToInternal(req.GetSortOrder())
	if err != nil {
		return nil, status.Error(codes.InvalidArgument, err.Error())
	}

	users, err := s.Repo.ListUsers(
		ctx,
		req.GetNames(),
		order,
	)
	if err != nil {
		return nil, status.Error(codes.Internal, err.Error())
	}

	pbUsers := []*userspb.User{}
	for _, user := range users {
		ct, err := ptypes.TimestampProto(user.CreateTime)
		if err != nil {
			return nil, status.Error(codes.Internal, "failed to convert create time to proto type")
		}

		pbUser := &userspb.User{
			Name:       user.Name,
			Id:         user.ID,
			CreateTime: ct,
		}
		pbUsers = append(pbUsers, pbUser)
	}

	return &usersservicepb.ListUsersResponse{
		Users: pbUsers,
	}, nil
}
Stephen King still seems to be on a creative roll, producing books at nearly six-month intervals. He delivered “Joyland” and “Doctor Sleep” in 2013, and 2014 saw the publication of “Mr. Mercedes” and “Revival.” One might quibble about their relative merits, but all were ambitious, well-plotted novels executed with cleverness and care.

Two new books by King will appear this year. We’ll have to wait until November for his story collection, “The Bazaar of Bad Dreams,” but “Finders Keepers,” the sequel to “Mr. Mercedes” and the second volume of what’s shaping up to be the “Bill Hodges Trilogy,” is available now.

REVIEW
“Finders Keepers.” By Stephen King. Scribner. 448 pages. $30. Release date: June 2, 2015.

The novel opens in 1978, with the invasion of John Rothstein’s New Hampshire cabin. The 80-year-old reclusive novelist wakes to find three intruders in his bedroom, demanding money. One of the crew, Morris Bellamy, wants more than cash, however. He’s obsessed with Rothstein’s literary output, to the point of mental instability. The elderly writer characterizes Morris perfectly when he says, “It’s guys like you who give reading a bad name.”

Although Rothstein hasn’t published any new material in decades, it’s rumored that he has completed at least one more novel. Morris wants to see whether Rothstein actually allowed his signature character, the iconoclastic Jimmy Gold, to “sell out” for a career in advertising. Morris’s violent insistence leads to the unearthing of a cache of notebooks, a literary treasure of inestimable value, one that costs Rothstein his life. Before another act of violence puts him out of commission for a long time, Morris is able to hide the money and the notebooks.

The stash stays hidden for decades, until a young teen named Pete Saubers stumbles across it. Pete’s father was critically injured when the psychopath known as Mr. Mercedes drove his vehicle into a crowd lining up for a local job fair. The Saubers family is on the verge of disintegrating from financial and medical stress, so Pete devises a plan to funnel the windfall to his parents. He’s a smart kid, but even so, he’s not able to suss out all the ramifications of his scheme.

When Morris Bellamy returns, looking for what he believes belongs to him, the safety of Pete’s family depends on a set of unlikely heroes. Foremost among them is Bill Hodges, the retired cop who helped foil a second terrorist plot by Mr. Mercedes four years earlier.

The dedication to “Finders Keepers” reads “Thinking of John D. MacDonald.” The homage to the late, great pulp writer is apt. Bill Hodges is no Travis McGee, but “Finders Keepers” feels like one of the many stand-alone thrillers MacDonald devised during his long career, such as “A Flash of Green” or “The Executioners,” filmed twice as “Cape Fear.” Like MacDonald, King possesses a keen sense of the nuts-and-bolts of everyday American life, and he knows exactly how to infuse it with escalating menace.

In April, “Mr. Mercedes” earned King an Edgar Award for best mystery novel. Although solidly constructed, “Finders Keepers” isn’t likely to garner as much attention. It’s a middle book in a trilogy, and its premise doesn’t feel quite as fresh as that of the previous volume. It also hints that a more compelling narrative may still lie ahead, with Hodges and his associates likely to face off against Mr. Mercedes again in the final installment.

“Finders Keepers” also hews a little too closely to the themes and mechanics of another, better King novel.
As literature-obsessed villains go, Morris Bellamy pales in comparison with “Number One Fan” Annie Wilkes from “Misery.” Although exciting and well choreographed, the climactic showdown in “Finders Keepers” between Morris and Bill Hodges feels a little too pat.

Nevertheless, the novel mostly delivers on what it promises: a gripping setup, a group of resourceful good guys, an antagonist capable of terrible violence. It also speaks to the powerful allure of fiction, of how a great story can capture someone’s imagination and make him or her see the world in a completely different way.

Prolific and personable, King seems to be the complete antithesis of the reclusive, withholding John Rothstein. That’s something for which his legions of adoring fans can be extremely thankful, as they await the wrap-up to this intriguing trilogy of hard-boiled thrillers.

Mike Berry is a freelance writer.
// Will place a marker on either the project (if META-INF exists but not a MANIFEST.MF)
// or on the MANIFEST.MF file with incorrect casing.
private void validateManifestCasing(IProject project) {
    IFolder manifestFolder = PDEProject.getMetaInf(project);
    if (manifestFolder.exists()) {
        try {
            manifestFolder.deleteMarkers(PDEMarkerFactory.MARKER_ID, false, IResource.DEPTH_ONE);
        } catch (CoreException e1) {
            // Ignore: stale markers are not fatal.
        }

        // Nothing to report if a correctly-cased MANIFEST.MF exists.
        if (PDEProject.getManifest(project).exists())
            return;

        IPath location = manifestFolder.getLocation();
        if (location != null) {
            File metaFolder = location.toFile();
            String[] fileList = metaFolder.list(new ManifestFilter());
            if (fileList == null || fileList.length == 0) {
                // META-INF exists but contains no manifest at all: mark the project.
                try {
                    IMarker marker = project.createMarker(PDEMarkerFactory.MARKER_ID);
                    marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
                    marker.setAttribute(IMarker.MESSAGE, PDECoreMessages.ManifestConsistencyChecker_manifestDoesNotExist);
                } catch (CoreException e) {
                    // Ignore: marker creation failure is not fatal.
                }
            } else {
                // A manifest file exists but with incorrect casing: mark each offending file.
                for (int i = 0; i < fileList.length; i++) {
                    String fileName = fileList[i];
                    IFile currentFile = manifestFolder.getFile(fileName);
                    try {
                        IMarker marker = currentFile.createMarker(PDEMarkerFactory.MARKER_ID);
                        marker.setAttribute(IMarker.SEVERITY, IMarker.SEVERITY_ERROR);
                        marker.setAttribute(IMarker.MESSAGE, PDECoreMessages.ManifestConsistencyChecker_manifestMisspelled);
                    } catch (CoreException e) {
                        // Ignore.
                    }
                }
            }
        }
    }
}
import * as React from "react";
import * as Loadable from "react-loadable";

const Loading = (): React.ReactElement => <span>Loading</span>;

export const List = Loadable({
  loader: () => import(/* webpackChunkName: "lists" */ "./list-constainer"),
  loading: Loading,
});

export const User = Loadable({
  loader: () => import(/* webpackChunkName: "register" */ "./users-container"),
  loading: Loading,
});

export const Lists = Loadable({
  loader: () =>
    import(/* webpackChunkName: "LandingPage" */ "./lists-constainer"),
  loading: Loading,
});
import torch
from torch import nn
import torch.nn.functional as F


class h_sigmoid(nn.Module):
    """Hard sigmoid: ReLU6(x + 3) / 6."""

    def __init__(self, inplace=True):
        super(h_sigmoid, self).__init__()
        self.relu = nn.ReLU6(inplace=inplace)

    def forward(self, x):
        return self.relu(x + 3) / 6


class h_swish(nn.Module):
    """Hard swish: x * h_sigmoid(x)."""

    def __init__(self, inplace=True):
        super(h_swish, self).__init__()
        self.sigmoid = h_sigmoid(inplace=inplace)

    def forward(self, x):
        return x * self.sigmoid(x)


class CoordAtt(nn.Module):
    """Coordinate attention: pools along H and W separately, then re-weights the input."""

    def __init__(self, inp, oup, reduction=32):
        super(CoordAtt, self).__init__()
        self.pool_h = nn.AdaptiveAvgPool2d((None, 1))
        self.pool_w = nn.AdaptiveAvgPool2d((1, None))

        mip = max(8, inp // reduction)

        self.conv1 = nn.Conv2d(inp, mip, kernel_size=1, stride=1, padding=0)
        self.bn1 = nn.BatchNorm2d(mip)
        self.act = h_swish()

        self.conv_h = nn.Conv2d(mip, oup, kernel_size=1, stride=1, padding=0)
        self.conv_w = nn.Conv2d(mip, oup, kernel_size=1, stride=1, padding=0)

    def forward(self, x):
        identity = x
        n, c, h, w = x.size()

        # Directional pooling: (n, c, h, 1) and (n, c, w, 1) after the permute.
        x_h = self.pool_h(x)
        x_w = self.pool_w(x).permute(0, 1, 3, 2)

        y = torch.cat([x_h, x_w], dim=2)
        y = self.conv1(y)
        y = self.bn1(y)
        y = self.act(y)

        x_h, x_w = torch.split(y, [h, w], dim=2)
        x_w = x_w.permute(0, 1, 3, 2)

        a_h = self.conv_h(x_h).sigmoid()
        a_w = self.conv_w(x_w).sigmoid()

        out = identity * a_w * a_h
        return out


class EncoderBlock(nn.Module):
    """Two reflection-padded 3x3 convs, the second stage downsampling by 2."""

    def __init__(self, in_channels, out_channels):
        super(EncoderBlock, self).__init__()
        self.pad1 = nn.ReflectionPad2d(3 // 2)
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3)
        self.pad2 = nn.ReflectionPad2d(3 // 2)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.elu1 = nn.ELU()
        self.pad3 = nn.ReflectionPad2d(3 // 2)
        self.conv3 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=2)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.elu2 = nn.ELU()

    def forward(self, x):
        x = self.pad1(x)
        x = self.conv1(x)
        x = self.elu1(self.bn1(self.conv2(self.pad2(x))))
        return self.elu2(self.bn2(self.conv3(self.pad3(x))))


class DecoderBlock(nn.Module):
    """Bicubic 2x upsampling followed by three reflection-padded 3x3 convs."""

    def __init__(self, in_channels, out_channels):
        super(DecoderBlock, self).__init__()
        self.bn0 = nn.BatchNorm2d(out_channels)  # defined but not used in forward
        self.pad1 = nn.ReflectionPad2d(3 // 2)
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3)
        self.pad2 = nn.ReflectionPad2d(3 // 2)
        self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.elu1 = nn.ELU()
        self.pad3 = nn.ReflectionPad2d(3 // 2)
        self.conv3 = nn.Conv2d(out_channels, out_channels, kernel_size=3)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.elu2 = nn.ELU()

    def forward(self, x):
        x = F.interpolate(x, scale_factor=2, mode='bicubic', align_corners=False)
        x = self.conv1(self.pad1(x))
        x = self.elu1(self.bn1(self.conv2(self.pad2(x))))
        return self.elu2(self.bn2(self.conv3(self.pad3(x))))


class ZipperNet(nn.Module):
    """Two-stream U-Net-like network whose encoder streams feed into each other."""

    def __init__(self):
        super(ZipperNet, self).__init__()
        depth = 1

        self.encoderblockx1 = EncoderBlock(depth, 8)
        self.encoderblocky1 = EncoderBlock(depth, 8)
        self.encoderblock2 = EncoderBlock(8, 16)
        self.encoderblock3 = EncoderBlock(16, 32)
        self.encoderblock4 = EncoderBlock(32, 64)
        self.encoderblock5 = EncoderBlock(64, 128)

        self.decoderblock1 = DecoderBlock(128, 64)
        self.decoderblock2 = DecoderBlock(64 * 2, 32)
        self.decoderblock3 = DecoderBlock(32 * 2, 16)
        self.decoderblock4 = DecoderBlock(16 * 2, 8)
        self.decoderblock5 = DecoderBlock(8 * 2, 8)

        self.CoordAtt = CoordAtt(256, 256)  # defined but not used in forward

        # 1x1 convs fuse the two encoder streams into skip connections.
        self.conv1x1_1 = nn.Conv2d(16, 8, 1)
        self.conv1x1_2 = nn.Conv2d(32, 16, 1)
        self.conv1x1_3 = nn.Conv2d(64, 32, 1)
        self.conv1x1_4 = nn.Conv2d(128, 64, 1)
        self.conv1x1_5 = nn.Conv2d(256, 128, 1)

        self.conv3x3 = nn.Conv2d(8, depth, 3)
        self.sigmoid = nn.Sigmoid()

    def forward(self, x, y):
        # Encoder: the x and y streams alternately feed into each other ("zipper").
        layerx1 = self.encoderblockx1(x)
        layery1 = self.encoderblocky1(y)
        cat1 = self.conv1x1_1(torch.cat([layerx1, layery1], 1))

        layerx2 = self.encoderblock2(layerx1 + layery1)
        layery2 = self.encoderblock2(layery1)
        cat2 = self.conv1x1_2(torch.cat([layerx2, layery2], 1))

        layerx3 = self.encoderblock3(layerx2)
        layery3 = self.encoderblock3(layery2 + layerx2)
        cat3 = self.conv1x1_3(torch.cat([layerx3, layery3], 1))

        layerx4 = self.encoderblock4(layerx3 + layery3)
        layery4 = self.encoderblock4(layery3)
        cat4 = self.conv1x1_4(torch.cat([layerx4, layery4], 1))

        layerx5 = self.encoderblock5(layerx4)
        layery5 = self.encoderblock5(layery4 + layerx4)

        # Decoder with skip connections from the fused encoder features.
        up1 = self.decoderblock1(self.conv1x1_5(torch.cat([layerx5, layery5], 1)))
        up2 = self.decoderblock2(torch.cat([up1, cat4], 1))
        up3 = self.decoderblock3(torch.cat([up2, cat3], 1))
        up4 = self.decoderblock4(torch.cat([up3, cat2], 1))
        output = self.decoderblock5(torch.cat([up4, cat1], 1))

        output = self.conv3x3(output)
        output = self.sigmoid(output)
        # Resize back to the input's spatial size (conv3x3 has no padding).
        output = F.interpolate(output, (x.shape[2], x.shape[3]), mode='bicubic', align_corners=False)
        return output
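A minimal smoke test for the module above. The input sizes are assumptions for illustration, not from the original code; the network expects two single-channel inputs of the same spatial size, divisible by 32 so the five stride-2 encoder stages line up with the skip connections.

# Minimal smoke test -- sizes are assumptions, not part of the original file.
if __name__ == "__main__":
    net = ZipperNet()
    x = torch.randn(1, 1, 64, 64)  # depth = 1 channel, as in __init__
    y = torch.randn(1, 1, 64, 64)
    out = net(x, y)
    print(out.shape)  # torch.Size([1, 1, 64, 64]) -- resized back to x's size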
You can get much bigger inverters on 24V or 48V than 12V. There are a number of advantages in opting for a higher DC supply voltage:

– For any given load, the DC current is halved and resistive losses drop to a quarter. Reduced fire risk.
– Better input regulation. A 0.5V line drop at 12V is a 4.2% supply drop, whereas a 0.25V line drop at 24V is a 1.04% drop.
– Better inverter efficiency and regulation, i.e. lower losses converting to 240 VAC. An inverter doesn’t have to work as hard to keep its AC output constant.
– Larger usable operating voltage window (for acceptable DOD) available from batteries.

Up to 3kW max demand, a quality 24 volt inverter would still be OK; the rule of thumb is that max current demand from the inverter should not be over 120-140 amps. If over 3kW, indeed go for a 48 volt system. 150 amps is the limit for cost-effective cabling, switching, breakers and fusing.

In short, your energy consumption should determine the voltage of your power system. You should not have continuous currents greater than 100 Amps.

Power – Current – Voltage
1,000 Watt = 83 Amp @ 12 Volt
2,000 Watt = 83 Amp @ 24 Volt
4,000 Watt = 83 Amp @ 48 Volt
20,000 Watt = 87 Amp @ 230 Volt

The higher the current (measured in Amperes or Amps), the bigger the components need to be. High currents require large-diameter cables and fuses, both of which are expensive. By doubling the voltage you get double the power (Watts) at the same current. Dealing with currents over 100A is costly (and therefore inefficient) and potentially dangerous. A perspective: a standard household extension cord is rated at 10A max current. 100A would probably melt it and could start a fire!

Industry Standard
12 Volt used to be the standard for extra-low-voltage power systems. Today, most systems are 24V or 48V and include a 230V AC inverter. This means the wiring of the house does not have to be different from any other grid-connected household, and cabling cost is greatly reduced. We advise that you get an electrician to wire your house for 230V AC. This way you can use standard AC appliances and lighting, most of which are a lot cheaper to buy and many are becoming quite efficient.

System Size
In the past we tried to reduce the cost of an off-grid system by limiting its size. This was achieved by using 12V or 24V appliances and lighting that do not require an inverter. In recent years, inverters and solar panels have become more efficient and a lot more affordable. In addition, most customers seem to want more power over the years. A 12V DC system with a tiny inverter is difficult if not impossible to upgrade/upsize. Not to mention that only very few companies sell extra-low-voltage appliances or lighting.

Another calculator online: see http://www.rapidtables.com/calc/electric/ohms-law-calculator.htm

It’s mostly a matter of distribution. You can use smaller cable to distribute the same power using higher voltage. So, if you have a big DC motor to run, or want to run something a long way away from the batteries, 24V will allow you to get power there using smaller cable. In some installations this can be a big money saver, or allow larger services, but it’s not always better. You will also need more batteries with higher voltage, so it’s not a magic bullet for a less expensive system.
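The sizing rules above boil down to I = P / V. A small sketch that applies them — the function names are my own, and the 100 A ceiling and 12/24/48 V tiers are the guidelines quoted in the text, not universal limits:

# Current draw for a given load and battery voltage: I = P / V.
def battery_current(power_w: float, voltage_v: float) -> float:
    return power_w / voltage_v

# Pick the lowest standard bus voltage that keeps continuous current
# under ~100 A, per the article's rule of thumb.
def suggest_bus_voltage(power_w: float, limit_a: float = 100.0) -> int:
    for v in (12, 24, 48):
        if battery_current(power_w, v) <= limit_a:
            return v
    return 48  # beyond this, the article points towards 230 V AC distribution

print(battery_current(3000, 24))  # 125.0 A -- near the 120-140 A inverter limit
print(suggest_bus_voltage(3000))  # 48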
package com.jungdam.member.domain;

import com.jungdam.common.domain.BaseEntity;
import com.jungdam.member.domain.vo.Avatar;
import com.jungdam.member.domain.vo.Email;
import com.jungdam.member.domain.vo.Nickname;
import com.jungdam.member.domain.vo.ProviderType;
import com.jungdam.member.domain.vo.Role;
import com.jungdam.member.domain.vo.Status;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;

@Entity
public class Member extends BaseEntity {

    @Id
    @Column(name = "member_id")
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    @Embedded
    private Email email;

    @Embedded
    private Nickname nickname;

    @Embedded
    private Avatar avatar;

    @Column(name = "member_role")
    @Enumerated(EnumType.STRING)
    private Role role;

    @Column(name = "member_status")
    @Enumerated(value = EnumType.STRING)
    private Status status;

    @Column(name = "member_provider_type")
    @Enumerated(EnumType.STRING)
    private ProviderType providerType;

    @Column(name = "member_oauth_permission", unique = true)
    private String oauthPermission;

    protected Member() {
    }

    public Member(
        String oauthPermission,
        Nickname nickname,
        Email email,
        Avatar avatar,
        ProviderType providerType
    ) {
        this.oauthPermission = oauthPermission;
        this.nickname = nickname;
        this.email = email;
        this.avatar = avatar;
        this.providerType = providerType;
        this.role = Role.USER;
        this.status = Status.FREE;
    }

    public static MemberBuilder builder() {
        return new Member.MemberBuilder();
    }

    public void update(Nickname nickname, Avatar avatar) {
        this.nickname = nickname;
        this.avatar = avatar;
    }

    public String getEmailValue() {
        return email.getEmail();
    }

    public String getNicknameValue() {
        return nickname.getNickname();
    }

    public String getAvatarValue() {
        return avatar.getAvatar();
    }

    public String getRoleValue() {
        return role.getRole();
    }

    public Email getEmail() {
        return email;
    }

    public Nickname getNickname() {
        return nickname;
    }

    public Avatar getAvatar() {
        return avatar;
    }

    public Role getRole() {
        return role;
    }

    public ProviderType getProviderType() {
        return providerType;
    }

    public Long getId() {
        return id;
    }

    public String getOauthPermission() {
        return oauthPermission;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Member member = (Member) o;
        return Objects.equals(id, member.id)
            && Objects.equals(email, member.email)
            && Objects.equals(nickname, member.nickname)
            && Objects.equals(avatar, member.avatar)
            && role == member.role
            && status == member.status
            && providerType == member.providerType
            && Objects.equals(oauthPermission, member.oauthPermission);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, email, nickname, avatar, role, status, providerType,
            oauthPermission);
    }

    public static class MemberBuilder {

        private String oauthPermission;
        private Nickname nickname;
        private Email email;
        private Avatar avatar;
        private ProviderType providerType;

        private MemberBuilder() {
        }

        public MemberBuilder oauthPermission(final String oauthPermission) {
            this.oauthPermission = oauthPermission;
            return this;
        }

        public MemberBuilder nickname(final Nickname nickname) {
            this.nickname = nickname;
            return this;
        }

        public MemberBuilder email(final Email email) {
            this.email = email;
            return this;
        }

        public MemberBuilder avatar(final Avatar avatar) {
            this.avatar = avatar;
            return this;
        }

        public MemberBuilder providerType(ProviderType providerType) {
            this.providerType = providerType;
            return this;
        }

        public Member build() {
            return new Member(this.oauthPermission, this.nickname, this.email, this.avatar,
                this.providerType);
        }
    }
}
const StarButton = () => (
  <div className="star-button-container">
    <p>
      <small>Leave a star on Github if this repository was useful :)</small>
    </p>
    <a
      className="github-button"
      href="https://github.com/jeffersonRibeiro/react-shopping-cart"
      data-icon="octicon-star"
      data-size="large"
      data-show-count="true"
      aria-label="Star jeffersonRibeiro/react-shopping-cart on GitHub"
      tabIndex={-1}
    >
      Star
    </a>
  </div>
);

export default StarButton;
/**
 * Back port of JSR-203 from Java Platform, Standard Edition 7.
 * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/nio/file/Paths.html">Original JavaDoc</a>
 */
public final class Paths {

    private Paths() {
    }

    /**
     * @throws IllegalArgumentException
     * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/nio/file/Paths.html#get(java.lang.String, java.lang.String...)">JDK JavaDoc</a>
     */
    public static Path get(final String first, final String... more) throws IllegalArgumentException {
        checkNotNull("first", first);

        if (first.trim().length() == 0) {
            return FileSystems.getDefault().getPath(first);
        }

        URI uri = null;
        if (more == null || more.length == 0) {
            try {
                uri = new URI(first);
            } catch (URISyntaxException ex) {
                try {
                    uri = URI.create(first);
                } catch (IllegalArgumentException e) {
                    uri = null;
                }
            }
        }

        if (uri != null && uri.getScheme() != null) {
            return get(uri);
        }

        return FileSystems.getDefault().getPath(first, more);
    }

    /**
     * @throws IllegalArgumentException
     * @throws FileSystemNotFoundException
     * @throws SecurityException
     * @see <a href="http://docs.oracle.com/javase/7/docs/api/java/nio/file/Paths.html#get(java.net.URI)">JDK JavaDoc</a>
     */
    public static Path get(final URI uri)
            throws IllegalArgumentException, FileSystemNotFoundException, SecurityException {
        checkNotNull("uri", uri);
        return FileSystemProviders.resolveProvider(uri).getPath(uri);
    }
}
/// This does not run any git hooks; git hooks have to be executed manually,
/// see `hooks_commit_msg` for an example.
pub fn commit(repo_path: &RepoPath, msg: &str) -> Result<CommitId> {
	scope_time!("commit");

	let repo = repo(repo_path)?;

	let signature = signature_allow_undefined_name(&repo)?;
	let mut index = repo.index()?;
	let tree_id = index.write_tree()?;
	let tree = repo.find_tree(tree_id)?;

	// Use the current HEAD commit as the parent, if one exists
	// (an unborn branch has no parent).
	let parents = if let Ok(id) = get_head_repo(&repo) {
		vec![repo.find_commit(id.into())?]
	} else {
		Vec::new()
	};

	let parents = parents.iter().collect::<Vec<_>>();

	Ok(repo
		.commit(
			Some("HEAD"),
			&signature,
			&signature,
			msg,
			&tree,
			parents.as_slice(),
		)?
		.into())
}
package ucla.nesl.notificationpreference.service;

import android.util.Log;
import android.util.SparseIntArray;

import com.google.android.gms.location.ActivityRecognitionResult;
import com.google.android.gms.location.DetectedActivity;
import com.google.android.gms.location.Geofence;

import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.concurrent.TimeUnit;

import ucla.nesl.notificationpreference.notification.INotificationEventListener;
import ucla.nesl.notificationpreference.notification.NotificationHelper;
import ucla.nesl.notificationpreference.notification.enums.NotificationEventType;
import ucla.nesl.notificationpreference.sensing.RingerModeDataCollector;
import ucla.nesl.notificationpreference.sensing.ScreenStatusDataCollector;
import ucla.nesl.notificationpreference.sensing.location.LocationDataCollector;
import ucla.nesl.notificationpreference.sensing.motion.MotionActivityDataCollector;
import ucla.nesl.notificationpreference.task.scheduler.PeriodicTaskScheduler;
import ucla.nesl.notificationpreference.utils.HashUtils;
import ucla.nesl.notificationpreference.utils.If;
import ucla.nesl.notificationpreference.utils.Utils;

/**
 * Created by timestring on 6/14/18.
 *
 * `SensorMaster` has a pool of data collectors and focuses on sensor data handling, especially
 * since some data collectors are push-based (a callback is required) while others are pull-based.
 * `SensorMaster` provides a nice interface to `TaskSchedulingService` to summarize what the
 * current state is.
 */
public class SensorMaster {

    private static final String TAG = PeriodicTaskScheduler.class.getSimpleName();

    private MotionActivityDataCollector motionActivityDataCollector;
    private LocationDataCollector locationDataCollector;
    private RingerModeDataCollector ringerModeDataCollector;
    private ScreenStatusDataCollector screenDataCollector;

    // motion activity related
    private static final int[] MOTION_ACTIVITY_OF_INTEREST = {
            DetectedActivity.STILL,
            DetectedActivity.WALKING,
            DetectedActivity.RUNNING,
            DetectedActivity.IN_VEHICLE,
            DetectedActivity.ON_BICYCLE
    };

    private String lastMotionActivity = motionActivityTypeToString(DetectedActivity.STILL);
    private SparseIntArray motionConfidenceSum = new SparseIntArray();
    private int motionActivityCount;

    // location geofence related
    private HashMap<String, Long> geofenceEnteredTimestamp = new HashMap<>();

    // notification status related
    private NotificationHelper notificationHelper;
    private long lastNotificationTime;

    //region Section: Master-level control
    // =============================================================================================
    public SensorMaster(TaskSchedulingService service) {
        motionActivityDataCollector = new MotionActivityDataCollector(
                service, motionActivityCallback);
        locationDataCollector = new LocationDataCollector(service, geofenceCallback);
        ringerModeDataCollector = new RingerModeDataCollector(service);
        screenDataCollector = new ScreenStatusDataCollector(service);

        // initialize notification status related
        notificationHelper = new NotificationHelper(service, false, notificationEventListener);
        lastNotificationTime = 0L;
    }

    public void start() {
        resetMotionScore();
        resetGeofenceStatus();
        motionActivityDataCollector.start();
        locationDataCollector.start();
    }

    public void stop() {
        motionActivityDataCollector.stop();
        locationDataCollector.stop();
    }

    public String getStateMessageAndReset() {
        String percentageOfDay = String.valueOf(getPercentageOfTheDay());
        String percentageOfWeek = String.valueOf(getPercentageOfTheWeek());
        String motionActivity = determineMotionTypeWithinCurrentWindow();
        String location = determineCurrentPlace();
        String notificationTimeElapsed = String.valueOf(getLastNotificationElapsedTimeInMinute());
        String ringerMode = ringerModeDataCollector.query();
        String screenStatus = screenDataCollector.query();

        resetMotionScore();

        return Utils.stringJoin(",", percentageOfDay, percentageOfWeek, motionActivity,
                location, notificationTimeElapsed, ringerMode, screenStatus);
    }
    //endregion

    //region Section: Callbacks
    // =============================================================================================
    private MotionActivityDataCollector.Callback motionActivityCallback
            = new MotionActivityDataCollector.Callback() {
        @Override
        public void onMotionActivityResult(ActivityRecognitionResult result) {
            for (int motionActivityType : MOTION_ACTIVITY_OF_INTEREST) {
                HashUtils.addAssign(motionConfidenceSum, motionActivityType,
                        result.getActivityConfidence(motionActivityType));
            }
            motionActivityCount++;
        }
    };

    private LocationDataCollector.Callback geofenceCallback = new LocationDataCollector.Callback() {
        @Override
        public void onGeofenceResult(int geofenceTransitionType, String placeCode) {
            switch (geofenceTransitionType) {
                case Geofence.GEOFENCE_TRANSITION_ENTER:
                    geofenceEnteredTimestamp.put(placeCode, System.currentTimeMillis());
                    break;
                case Geofence.GEOFENCE_TRANSITION_EXIT:
                    geofenceEnteredTimestamp.put(placeCode, null);
                    break;
            }
        }
    };

    private INotificationEventListener notificationEventListener = new INotificationEventListener() {
        @Override
        public void onNotificationEvent(int notificationID, NotificationEventType event) {
            if (event == NotificationEventType.CREATED) {
                lastNotificationTime = System.currentTimeMillis();
            }
        }
    };
    //endregion

    //region Section: Time/day computation
    // =============================================================================================
    private double getPercentageOfTheWeek() {
        Calendar calendar = GregorianCalendar.getInstance();
        calendar.setTime(new Date());
        int dayOfWeek = calendar.get(Calendar.DAY_OF_WEEK) - Calendar.SUNDAY;
        return ((double) dayOfWeek + getPercentageOfTheDay()) / 7.0;
    }

    private double getPercentageOfTheDay() {
        Calendar calendar = GregorianCalendar.getInstance();
        calendar.setTime(new Date());
        long secondsOfTheDay = TimeUnit.HOURS.toSeconds(calendar.get(Calendar.HOUR_OF_DAY))
                + TimeUnit.MINUTES.toSeconds(calendar.get(Calendar.MINUTE))
                + calendar.get(Calendar.SECOND);
        return (double) secondsOfTheDay / TimeUnit.DAYS.toSeconds(1);
    }
    //endregion

    //region Section: Motion activity computation
    // =============================================================================================
    private void resetMotionScore() {
        for (int motionActivityType : MOTION_ACTIVITY_OF_INTEREST) {
            motionConfidenceSum.put(motionActivityType, 0);
        }
        motionActivityCount = 0;
    }

    private String motionActivityTypeToString(int type) {
        switch (type) {
            case DetectedActivity.STILL:
                return "still";
            case DetectedActivity.WALKING:
                return "walking";
            case DetectedActivity.RUNNING:
                return "running";
            case DetectedActivity.IN_VEHICLE:
                return "driving";
            case DetectedActivity.ON_BICYCLE:
                return "biking";
            default:
                throw new IllegalArgumentException("Type not in list");
        }
    }

    private String determineMotionTypeWithinCurrentWindow() {
        if (motionActivityCount == 0) {
            Log.w(TAG, "No motion activity samples in the previous window");
            return lastMotionActivity;
        } else {
            return motionActivityTypeToString(HashUtils.argMax(motionConfidenceSum));
        }
    }
    //endregion

    //region Section: Geofence status computation
    // =============================================================================================
    private void resetGeofenceStatus() {
        geofenceEnteredTimestamp.put(LocationDataCollector.PLACE_LABEL_HOME, null);
        geofenceEnteredTimestamp.put(LocationDataCollector.PLACE_LABEL_WORK, null);
    }

    private String determineCurrentPlace() {
        return If.nullThen(HashUtils.argMax(geofenceEnteredTimestamp), "others");
    }
    //endregion

    //region Section: Notification elapsed time
    // =============================================================================================
    private double getLastNotificationElapsedTimeInMinute() {
        double timeElapsed = (double) TimeUnit.MILLISECONDS.toMinutes(
                System.currentTimeMillis() - lastNotificationTime);
        return Math.min(Math.max(timeElapsed, 0.0), (double) TimeUnit.DAYS.toMinutes(1));
    }
    //endregion
}
/**
 * Updates the position of the Soldier to move it towards pt. No collision.
 *
 * @param pt
 */
public void moveTowards(GridPoint pt) {
    if (!solved && !foundWall && !waiting && !pt.equals(grid.getLocation(this))) {
        // Step one velocity unit towards pt on each axis.
        waiting = false;
        if (xSoldier > pt.getX()) {
            xSoldier -= velocitySoldier;
        } else if (xSoldier < pt.getX()) {
            xSoldier += velocitySoldier;
        }
        if (ySoldier > pt.getY()) {
            ySoldier -= velocitySoldier;
        } else if (ySoldier < pt.getY()) {
            ySoldier += velocitySoldier;
        }
        space.moveTo(this, xSoldier, ySoldier);
        grid.moveTo(this, (int) xSoldier, (int) ySoldier);
    } else if (foundWall && !solved && !waiting) {
        // Blocked by a wall: stop and request assistance.
        waiting = true;
        inPosition = false;
        askForHelp();
    } else if (foundWall && solved && waiting) {
        // Help arrived: clear the wall and resume moving.
        solved = false;
        foundWall = false;
        waiting = false;
        destroyWall(pt);
    } else {
        // Arrived (or nothing left to do): reset state and inspect the area.
        solved = false;
        foundWall = false;
        waiting = false;
        inPosition = true;
        analyseArea(pt);
    }
}
package org.zankio.ccudata.ecourse.source.local;

import android.content.Context;
import android.database.sqlite.SQLiteDatabase;

import org.zankio.ccudata.base.Repository;
import org.zankio.ccudata.base.source.BaseSource;
import org.zankio.ccudata.ecourse.database.EcourseDatabaseHelper;

public abstract class DatabaseBaseSource<TArgument, TData> extends BaseSource<TArgument, TData> {

    private static EcourseDatabaseHelper databaseHelper;
    private static SQLiteDatabase database;

    protected DatabaseBaseSource(Repository context) {
        setContext(context);
        if (databaseHelper == null)
            databaseHelper = new EcourseDatabaseHelper(context.getContext());
    }

    protected SQLiteDatabase getDatabase() {
        if (database == null)
            database = databaseHelper.getWritableDatabase();
        return database;
    }

    public static void clearData(Context context) {
        if (databaseHelper == null)
            databaseHelper = new EcourseDatabaseHelper(context);
        if (database == null)
            database = databaseHelper.getWritableDatabase();

        database.delete(EcourseDatabaseHelper.TABLE_ECOURSE_ANNOUNCE, null, null);
        database.delete(EcourseDatabaseHelper.TABLE_ECOURSE_SCORE, null, null);
        database.delete(EcourseDatabaseHelper.TABLE_ECOURSE, null, null);
        database.delete(EcourseDatabaseHelper.TABLE_ECOURSE_CLASSMATE, null, null);
    }
}
/**
 * Configuration files.
 * @author hdhxby
 * @see org.springframework.beans.factory.xml.NamespaceHandler
 */
package io.github.thinkframework.generator.core.configuration;
import {Parser} from '@parsify/math';
import pMemoize from 'p-memoize';
import {fetcher} from './utils/fetcher';

const memFetcher = pMemoize(fetcher);

export default (parser: Parser) => async (expression: string): Promise<string> => {
	const data = await memFetcher();

	parser.set('confirmed', data.results[0].total_cases);
	parser.set('recoveries', data.results[0].total_recovered);
	parser.set('deaths', data.results[0].total_deaths);

	return expression;
};
/**
 * Automatically generated by gen_l10n_data.py
 * @packageDocumentation
 * @internal
 */
/* Unicode CLDR Version 37, retrieved 2021-06-29 */
export * from './root';
export * from './af';
export * from './am';
export * from './ar';
export * from './as';
export * from './az';
export * from './be';
export * from './bg';
export * from './bn';
export * from './bs';
export * from './ca';
export * from './cs';
export * from './cy';
export * from './da';
export * from './de';
export * from './el';
export * from './en';
export * from './es';
export * from './et';
export * from './eu';
export * from './fa';
export * from './fi';
export * from './fil';
export * from './fr';
export * from './ga';
export * from './gl';
export * from './gu';
export * from './he';
export * from './hi';
export * from './hr';
export * from './hu';
export * from './hy';
export * from './id';
export * from './is';
export * from './it';
export * from './ja';
export * from './jv';
export * from './ka';
export * from './kk';
export * from './km';
export * from './kn';
export * from './ko';
export * from './ky';
export * from './lo';
export * from './lt';
export * from './lv';
export * from './mk';
export * from './ml';
export * from './mn';
export * from './mr';
export * from './ms';
export * from './my';
export * from './nb';
export * from './ne';
export * from './nl';
export * from './or';
export * from './pa';
export * from './pl';
export * from './ps';
export * from './pt';
export * from './ro';
export * from './ru';
export * from './sd';
export * from './si';
export * from './sk';
export * from './sl';
export * from './so';
export * from './sq';
export * from './sr';
export * from './sv';
export * from './sw';
export * from './ta';
export * from './te';
export * from './th';
export * from './tk';
export * from './tr';
export * from './uk';
export * from './ur';
export * from './uz';
export * from './vi';
export * from './yue';
export * from './zh';
export * from './zu';
At first glance, it is easy to believe that programming as a profession is one which is both in rude health, and for which the future is incredibly bright. Increased automation, the mind-bending world of machine learning, and the ever more intuitive ways in which software impacts our lives all suggest that programming is the career to be in, and one of the few careers which one can safely guarantee will still be around in 50 years irrespective of automation or many of the other issues that threaten the future workforce.

Many thousands of people have heeded the call. An entire industry has been rapidly built around getting budding young developers “job ready” in 12 weeks. The idea, in my experience as a student of an early self-driven remote course, is to provide fertile young minds with just enough Rails/JS knowledge to get them through a technical interview, and that’s about it. These businesses thrive on selling dreams of working for Google and Facebook and usually profit handsomely both at point of enrolment and graduation (when finding their students jobs). But this is not yet another blog post criticising the commoditisation of young programmers, as that topic has been explored to a large degree already.

The problem comes when these budding young developers hit the jobs board. The halcyon days where a developer could simply get stuck in with a particular language, whether on the front or the back end, are gone. The definition of a full stack developer is somewhat vague and the requirements for the role depend entirely on who you talk to. A better place to start may well be to ask the question: what makes a good professional developer, full stop?

The short answer is clear: a professional developer produces good quality code on a regular basis. It is a much more complex question as to how said developer can achieve this. It's not enough to be a savant about a given language or framework, because that does not help with strategic decisions, and technology moves at such a pace that such knowledge in and of itself may soon be useless (who's hiring Flash developers?). Knowing the plethora of buzzwords of design and architecture is all well and good in a theoretical sense, but it does little to help with a concrete implementation. Understanding design patterns is frequently cited as good advice for budding developers, but that again is not sufficient, both because they focus on particular challenges and are frequently misused, and because the framework that you use often makes virtually all of these decisions for you.

Instead, a good professional developer must have an understanding of all of these areas (in their mother tongue), alongside many others. The code which appears in the IDE is simply the culmination of this work and the considerations of dozens of technical details, which are often interconnected in tenuous ways. And then you add to this the full stack, beyond one's mother tongue:

The phrase "a jack of all trades is master of none" can ring true here. Whilst nobody would be expected to know all of the items in this list, understanding one from each row would certainly be a prerequisite to being considered a "senior" full stack developer. This is the stack as of today, and every year there are additional prerequisites added. There is also a clear opportunity cost when you deviate from your core competency to learn something new.
Whilst some of these skills are trivial to learn, fully appreciating the idiosyncrasies of any major programming language or framework can take years. So where did this idea of a full stack developer come from? Facebook, of all places, seems to have provided the genesis of this idea, or more specifically a Facebook engineer named Carlos Bueno. At the time this was written, Facebook only employed full stack developers, which makes a lot more sense when you view it within the context of its time. It had a relatively simple PHP backend and did not have the massive technical demands that it has now. Early iterations of Facebook certainly did not require 2-3 years of professional front end design skills.

Personally I think the idea of the full stack developer comes from the age-old idea of the 10x developer, who has come to represent the Ark of the Covenant for startups and smaller businesses that cannot afford to hire specialised developers for every aspect of the delivery of a web application. The two terms seem to be used as synonyms for each other, but I think the idea hidden beneath all of the advertising is that companies want to hire super-effective engineers. I would argue that hiring people for these roles is significantly counterproductive to developing such engineers.

Excellent full stack developers do exist (I work with several), but very few of them started out as such. The conventional wisdom is that a good programmer is a good programmer irrespective of language, but as the programming world splinters into ever more complex language combinations, frameworks and even programming paradigms (functional, anyone?), it is perhaps pertinent to take a moment to sit and consider the best way of acquiring good problem-solving skills. Whilst critical thinking and problem-solving skills are developed, are we helped by having to learn to understand the intricacies of Chef? What makes this even worse is that often the companies who are seeking full stack developers most ardently are startups, where you can add the specs for a project manager to the list of requirements.

But what is happening in programming is symptomatic of a wider cultural shift within the workforce. Employers want full stack employees, because why hire dozens of people if you can get one person to do all of their jobs to a higher degree? If a brilliant full stack developer exists, as defined in Bueno's piece, are they achieving maximum utility by even writing code? Someone of that talent, with that level of expertise across the stack, should sit in a CTO role rather than making rudimentary code changes in the trenches.

For the avoidance of doubt, I am not advocating siloed programming where everyone is ignorant of the rest of the stack and what the rest of the team is working on. The path to being successful at anything is in knowing what you know, knowing it well, and more importantly knowing what you don't know and knowing where to improve on this. Being thrown into the deep end of development by having to learn 8 (possibly more) disciplines simultaneously is not the way to do this. The underlying truth is that there are not enough unicorns in the forest to fill all of these roles to the level at which they have been advertised, and this leaves a large number of job "vacancies" permanently unfilled, which in turn pushes more people into the cycle of thinking there are millions of tech jobs available.
Instead we should focus on allowing our junior developers to grow and to develop their core competencies before branching out into the various layers of the stack. A key part of a developer's growth is confidence, generated via a series of "wins", but that is difficult to achieve when you are seeking wins in sometimes drastically different areas. As a case in point, Chef is written almost entirely in Ruby, but trying to navigate the source code as a novice programmer is a project in and of itself. The most important skill in programming is learning to learn, and that can only be improved upon by learning one thing and learning it well. The best programmers I know have a rapacious thirst for knowledge, which is built on a solid bedrock of understanding of the fundamentals they are using. It's difficult to have that when you are standing on 8 separate bedrocks each made of sand.
// This function clears all active slots of the array.
void free_entire_cache()
{
    while (m_entries.size()) {
        // Free the cached object, if one is present, then clear and drop the slot.
        if ((*this)[m_entries.back()])
            (*this)[m_entries.back()]->free();
        (*this)[m_entries.back()] = 0;
        m_entries.pop_back();
    }
}
// init registers a scheme to defaultScheme.
func init() {
	utilruntime.Must(clientgoscheme.AddToScheme(defaultScheme))
	utilruntime.Must(apiextensionsv1.AddToScheme(defaultScheme))
	utilruntime.Must(apiextensionsv1beta1.AddToScheme(defaultScheme))
}
import { observer } from "mobx-react-lite";
import * as React from "react";
import Guide from "~/components/Guide";
import Modal from "~/components/Modal";
import useStores from "~/hooks/useStores";

function Dialogs() {
  const { dialogs } = useStores();
  const { guide, modalStack } = dialogs;

  return (
    <>
      {guide ? (
        <Guide
          isOpen={guide.isOpen}
          onRequestClose={dialogs.closeGuide}
          title={guide.title}
        >
          {guide.content}
        </Guide>
      ) : undefined}
      {[...modalStack].map(([id, modal]) => (
        <Modal
          key={id}
          isOpen={modal.isOpen}
          isCentered={modal.isCentered}
          onRequestClose={() => dialogs.closeModal(id)}
          title={modal.title}
        >
          {modal.content}
        </Modal>
      ))}
    </>
  );
}

export default observer(Dialogs);
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for variable store.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import gc import threading from absl.testing import parameterized import numpy from tensorflow.python.eager import context from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import test_util from tensorflow.python.keras import combinations from tensorflow.python.keras import regularizers from tensorflow.python.keras.legacy_tf_layers import core as core_layers from tensorflow.python.keras.legacy_tf_layers import variable_scope_shim from tensorflow.python.ops import array_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variable_scope from tensorflow.python.ops import variables as variables_lib from tensorflow.python.platform import test def run_inside_wrap_function_in_eager_mode(graph_function): """Decorator to execute the same graph code in eager and graph modes. In graph mode, we just execute the graph_function passed as argument. In eager mode, we wrap the function using wrap_function and then execute the wrapped result. Args: graph_function: python function containing graph code to be wrapped Returns: decorated function """ def wrap_and_execute(self): tracker = variable_scope_shim.VariableAndLossTracker() with tracker.scope(): # use the original function graph_function(self) return wrap_and_execute class VariableScopeTest(test.TestCase): def tearDown(self): gc.collect() # This will only contain uncollectable garbage, i.e. reference cycles # involving objects with __del__ defined. self.assertEqual(0, len(gc.garbage)) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testGetVar(self): vs = variable_scope._get_default_variable_store() v = vs.get_variable("v", [1]) v1 = vs.get_variable("v", [1]) self.assertIs(v, v1) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testNameExists(self): vs = variable_scope._get_default_variable_store() # No check by default, so we can both create and get existing names. 
v = vs.get_variable("v", [1]) v1 = vs.get_variable("v", [1]) self.assertIs(v, v1) self.assertIsNot(v, vs.get_variable("u", [1], reuse=False)) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testNamelessStore(self): vs = variable_scope._get_default_variable_store() vs.get_variable("v1", [2]) vs.get_variable("v2", [2]) expected_names = ["%s:0" % name for name in ["v1", "v2"]] self.assertEqual( set(expected_names), set(v.name for v in vs._vars.values())) # TODO(mihaimaruseac): Not converted to use wrap_function because of # TypeError: Expected tf.group() expected Tensor arguments not 'None' with # type '<type 'NoneType'>' @test_util.run_in_graph_and_eager_modes def testVarScopeInitializer(self): init = init_ops.constant_initializer(0.3) with variable_scope.variable_scope("tower0") as tower: with variable_scope.variable_scope("foo", initializer=init): v = variable_scope.get_variable("v", []) self.evaluate(variables_lib.variables_initializer([v])) self.assertAllClose(self.evaluate(v.value()), 0.3) with variable_scope.variable_scope(tower, initializer=init): w = variable_scope.get_variable("w", []) self.evaluate(variables_lib.variables_initializer([w])) self.assertAllClose(self.evaluate(w.value()), 0.3) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeConstraint(self): constraint = lambda x: 0. * x with variable_scope.variable_scope("tower1") as tower: with variable_scope.variable_scope("foo", constraint=constraint): v = variable_scope.get_variable("v", []) self.assertIsNotNone(v.constraint) with variable_scope.variable_scope(tower, constraint=constraint): w = variable_scope.get_variable("w", []) self.assertIsNotNone(w.constraint) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeDType(self): with variable_scope.variable_scope("tower2") as tower: with variable_scope.variable_scope("foo", dtype=dtypes.float16): v = variable_scope.get_variable("v", []) self.assertEqual(v.dtype.base_dtype, dtypes.float16) with variable_scope.variable_scope(tower, dtype=dtypes.float16): w = variable_scope.get_variable("w", []) self.assertEqual(w.dtype.base_dtype, dtypes.float16) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testInitFromNonTensorValue(self): v = variable_scope.get_variable("v4", initializer=4, dtype=dtypes.int32) self.evaluate(variables_lib.variables_initializer([v])) self.assertAllClose(self.evaluate(v.value()), 4) w = variable_scope.get_variable( "w4", initializer=numpy.array([1, 2, 3]), dtype=dtypes.int64) self.evaluate(variables_lib.variables_initializer([w])) self.assertAllClose(self.evaluate(w.value()), [1, 2, 3]) # A quirk to be revisited? 
error = ValueError if context.executing_eagerly() else TypeError with self.assertRaises(error): variable_scope.get_variable("x4", initializer={}) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testInitFromNonInitializer(self): # Test various dtypes with zeros initializer as following: types = [ dtypes.int8, dtypes.uint8, dtypes.int16, dtypes.uint16, dtypes.int32, dtypes.int64, dtypes.bool ] # Use different variable_name to distinguish various dtypes for (i, dtype) in enumerate(types): x = variable_scope.get_variable( name="xx%d" % i, shape=(3, 4), dtype=dtype) y = variable_scope.get_variable( name="yy%d" % i, shape=(3, 4), dtype=dtype, initializer=init_ops.zeros_initializer(dtype=dtype)) self.evaluate(variables_lib.global_variables_initializer()) self.assertAllEqual(self.evaluate(x.value()), self.evaluate(y.value())) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeRegularizer(self): init = init_ops.constant_initializer(0.3) def regularizer1(v): return math_ops.reduce_mean(v) + 0.1 def regularizer2(v): return math_ops.reduce_mean(v) + 0.2 with variable_scope.variable_scope( "tower3", regularizer=regularizer1) as tower: with variable_scope.variable_scope("foo", initializer=init): v = variable_scope.get_variable("v", []) self.evaluate(variables_lib.variables_initializer([v])) with variable_scope.variable_scope(tower, initializer=init) as vs: variable_scope.get_variable("u", []) vs.set_regularizer(regularizer2) variable_scope.get_variable("w", []) # Next 3 variable not regularized to test disabling regularization. variable_scope.get_variable( "x", [], regularizer=variable_scope.no_regularizer) with variable_scope.variable_scope( "baz", regularizer=variable_scope.no_regularizer): variable_scope.get_variable("y", []) vs.set_regularizer(variable_scope.no_regularizer) variable_scope.get_variable("z", []) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testInitializeFromValue(self): init = constant_op.constant(0.1) w = variable_scope.get_variable("v", initializer=init) self.evaluate(variables_lib.variables_initializer([w])) self.assertAllClose(self.evaluate(w.value()), 0.1) with self.assertRaisesRegex(ValueError, "shape"): # We disallow explicit shape specification when initializer is constant. variable_scope.get_variable("u", [1], initializer=init) with variable_scope.variable_scope("foo", initializer=init): # Constant initializer can be passed through scopes if needed. v = variable_scope.get_variable("v") self.evaluate(variables_lib.variables_initializer([v])) self.assertAllClose(self.evaluate(v.value()), 0.1) # Check that non-float32 initializer creates a non-float32 variable. init = constant_op.constant(1, dtype=dtypes.int32) t = variable_scope.get_variable("t", initializer=init) self.assertEqual(t.dtype.base_dtype, dtypes.int32) # Raise error if `initializer` dtype and `dtype` are not identical. 
with self.assertRaisesRegex(ValueError, "don't match"): variable_scope.get_variable("s", initializer=init, dtype=dtypes.float64) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeGetOrCreateReuse(self): with self.cached_session(): def test_value(value): x = constant_op.constant(value) with variable_scope.variable_scope( "testVarScopeGetOrCreateReuse_bar", reuse=variable_scope.AUTO_REUSE): _ = state_ops.assign(variable_scope.get_variable("var", []), x) with variable_scope.variable_scope( "testVarScopeGetOrCreateReuse_bar", reuse=variable_scope.AUTO_REUSE): _ = variable_scope.get_variable("var", []) self.assertEqual(value, self.evaluate(x)) test_value(42.) # Variable is created. test_value(13.) # Variable is reused hereafter. test_value(17.) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScope(self): with self.cached_session(): with ops.name_scope_v2("testVarOpScope1"): with variable_scope.variable_scope("tower", "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "tower/w:0") with ops.name_scope_v2("testVarOpScope2"): with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "default/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "default_1/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeUniqueNamesInterleavedSubstringScopes(self): with self.cached_session(): with variable_scope.variable_scope(None, "defaultScope1"): with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "defaultScope1/layer/w:0") with variable_scope.variable_scope(None, "defaultScope1"): with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "defaultScope1_1/layer/w:0") with variable_scope.variable_scope(None, "defaultScope"): with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "defaultScope/layer/w:0") with variable_scope.variable_scope(None, "defaultScope1"): with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "defaultScope1_2/layer/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeUniqueNamesWithJump(self): with self.cached_session(): with variable_scope.variable_scope("default") as default: with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "default/layer/w:0") with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "default/layer_1/w:0") with variable_scope.variable_scope(default): pass # No matter the jump in the middle, unique numbering continues. 
with variable_scope.variable_scope(None, "layer"): self.assertEqual( variable_scope.get_variable("w", []).name, "default/layer_2/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeReuse(self): with self.cached_session(): with variable_scope.variable_scope("outer") as outer: with variable_scope.variable_scope("tower", "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/tower/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") with variable_scope.variable_scope(outer, reuse=True) as outer: with variable_scope.variable_scope("tower", "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/tower/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeGetVar(self): with self.cached_session(): with variable_scope.variable_scope("root"): with variable_scope.variable_scope("towerA") as tower_a: va = variable_scope.get_variable("v", [1]) self.assertEqual(va.name, "root/towerA/v:0") with variable_scope.variable_scope(tower_a, reuse=True): va2 = variable_scope.get_variable("v", [1]) self.assertIs(va2, va) with variable_scope.variable_scope("towerB"): vb = variable_scope.get_variable("v", [1]) self.assertEqual(vb.name, "root/towerB/v:0") with variable_scope.variable_scope("towerA", reuse=True): va2 = variable_scope.get_variable("v", [1]) self.assertIs(va2, va) with variable_scope.variable_scope("foo"): with variable_scope.variable_scope("bar"): v = variable_scope.get_variable("v", [1]) self.assertEqual(v.name, "root/foo/bar/v:0") with variable_scope.variable_scope(tower_a, reuse=True): va3 = variable_scope.get_variable("v", [1]) self.assertIs(va, va3) with self.assertRaises(ValueError) as exc: with variable_scope.variable_scope(tower_a, reuse=True): variable_scope.get_variable("v", [2]) # Different shape. 
self.assertEqual("shape" in str(exc.exception), True) with self.assertRaises(ValueError) as exc: with variable_scope.variable_scope(tower_a, reuse=True): variable_scope.get_variable("v", [1], dtype=dtypes.int32) self.assertEqual("dtype" in str(exc.exception), True) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeOuterScope(self): with self.cached_session(): with variable_scope.variable_scope("outer") as outer: pass with variable_scope.variable_scope(outer): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope("default"): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") with variable_scope.variable_scope(outer, reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope("default", reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarScopeNestedOuterScope(self): with self.cached_session(): with variable_scope.variable_scope("outer") as outer: with variable_scope.variable_scope(outer): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope("default"): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") with variable_scope.variable_scope(outer, reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope("default", reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeReuseParam(self): with self.cached_session(): with variable_scope.variable_scope("outer") as outer: with variable_scope.variable_scope("tower", "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/tower/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") with variable_scope.variable_scope(outer) as outer: with variable_scope.variable_scope("tower", "default", reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/tower/w:0") outer.reuse_variables() with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeReuseError(self): with self.cached_session(): with self.assertRaises(ValueError): with variable_scope.variable_scope(None, "default", reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/tower/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeOuterScope(self): with self.cached_session(): with variable_scope.variable_scope("outer") as outer: pass with variable_scope.variable_scope(outer, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") with variable_scope.variable_scope(outer, "default", reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") outer.reuse_variables() with variable_scope.variable_scope(None, 
"default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVarOpScopeNestedOuterScope(self): with self.cached_session(): with variable_scope.variable_scope("outer") as outer: with variable_scope.variable_scope(outer, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") with variable_scope.variable_scope(outer, "default", reuse=True): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/w:0") with variable_scope.variable_scope(None, "default", []): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testBasicWhenAuxiliaryNameScopeIsFalse(self): with self.cached_session(): with variable_scope.variable_scope( "scope", auxiliary_name_scope=False) as scope: self.assertEqual( variable_scope.get_variable("w", []).name, "scope/w:0") with variable_scope.variable_scope(scope, auxiliary_name_scope=False): self.assertEqual( variable_scope.get_variable("w1", []).name, "scope/w1:0") with variable_scope.variable_scope("outer"): with variable_scope.variable_scope( "inner", auxiliary_name_scope=False) as inner: self.assertEqual(inner.original_name_scope, "outer/") self.assertEqual( variable_scope.get_variable("w", []).name, "outer/inner/w:0") with variable_scope.variable_scope( inner, auxiliary_name_scope=False) as inner1: self.assertEqual(inner1.original_name_scope, "outer/") self.assertEqual( variable_scope.get_variable("w1", []).name, "outer/inner/w1:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testCreatedByDefaultNameWhenAuxiliaryNameScopeIsFalse(self): with self.cached_session(): with variable_scope.variable_scope( None, default_name="default", auxiliary_name_scope=False): self.assertEqual( variable_scope.get_variable("w", []).name, "default/w:0") with variable_scope.variable_scope("outer"): with variable_scope.variable_scope( None, default_name="default", auxiliary_name_scope=False) as inner: self.assertEqual(inner.original_name_scope, "outer/") self.assertEqual( variable_scope.get_variable("w", []).name, "outer/default/w:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testReenterRootScopeWhenAuxiliaryNameScopeIsFalse(self): with self.cached_session(): root_scope = variable_scope.get_variable_scope() with variable_scope.variable_scope( root_scope, auxiliary_name_scope=False): self.assertEqual(variable_scope.get_variable("w", []).name, "w:0") with variable_scope.variable_scope("outer"): with variable_scope.variable_scope( root_scope, auxiliary_name_scope=False) as inner: self.assertEqual(inner.original_name_scope, "") self.assertEqual(variable_scope.get_variable("w1", []).name, "w1:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testAuxiliaryNameScopeIsInvalid(self): with self.cached_session(): with self.assertRaisesRegex(TypeError, "auxiliary_name_scope"): with variable_scope.variable_scope( None, default_name="scope", auxiliary_name_scope="invalid"): pass with self.assertRaisesRegex(TypeError, "auxiliary_name_scope"): with variable_scope.variable_scope( "scope", auxiliary_name_scope="invalid"): pass with variable_scope.variable_scope("scope") as scope: 
pass with self.assertRaisesRegex(TypeError, "auxiliary_name_scope"): with variable_scope.variable_scope( scope, auxiliary_name_scope="invalid"): pass @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testReuseScopeWithoutNameScopeCollision(self): # Github issue: #13429 with self.cached_session(): with variable_scope.variable_scope("outer"): with variable_scope.variable_scope("inner") as inner: pass with variable_scope.variable_scope( inner, auxiliary_name_scope=False) as scope: with ops.name_scope_v2(scope.original_name_scope): self.assertEqual( variable_scope.get_variable("w", []).name, "outer/inner/w:0") with variable_scope.variable_scope("another"): with variable_scope.variable_scope( inner, auxiliary_name_scope=False) as scope1: with ops.name_scope_v2(scope1.original_name_scope): self.assertEqual( variable_scope.get_variable("w1", []).name, "outer/inner/w1:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testGetVarWithDevice(self): g = ops.Graph() varname_type = [] def device_func(op): if op.type in ["Variable", "VariableV2", "VarHandleOp"]: varname_type.append((op.name, op.get_attr("dtype"))) return "/device:GPU:0" with g.as_default(): with ops.device(device_func): _ = variable_scope.get_variable("x", (100, 200)) _ = variable_scope.get_variable( "y", dtype=dtypes.int64, initializer=numpy.arange(73)) self.assertEqual(varname_type[0], ("x", dtypes.float32)) self.assertEqual(varname_type[1], ("y", dtypes.int64)) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testGetVariableWithRefDtype(self): v = variable_scope.get_variable("v", shape=[3, 4], dtype=dtypes.float32) # Ensure it is possible to do get_variable with a _ref dtype passed in. _ = variable_scope.get_variable("w", shape=[5, 6], dtype=v.dtype) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testGetVariableWithInitializerWhichTakesNoArgs(self): v = variable_scope.get_variable("foo", initializer=lambda: [2]) self.assertEqual(v.name, "foo:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testGetVariableWithInitializerWhichTakesOptionalArgs(self): v = variable_scope.get_variable("foo", initializer=lambda x=True: [2]) self.assertEqual(v.name, "foo:0") @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testTwoGraphs(self): def f(): g1 = ops.Graph() g2 = ops.Graph() with g1.as_default(): with g2.as_default(): with variable_scope.variable_scope("_"): pass self.assertRaisesRegex(ValueError, "'_' is not a valid (?:root )?scope name", f) class VariableScopeWithCustomGetterTest(test.TestCase): @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testNonCallableGetterFails(self): with self.assertRaisesRegex(ValueError, r"custom_getter .* not callable:"): with variable_scope.variable_scope("scope0", custom_getter=3): variable_scope.get_variable("name0") with self.assertRaisesRegex(ValueError, r"custom_getter .* not callable:"): variable_scope.get_variable("name0", custom_getter=3) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testNoSideEffectsWithIdentityCustomGetter(self): called = [0] def custom_getter(getter, *args, **kwargs): called[0] += 1 return getter(*args, **kwargs) with variable_scope.variable_scope( "scope", custom_getter=custom_getter) as scope: v = variable_scope.get_variable("v", [1]) with variable_scope.variable_scope(scope, reuse=True): v2 = 
variable_scope.get_variable("v", [1]) with variable_scope.variable_scope("new_scope") as new_scope: v3 = variable_scope.get_variable("v3", [1]) with variable_scope.variable_scope( new_scope, reuse=True, custom_getter=custom_getter): v4 = variable_scope.get_variable("v3", [1]) self.assertIs(v, v2) self.assertIs(v3, v4) self.assertEqual(3, called[0]) # skipped one in the first new_scope @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testSynchronizationAndAggregationWithCustomGetter(self): called = [0] synchronization = variable_scope.VariableSynchronization.AUTO aggregation = variable_scope.VariableAggregation.NONE def custom_getter(getter, *args, **kwargs): called[0] += 1 # Verify synchronization and aggregation kwargs are as expected. self.assertEqual(kwargs["synchronization"], synchronization) self.assertEqual(kwargs["aggregation"], aggregation) return getter(*args, **kwargs) with variable_scope.variable_scope("scope", custom_getter=custom_getter): variable_scope.get_variable("v", [1]) self.assertEqual(1, called[0]) with variable_scope.variable_scope("scope", custom_getter=custom_getter): synchronization = variable_scope.VariableSynchronization.ON_READ aggregation = variable_scope.VariableAggregation.MEAN variable_scope.get_variable( "v1", [1], synchronization=synchronization, aggregation=aggregation) self.assertEqual(2, called[0]) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVariableCreator(self): variable_names = [] def creator_a(next_creator, **kwargs): variable_names.append(kwargs.get("name", "")) return next_creator(**kwargs) def creator_b(next_creator, **kwargs): kwargs["name"] = "forced_name" return next_creator(**kwargs) with variable_scope.variable_creator_scope(creator_a): with variable_scope.variable_creator_scope(creator_b): variable_scope.variable(1.0, name="one_name") self.assertEqual(variable_names[0], "forced_name") called = [False] def creater_c(next_creator, **kwargs): called[0] = True self.assertEqual(kwargs["synchronization"], variable_scope.VariableSynchronization.ON_WRITE) self.assertEqual(kwargs["aggregation"], variable_scope.VariableAggregation.MEAN) return next_creator(**kwargs) with variable_scope.variable_creator_scope(creater_c): variable_scope.get_variable( "v", [], synchronization=variable_scope.VariableSynchronization.ON_WRITE, aggregation=variable_scope.VariableAggregation.MEAN) self.assertTrue(called[0]) @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testVariableCreatorNestingError(self): def creator(next_creator, **kwargs): return next_creator(**kwargs) # Save the state so we can clean up at the end. graph = ops.get_default_graph() old_creator_stack = graph._variable_creator_stack try: scope = variable_scope.variable_creator_scope(creator) scope.__enter__() with variable_scope.variable_creator_scope(creator): with self.assertRaises(RuntimeError): scope.__exit__(None, None, None) finally: graph._variable_creator_stack = old_creator_stack class VariableScopeMultithreadedTest(test.TestCase): @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testTwoThreadsDisjointScopeEntry(self): def thread_fn(i, graph): with graph.as_default(): with variable_scope.variable_scope("foo"): if i == 0: v = variable_scope.get_variable("v", []) self.assertEqual("foo/v:0", v.name) else: # Any thread after the first one should fail to create variable # with the same name. 
with self.assertRaises(ValueError): variable_scope.get_variable("v", []) graph = ops.get_default_graph() threads = [ threading.Thread(target=thread_fn, args=( i, graph, )) for i in range(2) ] threads[0].start() # Allow thread 0 to finish before starting thread 1. threads[0].join() threads[1].start() threads[1].join() @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testTwoThreadsNestedScopeEntry(self): def thread_fn(i, graph, run_event, pause_event): with graph.as_default(): with variable_scope.variable_scope("foo"): if i == 0: v = variable_scope.get_variable("v", []) self.assertEqual("foo/v:0", v.name) else: # Any thread after the first one should fail to create variable # with the same name. with self.assertRaises(ValueError): variable_scope.get_variable("v", []) pause_event.set() run_event.wait() graph = ops.get_default_graph() run_events = [threading.Event() for _ in range(2)] pause_events = [threading.Event() for _ in range(2)] threads = [ threading.Thread( target=thread_fn, args=(i, graph, run_events[i], pause_events[i])) for i in range(2) ] # Start first thread. threads[0].start() pause_events[0].wait() # Start next thread once the first thread has paused. threads[1].start() pause_events[1].wait() # Resume both threads. run_events[0].set() run_events[1].set() threads[0].join() threads[1].join() @test_util.run_in_graph_and_eager_modes @run_inside_wrap_function_in_eager_mode def testReenterMainScope(self): def thread_fn(graph, main_thread_scope): with graph.as_default(): # Variable created with main scope will have prefix "main". with variable_scope.variable_scope(main_thread_scope): with variable_scope.variable_scope("foo"): v = variable_scope.get_variable("v", []) self.assertEqual("main/foo/v:0", v.name) # Variable created outside main scope will not have prefix "main". 
with variable_scope.variable_scope("bar"): v = variable_scope.get_variable("v", []) self.assertEqual("bar/v:0", v.name) graph = ops.get_default_graph() with variable_scope.variable_scope("main") as main_thread_scope: thread = threading.Thread( target=thread_fn, args=(graph, main_thread_scope)) thread.start() thread.join() @combinations.generate(combinations.combine(mode=["eager"])) class TF1VariableScopeWrapperLayerTest(test.TestCase, parameterized.TestCase): def test_get_variable(self): # Test the shim when using `get_variable` (and regularizers) directly class WrappedDenseLayer(variable_scope_shim.VariableScopeWrapperLayer): def __init__(self, units, *args, **kwargs): super().__init__(*args, **kwargs) self.units = units def forward_pass(self, inputs, training=None): out = inputs with variable_scope.variable_scope("dense_one"): # The weights are created with a `regularizer`, # so the layer should track their regularization losses kernel = variable_scope.get_variable( shape=[out.shape[-1], self.units], regularizer=regularizers.L2(), initializer=init_ops.ones_initializer(), name="kernel") bias = variable_scope.get_variable( shape=[self.units,], initializer=init_ops.zeros_initializer(), name="bias") out = math_ops.matmul(out, kernel) out = nn_ops.bias_add(out, bias) with variable_scope.variable_scope("nested_scope"): with variable_scope.variable_scope("dense_two"): kernel = variable_scope.get_variable( shape=[out.shape[-1], self.units], regularizer=regularizers.L2(), initializer=init_ops.ones_initializer(), name="kernel") bias = variable_scope.get_variable( shape=[self.units,], initializer=init_ops.zeros_initializer(), name="bias") out = math_ops.matmul(out, kernel) out = nn_ops.bias_add(out, bias) return out layer = WrappedDenseLayer(10) out = layer(array_ops.ones(shape=(5, 5))) weights = {x.name: x for x in layer.variables} # Verify the correct output, regularization losses, + variables were made self.assertEqual(weights.keys(), {"dense_one/bias:0", "dense_one/kernel:0", "nested_scope/dense_two/bias:0", "nested_scope/dense_two/kernel:0"}) self.assertAllEqual(out, array_ops.ones(shape=(5, 10)) * 50) self.assertAllEqual(math_ops.add_n(layer.losses), 1.5) # Verify reuse by updating the variables then re-running weights["dense_one/kernel:0"].assign(array_ops.ones(shape=(5, 10)) * 2) weights["nested_scope/dense_two/kernel:0"].assign( array_ops.ones(shape=(10, 10)) * 2) out = layer(array_ops.ones(shape=(5, 5))) self.assertAllEqual(out, array_ops.ones(shape=(5, 10)) * 200) self.assertAllEqual(math_ops.add_n(layer.losses), 6) def test_compat_v1_layer(self): # Test the shim when using `compat.v1` layers class WrappedDenseLayer(variable_scope_shim.VariableScopeWrapperLayer): def __init__(self, units, *args, **kwargs): super().__init__(*args, **kwargs) self.units = units def forward_pass(self, inputs, training=None): out = core_layers.dense(inputs, self.units, name="dense_one", kernel_initializer=init_ops.ones_initializer(), kernel_regularizer="l2") with variable_scope.variable_scope("nested_scope"): out = core_layers.dense( out, self.units, name="dense_two", kernel_initializer=init_ops.ones_initializer(), kernel_regularizer="l2") return out layer = WrappedDenseLayer(10) out = layer(array_ops.ones(shape=(5, 5))) weights = {x.name: x for x in layer.variables} # Verify the correct output, losses, + variables were made self.assertEqual(weights.keys(), {"dense_one/bias:0", "dense_one/kernel:0", "nested_scope/dense_two/bias:0", "nested_scope/dense_two/kernel:0"}) self.assertAllEqual(out, 
array_ops.ones(shape=(5, 10)) * 50) self.assertAllEqual(math_ops.add_n(layer.losses), 1.5) # Verify reuse by updating the variables then re-running weights["dense_one/kernel:0"].assign(array_ops.ones(shape=(5, 10)) * 2) weights["nested_scope/dense_two/kernel:0"].assign( array_ops.ones(shape=(10, 10)) * 2) out = layer(array_ops.ones(shape=(5, 5))) self.assertAllEqual(out, array_ops.ones(shape=(5, 10)) * 200) self.assertAllEqual(math_ops.add_n(layer.losses), 6) if __name__ == "__main__": test.main()
Cytological and molecular analysis of centromere misdivision in maize. B chromosome derivatives suffering from breaks within their centromere were examined cytologically and molecularly. We showed by high resolution FISH that misdivision of the centromere of a univalent chromosome can occur during meiosis. The breaks divide the centromere repeat sequence cluster. A telocentric chromosome formed by misdivision was found to have the addition of telomeric repeats to the broken centromere. A ring chromosome formed after misdivision occurred by fusion of the broken centromere to the telomere. Pulsed-field electrophoresis analyses were performed on the telocentric and ring chromosomes to identify fragments that hybridize to both the telomeric repeat and the B-specific centromeric repeat. We conclude that healing of broken maize centromeres can be achieved through the mechanisms of addition or fusion of telomeric repeat sequences to the broken centromere.
def add_comment(payload):
    body = request.get_json()
    comments = body.get('comments', None)
    project_id = body.get('project_id', None)
    # user_id is currently hardcoded rather than taken from the payload.
    user_id = 1
    if not comments or not project_id or not user_id:
        abort(400, 'invalid inputs for new comment')
    try:
        comment = Comment(comments=comments, project_id=project_id,
                          user_id=user_id)
        comment.insert()
        return jsonify({
            'success': True,
        })
    except Exception:
        flash('An error occurred while adding the new comment')
        abort(500, 'failed to add new comment')
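A minimal sketch of how a client would call this handler; the endpoint path and the host are assumptions, since the route decorator is not shown above, and the field values are illustrative:

# Hypothetical client call; the URL is an assumption, the JSON keys match
# the body.get() lookups in the handler.
import requests

requests.post('http://localhost:5000/comments', json={
    'comments': 'Looks good to me',  # maps to body.get('comments')
    'project_id': 7,                 # maps to body.get('project_id')
})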
def compute_polyline_length(polyline: np.ndarray) -> float:
    """Computes the Euclidean path length of a polyline given as an (N, 2) array."""
    # A polyline needs at least two points to have a well-defined length.
    assert isinstance(polyline, np.ndarray) and polyline.ndim == 2 and polyline.shape[0] >= 2, \
        'Polyline malformed for path length computation p={}'.format(polyline)

    distance_between_points = np.diff(polyline, axis=0)
    return np.sum(np.sqrt(np.sum(distance_between_points ** 2, axis=1)))
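A quick usage sketch: for the three corners of the unit square below, the two segments each have length 1, so the function returns 2.0.

import numpy as np

polyline = np.array([[0.0, 0.0],
                     [1.0, 0.0],
                     [1.0, 1.0]])
print(compute_polyline_length(polyline))  # 2.0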
def _upstream_area_message(self, area, commits): return '\n'.join( ['{} ({}):'.format(area, len(commits)), ''] + list(self.upstream_commit_line(c) for c in commits) + [''])
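For illustration, with area "net" and two commits the joined string renders with the shape below; the per-commit lines are placeholders, since upstream_commit_line is defined elsewhere:

net (2):

<commit line for the first commit>
<commit line for the second commit>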
package com.pickcoverage;

import com.pickcoverage.domain.coverages.Bike;
import com.pickcoverage.domain.coverages.Electronics;
import com.pickcoverage.domain.coverages.Jewelry;
import com.pickcoverage.domain.coverages.SportsEquipment;
import com.pickcoverage.domain.repository.IBikeRepository;
import com.pickcoverage.domain.repository.IElectronicsRepository;
import com.pickcoverage.domain.repository.IJewelryRepository;
import com.pickcoverage.domain.repository.ISportsEquipmentRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.domain.EntityScan;
import org.springframework.context.annotation.Bean;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;

/**
 * Created by stefanbaychev on 3/30/17.
 */
@SpringBootApplication
@EnableJpaRepositories(basePackages = "com.pickcoverage.domain.repository")
@EntityScan(basePackages = "com.pickcoverage.domain")
public class CoverageApplication {

    private static final Logger LOG = LoggerFactory.getLogger(CoverageApplication.class);

    /**
     * The entry point of the application.
     *
     * @param args the input arguments
     */
    public static void main(String[] args) {
        SpringApplication.run(CoverageApplication.class, args);
    }

    /**
     * Init command line runner for sample data
     *
     * @param iBikeRepository the bike repository
     * @param iJewelryRepository the jewelry repository
     * @param iElectronicsRepository the electronics repository
     * @param iSportsEquipmentRepository the sports equipment repository
     * @return the command line runner
     */
    @Bean
    CommandLineRunner init(IBikeRepository iBikeRepository, IJewelryRepository iJewelryRepository,
                           IElectronicsRepository iElectronicsRepository,
                           ISportsEquipmentRepository iSportsEquipmentRepository) {

        return (args) -> {
            // Autoboxing handles the double-to-Double conversion; explicit
            // Double.valueOf wrapping is unnecessary here.
            iBikeRepository.save(new Bike(0d, 3000d, 30d));
            iElectronicsRepository.save(new Electronics(500d, 6000d, 35d));
            iJewelryRepository.save(new Jewelry(500d, 10000d, 5d));
            iSportsEquipmentRepository.save(new SportsEquipment(0d, 20000d, 30d));

            LOG.info("Created some Default Coverage Templates to be used");
        };
    }
}
/*
  Copyright (c) 2009, <NAME>.
  All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions are met:
      * Redistributions of source code must retain the above copyright
        notice, this list of conditions and the following disclaimer.
      * Redistributions in binary form must reproduce the above copyright
        notice, this list of conditions and the following disclaimer in the
        documentation and/or other materials provided with the distribution.
      * Neither the name of the <organization> nor the names of its
        contributors may be used to endorse or promote products derived
        from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY <NAME> ''AS IS'' AND ANY EXPRESS OR
  IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
  OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
  IN NO EVENT SHALL <NAME> BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
  TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
  PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
  LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*  cutest.c
    Based on simplereader.c, this is a test of some functions
    in libdwarf. It exits with 0 on success and 1 on failure.
*/
#include <sys/types.h> /* For open() */
#include <sys/stat.h>  /* For open() */
#include <fcntl.h>     /* For open() */
#include <stdlib.h>    /* For exit() */
#include <unistd.h>    /* For close() */
#include <string.h>    /* for strcmp() */
#include <stdio.h>
#include <errno.h>
#include "dwarf.h"
#include "libdwarf.h"

static void read_cu_list(Dwarf_Debug dbg);
static int print_die_data(Dwarf_Debug dbg, Dwarf_Die print_me,
    int level, const char *expected);
static void get_die_and_siblings(Dwarf_Debug dbg, Dwarf_Die in_die,
    int in_level);

int stoplimit = 329;
int readcount = 0;

int main(int argc, char **argv)
{
    Dwarf_Debug dbg = 0;
    int fd = -1;
    const char *filepath = "<stdin>";
    int res = DW_DLV_ERROR;
    Dwarf_Error error;
    Dwarf_Handler errhand = 0;
    Dwarf_Ptr errarg = 0;

    if(argc < 2) {
        fd = 0; /* stdin */
    } else {
        filepath = argv[1];
        fd = open(filepath,O_RDONLY);
    }
    if(fd < 0) {
        printf("Failure attempting to open %s\n",filepath);
        /* Cannot proceed with an invalid descriptor. */
        exit(1);
    }
    res = dwarf_init_b(fd, DW_GROUPNUMBER_ANY,errhand,errarg, &dbg,&error);
    if(res != DW_DLV_OK) {
        printf("Giving up, cannot do DWARF processing\n");
        exit(1);
    }

    read_cu_list(dbg);
    res = dwarf_finish(dbg);
    if(res != DW_DLV_OK) {
        printf("dwarf_finish failed!\n");
    }
    close(fd);
    return 0;
}

static void read_cu_list(Dwarf_Debug dbg)
{
    Dwarf_Unsigned cu_header_length = 0;
    Dwarf_Half version_stamp = 0;
    Dwarf_Unsigned abbrev_offset = 0;
    Dwarf_Half address_size = 0;
    Dwarf_Half length_size = 0;
    Dwarf_Half extension_size = 0;
    Dwarf_Sig8 type_signature;
    Dwarf_Unsigned typeoffset = 0;
    Dwarf_Unsigned next_cu_header = 0;
    Dwarf_Half header_cu_type = 0;
    Dwarf_Error error = 0;
    int cu_number = 0;
    Dwarf_Bool is_info = 1;

    for(;;++cu_number) {
        Dwarf_Die no_die = 0;
        Dwarf_Die cu_die = 0;
        int res = DW_DLV_ERROR;
        res = dwarf_next_cu_header_d(dbg,is_info,&cu_header_length,
            &version_stamp, &abbrev_offset, &address_size,
            &length_size,&extension_size,
            &type_signature,&typeoffset,
            &next_cu_header, &header_cu_type, &error);
        if(res == DW_DLV_ERROR) {
            printf("Error in dwarf_next_cu_header\n");
            exit(1);
        }
        if(res == DW_DLV_NO_ENTRY) {
            /* Done. */
            return;
        }
        /* The CU will have a single sibling, a cu_die. */
        res = dwarf_siblingof_b(dbg,no_die,is_info,&cu_die,&error);
        if(res == DW_DLV_ERROR) {
            printf("Error in dwarf_siblingof_b on CU die \n");
            exit(1);
        }
        if(res == DW_DLV_NO_ENTRY) {
            /* Impossible case. */
            printf("no entry! in dwarf_siblingof_b on CU die \n");
            exit(1);
        }
        get_die_and_siblings(dbg,cu_die,0);
        dwarf_dealloc(dbg,cu_die,DW_DLA_DIE);
    }
}

static void get_die_and_siblings(Dwarf_Debug dbg, Dwarf_Die in_die,
    int in_level)
{
    int res = DW_DLV_ERROR;
    Dwarf_Die cur_die=in_die;
    Dwarf_Die child = 0;
    Dwarf_Error error;
    Dwarf_Bool is_info = dwarf_get_die_infotypes_flag(in_die);

    {
        /*  To be consistent with simplereader we only count
            dies with names. Makes debugging easier. */
        char *name = 0;
        res = dwarf_diename(in_die,&name,&error);
        if(res == DW_DLV_OK) {
            dwarf_dealloc(dbg,name,DW_DLA_STRING);
            readcount++;
        }
    }
    if(readcount >= stoplimit) {
        /* We will stop after printing and checking accuracy */
        Dwarf_Off cuoff = 0;
        Dwarf_Off cudieoff = 0;
        Dwarf_Die cudie = 0;

        print_die_data(dbg,in_die,in_level,"doas");
        res = dwarf_CU_dieoffset_given_die(in_die,&cudieoff,&error);
        if(res != DW_DLV_OK) {
            printf("FAIL: dwarf_CU_dieoffset_given_die did not work\n");
            exit(1);
        }
        res = dwarf_offdie_b(dbg,cudieoff,is_info,&cudie,&error);
        if(res != DW_DLV_OK) {
            printf("FAIL: dwarf_offdie did not work\n");
            exit(1);
        }
        print_die_data(dbg,cudie,0,"dwarf_init_finish.c");
        exit(0);
    }
    for(;;) {
        Dwarf_Die sib_die = 0;
        res = dwarf_child(cur_die,&child,&error);
        if(res == DW_DLV_ERROR) {
            printf("Error in dwarf_child , level %d \n",in_level);
            exit(1);
        }
        if(res == DW_DLV_OK) {
            get_die_and_siblings(dbg,child,in_level+1);
        }
        /* res == DW_DLV_NO_ENTRY */
        res = dwarf_siblingof_b(dbg,cur_die,is_info,&sib_die,&error);
        if(res == DW_DLV_ERROR) {
            printf("Error in dwarf_siblingof_b , level %d \n",in_level);
            exit(1);
        }
        if(res == DW_DLV_NO_ENTRY) {
            /* Done at this level. */
            break;
        }
        /* res == DW_DLV_OK */
        if(cur_die != in_die) {
            dwarf_dealloc(dbg,cur_die,DW_DLA_DIE);
        }
        cur_die = sib_die;
    }
    return;
}

static int print_die_data(Dwarf_Debug dbg, Dwarf_Die print_me,
    int level, const char *expected)
{
    char *name = 0;
    Dwarf_Error error = 0;
    Dwarf_Half tag = 0;
    const char *tagname = 0;
    int res = dwarf_diename(print_me,&name,&error);

    if(res == DW_DLV_ERROR) {
        printf("Error in dwarf_diename , level %d \n",level);
        exit(1);
    }
    if(res == DW_DLV_NO_ENTRY) {
        /* Ignore entries with no name. */
        return 0;
    }
    res = dwarf_tag(print_me,&tag,&error);
    if(res != DW_DLV_OK) {
        printf("Error in dwarf_tag , level %d \n",level);
        exit(1);
    }
    res = dwarf_get_TAG_name(tag,&tagname);
    if(res != DW_DLV_OK) {
        printf("Error in dwarf_get_TAG_name , level %d \n",level);
        exit(1);
    }
    printf("<%d> tag: %d %s name: %s\n",level,tag,tagname,name);
    if(expected && strcmp(expected, name)) {
        printf("FAIL. die got %s expected %s\n",name,expected);
        exit(1);
    }
    /* Dealloc the name only after its final use above. */
    dwarf_dealloc(dbg,name,DW_DLA_STRING);
    return 1;
}
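To try the test locally, something along these lines should work, assuming the libdwarf headers and library are installed; the exact compiler flags and library name vary by distribution and are an assumption here:

# Hypothetical build and run; -ldwarf is the usual link flag for libdwarf.
cc -o cutest cutest.c -ldwarf
./cutest some_elf_with_dwarf_info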
/** * Builds the Vendor Image configurations. * * @param flavorMaps * the collection of flavors and the properties for those flavors * @param vfNodeTemplate * the node template for the VF * * @return a stream of VendorImageConfiguration objects * @throws IllegalArgumentException * if the VF has no child node templates which contain images (complex properties) that have software * version strings */ Stream<VendorImageConfiguration> buildVendorImageConfigurations( Collection<Map<String, Map<String, String>>> flavorMaps, NodeTemplate vfNodeTemplate) throws IllegalArgumentException { String resourceVendor = vfNodeTemplate.getMetaData().getValue("resourceVendor"); applicationLogger.debug("Resource Vendor " + resourceVendor); List<String> softwareVersions = extractSoftwareVersions(vfNodeTemplate.getSubMappingToscaTemplate().getNodeTemplates()); applicationLogger.debug("Software Versions: " + softwareVersions); if (softwareVersions.isEmpty()) { throw new IllegalArgumentException("No software versions could be found for this CSAR file"); } return flavorMaps.stream() .map(value -> value.entrySet().stream() .filter(entry -> VENDOR_INFO.equals(entry.getKey())) .map(e -> e.getValue().get(VENDOR_MODEL)) .findFirst()) .flatMap(vendorModel -> softwareVersions.stream().map( version -> new VendorImageConfiguration(vendorModel.orElse(null), resourceVendor, version))); }
#pragma once #include "geometry/point2d.hpp" #include "geometry/rect2d.hpp" #include "geometry/any_rect2d.hpp" namespace df { double InterpolateDouble(double startV, double endV, double t); m2::PointD InterpolatePoint(m2::PointD const & startPt, m2::PointD const & endPt, double t); double InterpolateAngle(double startAngle, double endAngle, double t); } // namespace df
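A minimal sketch of what these three helpers plausibly compute, assuming standard linear interpolation, component-wise m2::PointD arithmetic, and shortest-arc angle blending; the math::pi constant name is an assumption, and this is an illustration rather than the project's actual implementation:

namespace df
{
double InterpolateDouble(double startV, double endV, double t)
{
  // Plain linear interpolation between the two values.
  return startV + (endV - startV) * t;
}

m2::PointD InterpolatePoint(m2::PointD const & startPt, m2::PointD const & endPt, double t)
{
  // Interpolate each coordinate independently (assumes point arithmetic operators).
  return startPt + (endPt - startPt) * t;
}

double InterpolateAngle(double startAngle, double endAngle, double t)
{
  // Walk along the shorter arc between the two angles.
  double diff = endAngle - startAngle;
  while (diff > math::pi) diff -= 2.0 * math::pi;   // hypothetical constant name
  while (diff < -math::pi) diff += 2.0 * math::pi;
  return startAngle + diff * t;
}
}  // namespace df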
<reponame>CoolBitX-Technology/coolwallet3-sdk
import * as derivation from './derive';
import Transport from "../transport";
import * as utils from "../utils/index";
import { pathType } from '../config/param';

export default class EDDSACoin {
  coinType: string;

  constructor(coinType: string) {
    this.coinType = coinType;

    this.getPublicKey = this.getPublicKey.bind(this);
  }

  /**
   * Get the public key of an EdDSA-based coin.
   * @dev Temporarily only supports 0 as the account index, for speed optimization.
   * If you pass in an account index > 0, it will return the same public key.
   * @param {Transport} transport
   * @param {string} appPrivateKey
   * @param {string} appId
   * @param {boolean} isSLIP0010 derive the path with SLIP-0010 (default) instead of BIP32-Ed25519
   * @returns {Promise<string>}
   */
  async getPublicKey(transport: Transport, appPrivateKey: string, appId: string, isSLIP0010: boolean = true) {
    const pathPrefix = isSLIP0010 ? pathType.SLIP0010 : pathType.BIP32_ED25519;
    const path = await utils.getPath(this.coinType, 0, 3, pathPrefix);
    return derivation.getEd25519PublicKey(
      transport,
      appId,
      appPrivateKey,
      path
    );
  }
}
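A usage sketch; the transport, appPrivateKey, and appId values are placeholders you would obtain from the SDK's pairing flow, and the coin type shown is only an example (1815 is Cardano's SLIP-44 index):

// Hypothetical wiring; values come from the app's pairing with the card.
const ada = new EDDSACoin('1815');
const publicKey = await ada.getPublicKey(transport, appPrivateKey, appId);        // SLIP-0010 path
const legacyKey = await ada.getPublicKey(transport, appPrivateKey, appId, false); // BIP32-Ed25519 path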
/**
 * Delete a specific mapping for a meal
 * @param mealId
 */
public void deleteMappingMeal(int mealId) {
    database.delete(conn.TABLE_MAPPING_MEAL, conn.COLUMN_MEAL_ID + " = " + mealId, null);
    Log.w(DAOMappingMeal.class.getName(), "Deleted mapping for mealID " + mealId);
}
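Since the where-clause above is built by string concatenation, a variant using bound selection arguments keeps the same behavior while letting SQLite bind the value; a sketch under the same table and column constants:

public void deleteMappingMeal(int mealId) {
    // Same delete, but with a bound argument instead of string concatenation.
    database.delete(conn.TABLE_MAPPING_MEAL,
            conn.COLUMN_MEAL_ID + " = ?",
            new String[]{String.valueOf(mealId)});
    Log.w(DAOMappingMeal.class.getName(), "Deleted mapping for mealID " + mealId);
}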
<filename>src/main/java/com/sematext/solr/redis/command/Sort.java package com.sematext.solr.redis.command; import org.apache.solr.common.params.SolrParams; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import redis.clients.jedis.JedisCommands; import redis.clients.jedis.SortingParams; import java.util.Map; public final class Sort implements Command<JedisCommands> { private static final Logger log = LoggerFactory.getLogger(Sort.class); @Override public Map<String, Float> execute(final JedisCommands client, final SolrParams params) { final String key = ParamUtil.assertGetStringByName(params, "key"); final String algorithm = ParamUtil.tryGetStringByName(params, "algorithm", null); final String order = ParamUtil.tryGetStringByName(params, "order", null); final Integer limit = ParamUtil.tryGetIntByName(params, "limit", null); final Integer offset = ParamUtil.tryGetIntByName(params, "offset", null); final String byValue = ParamUtil.tryGetStringByName(params, "by", null); final String[] get = ParamUtil.getStringByPrefix(params, "get"); final SortingParams sortingParams = new SortingParams(); if ("alpha".equalsIgnoreCase(algorithm)) { sortingParams.alpha(); } if ("desc".equalsIgnoreCase(order)) { sortingParams.desc(); } else if ("asc".equalsIgnoreCase(order)) { sortingParams.asc(); } if (limit != null || offset != null) { sortingParams.limit(offset == null ? 0 : offset, limit == null ? 0 : limit); } if (byValue != null) { sortingParams.by(byValue); } sortingParams.get(get); log.debug("Fetching SORT from Redis for keys: {} GET {} BY {} ORDER {} OFFSET {} LIMIT {} ALGORITHM {}", key, get, byValue, order, offset, limit, algorithm); // Use decrementing Scorer to preserve list ordering return ResultUtil.stringIteratorToMap(client.sort(key, sortingParams), 0F, new ResultUtil.Scorer() { @Override public Float score(final Float score) { return score - 1F; } }); } }
Calcium antagonists and sympathetic nerve activation: are there differences between classes?

ACTIONS OF THE SYMPATHETIC NERVOUS SYSTEM: The sympathetic nervous system is an important cardiovascular regulator, particularly during stress and exercise; sympathetic nervous activity is regulated in centers in the brain stem and transmitted to organs and blood vessels that are innervated by sympathetic nerve endings. In the heart, the sympathetic nervous system increases heart rate and contractility. The effect of the sympathetic nervous system in different vascular beds depends on the degree of innervation, the distribution of postjunctional receptors and the effect of local mediators. Overactivation of the sympathetic nervous system may lead to hypertension and is involved in heart failure. The degree of sympathetic activation determines prognosis in heart failure. Hence, vasodilators ideally should also blunt sympathetic activity, or at least avoid activating it.

DIFFERENCES AMONG CALCIUM ANTAGONISTS: Calcium antagonists are widely used for the treatment of hypertension and coronary artery disease. Their main mechanism of action is inhibition of L-type Ca2+ channels. Short-acting nifedipine leads to a marked increase in heart rate, sympathetic nerve activity and plasma catecholamines, similar to those induced by a cold pressor test. With long-acting nifedipine, heart rate does not increase, but sympathetic nerve activity does. Other calcium antagonists have been less thoroughly investigated, but indirect evidence suggests differences between the classes. Verapamil and diltiazem lower heart rate. Plasma noradrenalin measurements suggest that verapamil does not stimulate the sympathetic nervous system, but tends to suppress it. Second-generation dihydropyridines with a longer duration of action do not increase heart rate; their effects on peripheral sympathetic nerve activity are not clear. In summary, the classes of calcium antagonists differ with regard to their effects on sympathetic nerve activation. A decrease in heart rate and nerve activity might be beneficial for long-term prognosis, particularly in hypertension and heart failure.
def validate_absent(self, schema_key): schema = self.schemas.get(schema_key, None) if schema is None: return False return schema.get('absent', False)
import { Entity, PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn, ObjectType, ManyToMany, } from 'typeorm' import { AccountEntity } from '../account/entities/account.entity' import { JobEntity } from '../job/entities/job.entity' import { OrderEntity } from '../order/entities/order.entity' @Entity({ name: 'tag' }) export class TagEntity { @PrimaryGeneratedColumn() id!: number @Column() name!: string @CreateDateColumn() created!: Date @UpdateDateColumn() updated!: Date @ManyToMany((): ObjectType<OrderEntity> => OrderEntity, (o) => o.tags) orders!: OrderEntity[] @ManyToMany((): ObjectType<AccountEntity> => AccountEntity, (a) => a.tags) accounts!: AccountEntity[] @ManyToMany((): ObjectType<JobEntity> => JobEntity, (a) => a.tags) jobs!: JobEntity[] @Column({ nullable: true }) createdBy?: string @Column({ nullable: true }) updatedBy?: string }
/** * A zone is a delegated portion of DNS. We use the word {@code zone} instead of * {@code domain}, as denominator focuses on configuration aspects of DNS. * * @since 1.2 See <a href="http://www.ietf.org/rfc/rfc1035.txt">RFC 1035</a> */ public class Zone { /** * Represent a zone without an {@link #id() id}. * * @param name * corresponds to {@link #name()} */ public static Zone create(String name) { return create(name, null); } /** * Represent a zone with an {@link #id() id}. * * @param name * corresponds to {@link #name()} * @param id * nullable; corresponds to {@link #id()} */ public static Zone create(String name, String id) { return new Zone(name, id); } private final String name; private final String id; @ConstructorProperties({ "name", "id" }) Zone(String name, String id) { this.name = checkNotNull(name, "name"); this.id = id; } /** * The origin or starting point for the zone in the DNS tree. Usually * includes a trailing dot, ex. "{@code netflix.com.}" */ public String name() { return name; } /** * When present, the service supports multiple zones with the same * {@link #name}. When absent, it doesn't. The value is likely to have been * system generated. Even if a provider has an id associated with a zone, if * it isn't used by their api calls, this method will return null. * * @see #idOrName() */ public String id() { return id; } /** * It is possible that some zones do not have an id, and in this case the * name is used. The following form will ensure you get a reference * regardless. * * In implementation, this method is the same as calling: * {@code zone.id().or(zone.name())} * * <br> * If {@code denominator.Provider#supportsDuplicateZoneNames()} is true, * this will return an id. * * @return {@link #id() id} or {@link #name() name} if absent */ public String idOrName() { return id() != null ? id() : name(); } @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof Zone)) return false; Zone that = Zone.class.cast(o); return equal(name(), that.name()) && equal(id(), that.id()); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + name().hashCode(); result = prime * result + ((id() == null) ? 0 : id().hashCode()); return result; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append("Zone ["); builder.append("name=").append(name()); if (id() != null) builder.append(", ").append("id=").append(id()); builder.append("]"); return builder.toString(); } }
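A short usage sketch of the two factory methods and the idOrName fallback; the id value here is hypothetical:

Zone byName = Zone.create("netflix.com.");
Zone byId = Zone.create("netflix.com.", "ABCDEF"); // hypothetical provider id

byName.idOrName(); // "netflix.com." since no id is present
byId.idOrName();   // "ABCDEF"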
// Receives a list of Redis entities to convert
func writeEntities(connection *redistimeseries.Client, entities []models.NodeMetricsEntity) {
	CheckAndInitTSKeys(connection, entities)
	for i := range entities {
		cpuEntityKey := GenerateCpuKey(entities[i].MetricsType, entities[i].Name)
		memEntityKey := GenerateMemKey(entities[i].MetricsType, entities[i].Name)
		jsonEntity, _ := json.Marshal(entities[i])
		fmt.Printf("Writing cpu metrics for -> %s\n", cpuEntityKey)
		fmt.Printf("Writing mem metrics for -> %s\n", memEntityKey)
		cpuUsage := strings.TrimSuffix(entities[i].Usage.CPU, "n")
		memUsage := strings.TrimSuffix(entities[i].Usage.Memory, "Ki")
		cpuAsFloat, err := strconv.ParseFloat(cpuUsage, 64)
		if err != nil {
			fmt.Println(err)
		}
		memAsFloat, err := strconv.ParseFloat(memUsage, 64)
		if err != nil {
			fmt.Println(err)
		}
		connection.AddAutoTs(cpuEntityKey, cpuAsFloat)
		connection.AddAutoTs(memEntityKey, memAsFloat)
		fmt.Println(string(jsonEntity))
	}
}
<filename>signer/api/api_test.go package api_test import ( "encoding/json" "fmt" "io" "io/ioutil" "net/http" "net/http/httptest" "os" "strings" "testing" "github.com/docker/notary/cryptoservice" "github.com/docker/notary/signer" "github.com/docker/notary/signer/api" "github.com/docker/notary/trustmanager" "github.com/endophage/gotuf/data" "github.com/miekg/pkcs11" "github.com/stretchr/testify/assert" pb "github.com/docker/notary/proto" ) var ( server *httptest.Server reader io.Reader deleteKeyBaseURL string createKeyBaseURL string keyInfoBaseURL string signBaseURL string passphraseRetriever = func(string, string, bool, int) (string, bool, error) { return "passphrase", false, nil } ) func SetupHSMEnv(t *testing.T) (*pkcs11.Ctx, pkcs11.SessionHandle) { var libPath = "/usr/local/lib/softhsm/libsofthsm2.so" if _, err := os.Stat(libPath); err != nil { t.Skipf("Skipping test. Library path: %s does not exist", libPath) } p := pkcs11.New(libPath) if p == nil { t.Fatalf("Failed to init library") } if err := p.Initialize(); err != nil { t.Fatalf("Initialize error %s\n", err.Error()) } slots, err := p.GetSlotList(true) if err != nil { t.Fatalf("Failed to list HSM slots %s", err) } session, err := p.OpenSession(slots[0], pkcs11.CKF_SERIAL_SESSION|pkcs11.CKF_RW_SESSION) if err != nil { t.Fatalf("Failed to Start Session with HSM %s", err) } if err = p.Login(session, pkcs11.CKU_USER, "1234"); err != nil { t.Fatalf("User PIN %s\n", err.Error()) } return p, session } func setup(cryptoServices signer.CryptoServiceIndex) { server = httptest.NewServer(api.Handlers(cryptoServices)) deleteKeyBaseURL = fmt.Sprintf("%s/delete", server.URL) createKeyBaseURL = fmt.Sprintf("%s/new", server.URL) keyInfoBaseURL = fmt.Sprintf("%s", server.URL) signBaseURL = fmt.Sprintf("%s/sign", server.URL) } func TestDeleteKeyHandlerReturns404WithNonexistentKey(t *testing.T) { keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) fakeID := "c62e6d68851cef1f7e55a9d56e3b0c05f3359f16838cad43600f0554e7d3b54d" keyID := &pb.KeyID{ID: fakeID} requestJson, _ := json.Marshal(keyID) reader = strings.NewReader(string(requestJson)) request, err := http.NewRequest("POST", deleteKeyBaseURL, reader) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) assert.Equal(t, 404, res.StatusCode) } func TestDeleteKeyHandler(t *testing.T) { keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) tufKey, _ := cryptoService.Create("", data.ED25519Key) assert.NotNil(t, tufKey) requestJson, _ := json.Marshal(&pb.KeyID{ID: tufKey.ID()}) reader = strings.NewReader(string(requestJson)) request, err := http.NewRequest("POST", deleteKeyBaseURL, reader) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) assert.Equal(t, 200, res.StatusCode) } func TestKeyInfoHandler(t *testing.T) { keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) tufKey, _ := cryptoService.Create("", data.ED25519Key) assert.NotNil(t, tufKey) keyInfoURL := 
fmt.Sprintf("%s/%s", keyInfoBaseURL, tufKey.ID()) request, err := http.NewRequest("GET", keyInfoURL, nil) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) jsonBlob, err := ioutil.ReadAll(res.Body) assert.Nil(t, err) var pubKey *pb.PublicKey err = json.Unmarshal(jsonBlob, &pubKey) assert.Nil(t, err) assert.Equal(t, tufKey.ID(), pubKey.KeyInfo.KeyID.ID) assert.Equal(t, 200, res.StatusCode) } func TestKeyInfoHandlerReturns404WithNonexistentKey(t *testing.T) { // We associate both key types with this signing service to bypass the // ID -> keyType logic in the tests keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) fakeID := "c62e6d68851cef1f7e55a9d56e3b0c05f3359f16838cad43600f0554e7d3b54d" keyInfoURL := fmt.Sprintf("%s/%s", keyInfoBaseURL, fakeID) request, err := http.NewRequest("GET", keyInfoURL, nil) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) assert.Equal(t, 404, res.StatusCode) } func TestHSMCreateKeyHandler(t *testing.T) { ctx, session := SetupHSMEnv(t) defer ctx.Destroy() defer ctx.Finalize() defer ctx.CloseSession(session) defer ctx.Logout(session) cryptoService := api.NewRSAHardwareCryptoService(ctx, session) setup(signer.CryptoServiceIndex{data.RSAKey: cryptoService}) createKeyURL := fmt.Sprintf("%s/%s", createKeyBaseURL, data.RSAKey) request, err := http.NewRequest("POST", createKeyURL, nil) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) jsonBlob, err := ioutil.ReadAll(res.Body) assert.Nil(t, err) var keyInfo *pb.PublicKey err = json.Unmarshal(jsonBlob, &keyInfo) assert.Nil(t, err) assert.Equal(t, 200, res.StatusCode) } func TestSoftwareCreateKeyHandler(t *testing.T) { keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) createKeyURL := fmt.Sprintf("%s/%s", createKeyBaseURL, data.ED25519Key) request, err := http.NewRequest("POST", createKeyURL, nil) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) assert.Equal(t, 200, res.StatusCode) jsonBlob, err := ioutil.ReadAll(res.Body) assert.Nil(t, err) var keyInfo *pb.PublicKey err = json.Unmarshal(jsonBlob, &keyInfo) assert.Nil(t, err) } func TestHSMSignHandler(t *testing.T) { ctx, session := SetupHSMEnv(t) defer ctx.Destroy() defer ctx.Finalize() defer ctx.CloseSession(session) defer ctx.Logout(session) cryptoService := api.NewRSAHardwareCryptoService(ctx, session) setup(signer.CryptoServiceIndex{data.RSAKey: cryptoService}) tufKey, _ := cryptoService.Create("", data.RSAKey) sigRequest := &pb.SignatureRequest{KeyID: &pb.KeyID{ID: tufKey.ID()}, Content: make([]byte, 10)} requestJson, _ := json.Marshal(sigRequest) reader = strings.NewReader(string(requestJson)) request, err := http.NewRequest("POST", signBaseURL, reader) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) jsonBlob, err := ioutil.ReadAll(res.Body) assert.Nil(t, err) var sig *pb.Signature err = json.Unmarshal(jsonBlob, &sig) assert.Nil(t, err) assert.Equal(t, tufKey.ID, sig.KeyInfo.KeyID.ID) assert.Equal(t, 200, res.StatusCode) } func TestSoftwareSignHandler(t *testing.T) { keyStore := 
trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) tufKey, err := cryptoService.Create("", data.ED25519Key) assert.Nil(t, err) sigRequest := &pb.SignatureRequest{KeyID: &pb.KeyID{ID: tufKey.ID()}, Content: make([]byte, 10)} requestJson, _ := json.Marshal(sigRequest) reader = strings.NewReader(string(requestJson)) request, err := http.NewRequest("POST", signBaseURL, reader) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) assert.Equal(t, 200, res.StatusCode) jsonBlob, err := ioutil.ReadAll(res.Body) assert.Nil(t, err) var sig *pb.Signature err = json.Unmarshal(jsonBlob, &sig) assert.Nil(t, err) assert.Equal(t, tufKey.ID(), sig.KeyInfo.KeyID.ID) } func TestSoftwareSignWithInvalidRequestHandler(t *testing.T) { keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) requestJson := "{\"blob\":\"7d16f1d0b95310a7bc557747fc4f20fcd41c1c5095ae42f189df0717e7d7f4a0a2b55debce630f43c4ac099769c612965e3fda3cd4c0078ee6a460f14fa19307\"}" reader = strings.NewReader(requestJson) request, err := http.NewRequest("POST", signBaseURL, reader) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) jsonBlob, err := ioutil.ReadAll(res.Body) assert.Nil(t, err) var sig *pb.Signature err = json.Unmarshal(jsonBlob, &sig) assert.Equal(t, 400, res.StatusCode) } func TestSignHandlerReturns404WithNonexistentKey(t *testing.T) { keyStore := trustmanager.NewKeyMemoryStore(passphraseRetriever) cryptoService := cryptoservice.NewCryptoService("", keyStore) setup(signer.CryptoServiceIndex{data.ED25519Key: cryptoService, data.RSAKey: cryptoService, data.ECDSAKey: cryptoService}) fakeID := "c62e6d68851cef1f7e55a9d56e3b0c05f3359f16838cad43600f0554e7d3b54d" cryptoService.Create("", data.ED25519Key) sigRequest := &pb.SignatureRequest{KeyID: &pb.KeyID{ID: fakeID}, Content: make([]byte, 10)} requestJson, _ := json.Marshal(sigRequest) reader = strings.NewReader(string(requestJson)) request, err := http.NewRequest("POST", signBaseURL, reader) assert.Nil(t, err) res, err := http.DefaultClient.Do(request) assert.Nil(t, err) assert.Equal(t, 404, res.StatusCode) }