// Copyright (c) 2009-2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #ifndef BITCOIN_NET_H #define BITCOIN_NET_H #include <deque> #include <boost/array.hpp> #include <boost/foreach.hpp> #include <openssl/rand.h> #ifndef WIN32 #include <arpa/inet.h> #endif #include "mruset.h" #include "netbase.h" #include "protocol.h" #include "addrman.h" #include "hash.h" #include "bloom.h" class CNode; class CBlockIndex; extern int nBestHeight; inline unsigned int ReceiveBufferSize() { return 1000*GetArg("-maxreceivebuffer", 5*1000); } inline unsigned int SendBufferSize() { return 1000*GetArg("-maxsendbuffer", 1*1000); } void AddOneShot(std::string strDest); bool RecvLine(SOCKET hSocket, std::string& strLine); bool GetMyExternalIP(CNetAddr& ipRet); void AddressCurrentlyConnected(const CService& addr); CNode* FindNode(const CNetAddr& ip); CNode* FindNode(const CService& ip); CNode* ConnectNode(CAddress addrConnect, const char *strDest = NULL, int64 nTimeout=0); void MapPort(); unsigned short GetListenPort(); bool BindListenPort(const CService &bindAddr, std::string& strError=REF(std::string())); void StartNode(void* parg); bool StopNode(); enum { LOCAL_NONE, // unknown LOCAL_IF, // address a local interface listens on LOCAL_BIND, // address explicit bound to LOCAL_UPNP, // address reported by UPnP LOCAL_IRC, // address reported by IRC (deprecated) LOCAL_HTTP, // address reported by whatismyip.com and similar LOCAL_MANUAL, // address explicitly specified (-externalip=) LOCAL_MAX }; void SetLimited(enum Network net, bool fLimited = true); bool IsLimited(enum Network net); bool IsLimited(const CNetAddr& addr); bool AddLocal(const CService& addr, int nScore = LOCAL_NONE); bool AddLocal(const CNetAddr& addr, int nScore = LOCAL_NONE); bool SeenLocal(const CService& addr); bool IsLocal(const CService& addr); bool GetLocal(CService &addr, const CNetAddr *paddrPeer = NULL); bool IsReachable(const CNetAddr &addr); void SetReachable(enum Network net, bool fFlag = true); CAddress GetLocalAddress(const CNetAddr *paddrPeer = NULL); /** Thread types */ enum threadId { THREAD_SOCKETHANDLER, THREAD_OPENCONNECTIONS, THREAD_MESSAGEHANDLER, THREAD_MINER, THREAD_RPCLISTENER, THREAD_UPNP, THREAD_DNSSEED, THREAD_ADDEDCONNECTIONS, THREAD_DUMPADDRESS, THREAD_RPCHANDLER, THREAD_IMPORT, THREAD_SCRIPTCHECK, THREAD_MAX }; extern bool fDiscover; extern bool fUseUPnP; extern uint64 nLocalServices; extern uint64 nLocalHostNonce; extern boost::array<int, THREAD_MAX> vnThreadsRunning; extern CAddrMan addrman; extern std::vector<CNode*> vNodes; extern CCriticalSection cs_vNodes; extern std::map<CInv, CDataStream> mapRelay; extern std::deque<std::pair<int64, CInv> > vRelayExpiration; extern CCriticalSection cs_mapRelay; extern std::map<CInv, int64> mapAlreadyAskedFor; extern std::vector<std::string> vAddedNodes; extern CCriticalSection cs_vAddedNodes; class CNodeStats { public: uint64 nServices; int64 nLastSend; int64 nLastRecv; int64 nTimeConnected; std::string addrName; int nVersion; std::string strSubVer; bool fInbound; int64 nReleaseTime; int nStartingHeight; int nMisbehavior; }; /** Information about a peer */ class CNode { public: // socket uint64 nServices; SOCKET hSocket; CDataStream vSend; CDataStream vRecv; CCriticalSection cs_vSend; CCriticalSection cs_vRecv; int64 nLastSend; int64 nLastRecv; int64 nLastSendEmpty; int64 nTimeConnected; int nHeaderStart; unsigned int 
nMessageStart; CAddress addr; std::string addrName; CService addrLocal; int nVersion; std::string strSubVer; bool fOneShot; bool fClient; bool fInbound; bool fNetworkNode; bool fSuccessfullyConnected; bool fDisconnect; // We use fRelayTxes for two purposes - // a) it allows us to not relay tx invs before receiving the peer's version message // b) the peer may tell us in their version message that we should not relay tx invs // until they have initialized their bloom filter. bool fRelayTxes; CSemaphoreGrant grantOutbound; CCriticalSection cs_filter; CBloomFilter* pfilter; protected: int nRefCount; // Denial-of-service detection/prevention // Key is IP address, value is banned-until-time static std::map<CNetAddr, int64> setBanned; static CCriticalSection cs_setBanned; int nMisbehavior; public: int64 nReleaseTime; uint256 hashContinue; CBlockIndex* pindexLastGetBlocksBegin; uint256 hashLastGetBlocksEnd; int nStartingHeight; // flood relay std::vector<CAddress> vAddrToSend; std::set<CAddress> setAddrKnown; bool fGetAddr; std::set<uint256> setKnown; // inventory based relay mruset<CInv> setInventoryKnown; std::vector<CInv> vInventoryToSend; CCriticalSection cs_inventory; std::multimap<int64, CInv> mapAskFor; CNode(SOCKET hSocketIn, CAddress addrIn, std::string addrNameIn = "", bool fInboundIn=false) : vSend(SER_NETWORK, MIN_PROTO_VERSION), vRecv(SER_NETWORK, MIN_PROTO_VERSION) { nServices = 0; hSocket = hSocketIn; nLastSend = 0; nLastRecv = 0; nLastSendEmpty = GetTime(); nTimeConnected = GetTime(); nHeaderStart = -1; nMessageStart = -1; addr = addrIn; addrName = addrNameIn == "" ? addr.ToStringIPPort() : addrNameIn; nVersion = 0; strSubVer = ""; fOneShot = false; fClient = false; // set by version message fInbound = fInboundIn; fNetworkNode = false; fSuccessfullyConnected = false; fDisconnect = false; nRefCount = 0; nReleaseTime = 0; hashContinue = 0; pindexLastGetBlocksBegin = 0; hashLastGetBlocksEnd = 0; nStartingHeight = -1; fGetAddr = false; nMisbehavior = 0; fRelayTxes = false; setInventoryKnown.max_size(SendBufferSize() / 1000); pfilter = NULL; // Be shy and don't send version until we hear if (!fInbound) PushVersion(); } ~CNode() { if (hSocket != INVALID_SOCKET) { closesocket(hSocket); hSocket = INVALID_SOCKET; } if (pfilter) delete pfilter; } private: CNode(const CNode&); void operator=(const CNode&); public: int GetRefCount() { return std::max(nRefCount, 0) + (GetTime() < nReleaseTime ? 1 : 0); } CNode* AddRef(int64 nTimeout=0) { if (nTimeout != 0) nReleaseTime = std::max(nReleaseTime, GetTime() + nTimeout); else nRefCount++; return this; } void Release() { nRefCount--; } void AddAddressKnown(const CAddress& addr) { setAddrKnown.insert(addr); } void PushAddress(const CAddress& addr) { // Known checking here is only to save space from duplicates. // SendMessages will filter it again for knowns that were added // after addresses were pushed. 
if (addr.IsValid() && !setAddrKnown.count(addr)) vAddrToSend.push_back(addr); } void AddInventoryKnown(const CInv& inv) { { LOCK(cs_inventory); setInventoryKnown.insert(inv); } } void PushInventory(const CInv& inv) { { LOCK(cs_inventory); if (!setInventoryKnown.count(inv)) vInventoryToSend.push_back(inv); } } void AskFor(const CInv& inv) { // We're using mapAskFor as a priority queue, // the key is the earliest time the request can be sent int64& nRequestTime = mapAlreadyAskedFor[inv]; if (fDebugNet) printf("askfor %s %"PRI64d" (%s)\n", inv.ToString().c_str(), nRequestTime, DateTimeStrFormat("%H:%M:%S", nRequestTime/1000000).c_str()); // Make sure not to reuse time indexes to keep things in the same order int64 nNow = (GetTime() - 1) * 1000000; static int64 nLastTime; ++nLastTime; nNow = std::max(nNow, nLastTime); nLastTime = nNow; // Each retry is 2 minutes after the last nRequestTime = std::max(nRequestTime + 2 * 60 * 1000000, nNow); mapAskFor.insert(std::make_pair(nRequestTime, inv)); } // TODO: Document the postcondition of this function. Is cs_vSend locked? void BeginMessage(const char* pszCommand) EXCLUSIVE_LOCK_FUNCTION(cs_vSend) { ENTER_CRITICAL_SECTION(cs_vSend); if (nHeaderStart != -1) AbortMessage(); nHeaderStart = vSend.size(); vSend << CMessageHeader(pszCommand, 0); nMessageStart = vSend.size(); if (fDebug) printf("sending: %s ", pszCommand); } // TODO: Document the precondition of this function. Is cs_vSend locked? void AbortMessage() UNLOCK_FUNCTION(cs_vSend) { if (nHeaderStart < 0) return; vSend.resize(nHeaderStart); nHeaderStart = -1; nMessageStart = -1; LEAVE_CRITICAL_SECTION(cs_vSend); if (fDebug) printf("(aborted)\n"); } // TODO: Document the precondition of this function. Is cs_vSend locked? void EndMessage() UNLOCK_FUNCTION(cs_vSend) { if (mapArgs.count("-dropmessagestest") && GetRand(atoi(mapArgs["-dropmessagestest"])) == 0) { printf("dropmessages DROPPING SEND MESSAGE\n"); AbortMessage(); return; } if (nHeaderStart < 0) return; // Set the size unsigned int nSize = vSend.size() - nMessageStart; memcpy((char*)&vSend[nHeaderStart] + CMessageHeader::MESSAGE_SIZE_OFFSET, &nSize, sizeof(nSize)); // Set the checksum uint256 hash = Hash(vSend.begin() + nMessageStart, vSend.end()); unsigned int nChecksum = 0; memcpy(&nChecksum, &hash, sizeof(nChecksum)); assert(nMessageStart - nHeaderStart >= CMessageHeader::CHECKSUM_OFFSET + sizeof(nChecksum)); memcpy((char*)&vSend[nHeaderStart] + CMessageHeader::CHECKSUM_OFFSET, &nChecksum, sizeof(nChecksum)); if (fDebug) { printf("(%d bytes)\n", nSize); } nHeaderStart = -1; nMessageStart = -1; LEAVE_CRITICAL_SECTION(cs_vSend); } void PushVersion(); void PushMessage(const char* pszCommand) { try { BeginMessage(pszCommand); EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1> void PushMessage(const char* pszCommand, const T1& a1) { try { BeginMessage(pszCommand); vSend << a1; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2) { try { BeginMessage(pszCommand); vSend << a1 << a2; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2, typename T3> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3; EndMessage(); } catch (...) 
{ AbortMessage(); throw; } } template<typename T1, typename T2, typename T3, typename T4> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3, const T4& a4) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3 << a4; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2, typename T3, typename T4, typename T5> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3, const T4& a4, const T5& a5) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3 << a4 << a5; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2, typename T3, typename T4, typename T5, typename T6> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3, const T4& a4, const T5& a5, const T6& a6) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3 << a4 << a5 << a6; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2, typename T3, typename T4, typename T5, typename T6, typename T7> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3, const T4& a4, const T5& a5, const T6& a6, const T7& a7) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3 << a4 << a5 << a6 << a7; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2, typename T3, typename T4, typename T5, typename T6, typename T7, typename T8> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3, const T4& a4, const T5& a5, const T6& a6, const T7& a7, const T8& a8) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3 << a4 << a5 << a6 << a7 << a8; EndMessage(); } catch (...) { AbortMessage(); throw; } } template<typename T1, typename T2, typename T3, typename T4, typename T5, typename T6, typename T7, typename T8, typename T9> void PushMessage(const char* pszCommand, const T1& a1, const T2& a2, const T3& a3, const T4& a4, const T5& a5, const T6& a6, const T7& a7, const T8& a8, const T9& a9) { try { BeginMessage(pszCommand); vSend << a1 << a2 << a3 << a4 << a5 << a6 << a7 << a8 << a9; EndMessage(); } catch (...) { AbortMessage(); throw; } } void PushGetBlocks(CBlockIndex* pindexBegin, uint256 hashEnd); bool IsSubscribed(unsigned int nChannel); void Subscribe(unsigned int nChannel, unsigned int nHops=0); void CancelSubscribe(unsigned int nChannel); void CloseSocketDisconnect(); void Cleanup(); // Denial-of-service detection/prevention // The idea is to detect peers that are behaving // badly and disconnect/ban them, but do it in a // one-coding-mistake-won't-shatter-the-entire-network // way. // IMPORTANT: There should be nothing I can give a // node that it will forward on that will make that // node's peers drop it. If there is, an attacker // can isolate a node and/or try to split the network. // Dropping a node for sending stuff that is invalid // now but might be valid in a later version is also // dangerous, because it can cause a network split // between nodes running old code and nodes running // new code. static void ClearBanned(); // needed for unit testing static bool IsBanned(CNetAddr ip); bool Misbehaving(int howmuch); // 1 == a little, 100 == a lot void copyStats(CNodeStats &stats); }; class CTransaction; void RelayTransaction(const CTransaction& tx, const uint256& hash); void RelayTransaction(const CTransaction& tx, const uint256& hash, const CDataStream& ss); #endif
{ "content_hash": "26ab2ea48451dc17d659fdb53f5d43d3", "timestamp": "", "source": "github", "line_count": 579, "max_line_length": 175, "avg_line_length": 28.07944732297064, "alnum_prop": 0.6067782015007996, "repo_name": "gbalme/Anacoinda", "id": "14e12aa102f255d0fee1467ec8c7b9339022513f", "size": "16258", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "src/net.h", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "103707" }, { "name": "C++", "bytes": "2393977" }, { "name": "CSS", "bytes": "1127" }, { "name": "IDL", "bytes": "13837" }, { "name": "Objective-C", "bytes": "2451" }, { "name": "Python", "bytes": "36361" }, { "name": "Shell", "bytes": "13135" }, { "name": "TypeScript", "bytes": "4329160" } ], "symlink_target": "" }
var java = require("../"); var assert = require("assert"); var _ = require('lodash'); var when = require('when'); module.exports = { launch: function(test) { test.expect(7); var api = _.functions(java); test.ok(_.includes(api, 'isJvmCreated'), 'Expected `isJvmCreated` to be present, but it is NOT.'); test.ok(!java.isJvmCreated()); java.asyncOptions = { syncSuffix: "Sync", promiseSuffix: 'Promise', promisify: require('when/node').lift }; function before() { var promise = when.promise(function(resolve, reject) { test.ok(!java.isJvmCreated()); resolve(); }); return promise; } function after() { var promise = when.promise(function(resolve, reject) { test.ok(java.isJvmCreated()); resolve(); }); return promise; } java.registerClientP(before, after); java.registerClientP(null, after); java.registerClientP(before); java.ensureJvm().done(function() { test.ok(java.isJvmCreated()); test.done(); }); }, testAPI: function(test) { test.expect(6); var arrayList = java.newInstanceSync("java.util.ArrayList"); test.ok(arrayList); test.ok(java.instanceOf(arrayList, "java.util.ArrayList")); var api = _.functions(arrayList); test.ok(_.includes(api, 'addSync'), 'Expected `addSync` to be present, but it is NOT.'); test.ok(_.includes(api, 'addPromise'), 'Expected `addPromise` to be present, but it is NOT.'); test.ok(!_.includes(api, 'add'), 'Expected `add` to NOT be present, but it is.'); test.ok(!_.includes(api, 'addAsync'), 'Expected `addAsync` to NOT be present, but it is.'); test.done(); }, testImportClass: function(test) { test.expect(3); // Note: java.import executes javascript code in lib/nodeJavaBridge that makes sync calls to java classes. var ArrayList = java.import("java.util.ArrayList"); test.ok(ArrayList); var arrayList = new ArrayList(); test.ok(arrayList); test.strictEqual(arrayList.sizeSync(), 0); test.done(); }, testStaticAPI: function(test) { test.expect(6); var String = java.import("java.lang.String"); test.ok(String); var api = _.functions(String); test.ok(_.includes(api, 'formatSync'), 'Expected `formatSync` to be present, but it is NOT.'); test.ok(_.includes(api, 'formatPromise'), 'Expected `formatPromise` to be present, but it is NOT.'); test.ok(!_.includes(api, 'format'), 'Expected `format` to NOT be present, but it is.'); test.ok(!_.includes(api, 'formatAsync'), 'Expected `formatAsync` to NOT be present, but it is.'); test.ok(!_.includes(api, 'formatundefined'), 'Expected `formatundefined` to NOT be present, but it is.'); test.done(); }, testSyncCalls: function(test) { test.expect(1); var arrayList = java.newInstanceSync("java.util.ArrayList"); arrayList.addSync("hello"); arrayList.addSync("world"); test.strictEqual(arrayList.sizeSync(), 2); test.done(); }, testStaticSyncCalls: function(test) { test.expect(1); // Note: java.import executes javascript code in lib/nodeJavaBridge that makes sync calls to java classes. // Among other things, java.import creates Sync functions for static methods. var String = java.import("java.lang.String"); test.strictEqual(String.formatSync('%s--%s', "hello", "world"), "hello--world"); test.done(); }, testPromiseCalls: function(test) { test.expect(1); var arrayList = java.newInstanceSync("java.util.ArrayList"); arrayList.addPromise("hello") .then(function () { return arrayList.addPromise("world"); }) .then(function () { return arrayList.sizePromise(); }) .then(function (size) { test.strictEqual(size, 2); test.done(); }); } }
{ "content_hash": "e8bf80aca3c57e3884f242bd54d08512", "timestamp": "", "source": "github", "line_count": 113, "max_line_length": 110, "avg_line_length": 33.89380530973451, "alnum_prop": 0.6368146214099216, "repo_name": "lantanagroup/node-java", "id": "59e4c733b12f74092f21536005dccdfb2059fb13", "size": "3906", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "testAsyncOptions/testNoAsync.js", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "112589" }, { "name": "HTML", "bytes": "259" }, { "name": "Java", "bytes": "13566" }, { "name": "JavaScript", "bytes": "108813" }, { "name": "Python", "bytes": "5751" }, { "name": "Shell", "bytes": "1632" } ], "symlink_target": "" }
"""Replay buffer that performs relabeling.""" import gin import numpy as np import tensorflow as tf from tf_agents.replay_buffers import tf_uniform_replay_buffer from tf_agents.utils import common @gin.configurable class RelabellingReplayBuffer(tf_uniform_replay_buffer.TFUniformReplayBuffer): """A replay buffer that relabels experience.""" def __init__(self, *args, **kwargs): """Initialize the replay buffer. Args: *args: Arguments. **kwargs: Keyword arguments. Additional arguments: task_distribution: an instance of multitask.TaskDistribution. sample_batch_size: (int) the batch size. num_parallel_calls: (int) number of parallel calls for sampling. num_future_states: (int) number of future states to consider for future state relabeling. actor: the actor network. critic: the critic network. gamma: (float) the discount factor. relabel_type: (str) indicator of the relabeling strategy. candidate_task_type: (str) within each back, should we use the states, next_states, or originally commanded tasks as possible tasks when relabeling. relabel_prob: (float) fraction of experience to relabel when sampling. keep_current_goal: (bool) for ``last'' and ``final'' relabeling, should we add both the originally commanded task and the relabeled task when inserting new experience into the replay buffer. normalize_cols: (bool) Normalizing the columns has the effect of including the partition function. """ self._task_distribution = kwargs.pop("task_distribution") self._sample_batch_size = kwargs.pop("sample_batch_size") self._num_parallel_calls = kwargs.pop("num_parallel_calls") self._num_future_states = kwargs.pop("num_future_states", 4) self._actor = kwargs.pop("actor") self._critic = kwargs.pop("critic") self._gamma = kwargs.pop("gamma") self._relabel_type = kwargs.pop("relabel_type", None) assert self._relabel_type in [None, "last", "future", "soft", "random"] self._candidate_task_type = kwargs.pop("candidate_task_type", "states") assert self._candidate_task_type in ["states", "next_states", "tasks"] self._relabel_prob = kwargs.pop("relabel_prob", 1.0) self._keep_current_goal = kwargs.pop("keep_current_goal", False) self._normalize_cols = kwargs.pop("normalize_cols", True) self._iterator = None super(RelabellingReplayBuffer, self).__init__(*args, **kwargs) def get_batch(self): if self._iterator is None: dataset = self.as_dataset( sample_batch_size=self._sample_batch_size, num_parallel_calls=self._num_parallel_calls, num_steps=2, ).prefetch(3) self._iterator = iter(dataset) experience, unused_info = next(self._iterator) if self._relabel_type in ["soft", "random"]: experience = self._soft_relabel(experience) elif self._relabel_type in ["last", "future"]: # Reassign the next_states to have the same goal as the current states _, tasks = self._task_distribution.split(experience.observation[:, 0]) next_states, _ = self._task_distribution.split(experience.observation[:, 1]) next_states_and_tasks = self._task_distribution.combine( next_states, tasks) new_observation = tf.concat( [ experience.observation[:, 0][:, None], next_states_and_tasks[:, None] ], axis=1, ) assert new_observation.shape == experience.observation.shape experience = experience.replace(observation=new_observation) if self._relabel_type is not None: # Recompute rewards and done flags states, tasks = self._task_distribution.split(experience.observation[:, 0]) next_states, next_tasks = self._task_distribution.split( experience.observation[:, 1]) rewards, dones = self._task_distribution.evaluate(states, experience.action[:, 0], tasks) # Strictly 
speaking, we don't need to relabel the next rewards and next # dones because they end up being thrown away. Only the current rewards # and dones end up being important. next_rewards, next_dones = self._task_distribution.evaluate( next_states, experience.action[:, 1], next_tasks) new_rewards = tf.concat([rewards[:, None], next_rewards[:, None]], axis=1) new_dones = tf.concat([dones[:, None], next_dones[:, None]], axis=1) # 0 if episode is done, 1 if episode is continuing new_discount = 1.0 - tf.cast(new_dones, tf.float32) assert new_rewards.shape == experience.reward.shape assert new_discount.shape == experience.discount.shape experience = experience.replace(reward=new_rewards, discount=new_discount) return experience def _soft_relabel(self, experience): """Reassigns tasks to each state and next state. Does not recompute the rewards or done flags. Args: experience: The experience that we want to relabel with inverse RL. Returns: relabeled_experience: The relabeled experience. """ raise NotImplementedError def _add_batch(self, items): """Adds a trajectory to the replay buffer.""" assert items[0].is_first() for item in items: # The items are batched already, so we remove the first dimension. assert item.observation.shape[1:] == self.data_spec.observation.shape super(RelabellingReplayBuffer, self)._add_batch(item) class GoalRelabellingReplayBuffer(RelabellingReplayBuffer): """Implements a replay buffer for relabeling goals.""" def _add_batch(self, items): """Adds a trajectory to the replay buffer.""" batch_size = len(items) if self._relabel_type in ["future", "last"]: relabelled_items = [] for i in range(batch_size): if self._relabel_type == "future": relabel_indices = np.random.randint( i, batch_size, size=self._num_future_states) else: relabel_indices = [batch_size - 1] if self._keep_current_goal: relabelled_items.append(items[i]) for j in relabel_indices: state, _ = self._task_distribution.split(items[i].observation) next_state, _ = self._task_distribution.split(items[j].observation) task = self._task_distribution.state_to_task(next_state) state_and_task = self._task_distribution.combine(state, task) new_item = items[i].replace(observation=state_and_task) relabelled_items.append(new_item) items = relabelled_items super(GoalRelabellingReplayBuffer, self)._add_batch(items) @tf.function def _soft_relabel(self, experience): # experience.observation.shape = [B x T=2 x obs_dim+state_dim] states, orig_tasks = self._task_distribution.split( experience.observation[:, 0]) if self._task_distribution.tasks is None: tasks = orig_tasks else: tasks = tf.constant(self._task_distribution.tasks, dtype=tf.float32) next_states, _ = self._task_distribution.split(experience.observation[:, 1]) if self._candidate_task_type == "states": candidate_tasks = self._task_distribution.state_to_task(states) elif self._candidate_task_type == "next_states": candidate_tasks = self._task_distribution.state_to_task(next_states) else: assert self._candidate_task_type == "tasks" candidate_tasks = tasks actions = experience.action[:, 0] num_tasks = tasks.shape[0] batch_size = states.shape[0] task_dim = tasks.shape[1] obs_dim = states.shape[1] action_dim = actions.shape[1] action_spec = self._actor.output_tensor_spec states_tiled = tf.tile(states[:, None], [1, num_tasks, 1]) # B x B x D states_tiled = tf.reshape(states_tiled, [batch_size * num_tasks, obs_dim]) # B*B x D actions_tiled = tf.tile(actions[:, None], [1, num_tasks, 1]) # B x B x D actions_tiled = tf.reshape(actions_tiled, [batch_size * num_tasks, action_dim]) # B*B x D 
tasks_tiled = tf.tile(tasks[None], [batch_size, 1, 1]) # B x B x D tasks_tiled = tf.reshape(tasks_tiled, [batch_size * num_tasks, task_dim]) # B*B x D next_states_tiled = tf.tile(next_states[:, None], [1, num_tasks, 1]) next_states_tiled = tf.reshape(next_states_tiled, [batch_size * num_tasks, obs_dim]) # B*B x D next_relabelled_obs = self._task_distribution.combine( next_states_tiled, tasks_tiled) sampled_actions_tiled = self._actor( next_relabelled_obs, step_type=(), network_state=())[0].sample() critic_input = (next_relabelled_obs, sampled_actions_tiled) q_vals, _ = self._critic(critic_input, training=False) q_vals_vec = tf.reshape(q_vals, (batch_size, num_tasks)) rewards, dones = self._task_distribution.evaluate(states_tiled, actions_tiled, tasks_tiled) dones = tf.cast(dones, tf.float32) rewards_vec = tf.reshape(rewards, (batch_size, num_tasks)) dones_vec = tf.reshape(dones, (batch_size, num_tasks)) relabelled_obs = self._task_distribution.combine(states_tiled, tasks_tiled) action_distribution = self._actor( relabelled_obs, step_type=(), network_state=())[0] log_pi = common.log_probability(action_distribution, actions_tiled, action_spec) log_pi_vec = tf.reshape(log_pi, (batch_size, num_tasks)) logits_vec = ( rewards_vec - log_pi_vec + self._gamma * (1.0 - dones_vec) * q_vals_vec) if self._relabel_type == "random": logits_vec = tf.ones_like(logits_vec) # Hack to make sampling random ## End new version if self._normalize_cols: logits_vec = logits_vec - tf.math.reduce_logsumexp( logits_vec, axis=0)[None] relabel_indices = tf.random.categorical(logits=logits_vec, num_samples=1) ### Metrics global_step = tf.compat.v1.train.get_or_create_global_step() orig_indices = tf.range( self._sample_batch_size, dtype=relabel_indices.dtype) with tf.name_scope("relabelling"): # How often are the originally commanded goals most optimal? opt_indices = tf.argmax(logits_vec, axis=1) orig_is_opt = opt_indices == orig_indices orig_opt_frac = tf.reduce_mean(tf.cast(orig_is_opt, tf.float32)) tf.compat.v2.summary.scalar( name="orig_task_optimal", data=orig_opt_frac, step=global_step) # How often is the relabelled goal optimal? # The relabel_indices are [B, 1], so we need to remove the extra dim. relabel_is_opt = tf.squeeze(relabel_indices) == orig_indices relabel_opt_frac = tf.reduce_mean(tf.cast(relabel_is_opt, tf.float32)) tf.compat.v2.summary.scalar( name="relabel_task_optimal", data=relabel_opt_frac, step=global_step) # What are the average Q values of the original tasks? if batch_size == num_tasks: indices = tf.transpose(tf.stack([orig_indices, orig_indices], axis=0)) orig_q_vals = tf.gather_nd(logits_vec, indices) tf.compat.v2.summary.scalar( name="orig_q_vals", data=tf.reduce_mean(orig_q_vals), step=global_step, ) # What are the average Q values of the relabelled tasks? indices = tf.transpose( tf.stack([orig_indices, tf.squeeze(relabel_indices)], axis=0)) relabel_q_vals = tf.gather_nd(logits_vec, indices) tf.compat.v2.summary.scalar( name="relabel_q_vals", data=tf.reduce_mean(relabel_q_vals), step=global_step, ) max_q = tf.reduce_max(logits_vec, axis=1) tf.compat.v2.summary.scalar( name="max_q", data=tf.reduce_mean(max_q), step=global_step) ### End metrics # For both state-centric and goal-centric relabelling, the implementation of # mixing is the same: we randomly replace some of the indices with the # diagonal. 
relabelled_tasks = tf.gather(candidate_tasks, tf.squeeze(relabel_indices)) if self._relabel_prob == 0: relabelled_tasks = orig_tasks elif 0 < self._relabel_prob < 1: logits = tf.log([1.0 - self._relabel_prob, self._relabel_prob]) mask = tf.squeeze( tf.random.categorical( logits[None], num_samples=self._sample_batch_size)) mask = tf.cast(mask, tf.float32)[:, None] relabelled_tasks = mask * orig_tasks + (1 - mask) * relabelled_tasks states_and_tasks = self._task_distribution.combine(states, relabelled_tasks) next_states_and_tasks = self._task_distribution.combine( next_states, relabelled_tasks) new_observation = tf.concat( [states_and_tasks[:, None], next_states_and_tasks[:, None]], axis=1) assert new_observation.shape == experience.observation.shape experience = experience.replace(observation=new_observation) return experience
{ "content_hash": "94d7166c28ba5335584f26ee557b5e2f", "timestamp": "", "source": "github", "line_count": 298, "max_line_length": 80, "avg_line_length": 44.84228187919463, "alnum_prop": 0.631519868293048, "repo_name": "google-research/google-research", "id": "54716e6008a390eb643f7a60244110c0f0e4131b", "size": "13971", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hipi/relabelling_replay_buffer.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "9817" }, { "name": "C++", "bytes": "4166670" }, { "name": "CMake", "bytes": "6412" }, { "name": "CSS", "bytes": "27092" }, { "name": "Cuda", "bytes": "1431" }, { "name": "Dockerfile", "bytes": "7145" }, { "name": "Gnuplot", "bytes": "11125" }, { "name": "HTML", "bytes": "77599" }, { "name": "ImageJ Macro", "bytes": "50488" }, { "name": "Java", "bytes": "487585" }, { "name": "JavaScript", "bytes": "896512" }, { "name": "Julia", "bytes": "67986" }, { "name": "Jupyter Notebook", "bytes": "71290299" }, { "name": "Lua", "bytes": "29905" }, { "name": "MATLAB", "bytes": "103813" }, { "name": "Makefile", "bytes": "5636" }, { "name": "NASL", "bytes": "63883" }, { "name": "Perl", "bytes": "8590" }, { "name": "Python", "bytes": "53790200" }, { "name": "R", "bytes": "101058" }, { "name": "Roff", "bytes": "1208" }, { "name": "Rust", "bytes": "2389" }, { "name": "Shell", "bytes": "730444" }, { "name": "Smarty", "bytes": "5966" }, { "name": "Starlark", "bytes": "245038" } ], "symlink_target": "" }
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
{ "content_hash": "c4f9dbe04e1d253de93d29ab8f4c464b", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "45c2bb5fae022c45b81b5395c509a81c7bc0d5dc", "size": "188", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Lepanthopsis/Lepanthopsis peniculus/ Syn. Pleurothallis peniculus/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
- Now defaults to throwing an Error when a required module is not found, but this behavior can be changed by setting `options.allowUnresolvedModules`
- `options.excludeNodeModules` can now be set to `true` or to an Array containing modules to be excluded from the project
- `stats.unresolvedModules` contains information about modules that could not be found. Only relevant when `options.allowUnresolvedModules` is `true`.

# 2.0.0

Almost a complete re-write of the original project.

- Supports streaming CommonJS modules to a Readable stream
- Supports bundling for the browser, Node.js, and others
- Added Promise support
- Dropped support for Node < 4
- Changed `options` a bit (i.e. `excludeNodeModules` instead of `includeNodeModules`)
- Added `options.extensions` and `options.compilers` (see README)
- Changed format of `cb`: now `cb(err, stats)` where `stats` exposes files included / excluded in the project build
- Now depends on NPM `resolve` package
- Fixed a few bugs
- Updated docs and added a bit more complexity to the test_project

# 1.x

It's old now, and I'm too lazy to fill in the changelog. :)
{ "content_hash": "9f173b9b1374328a0e62b77673979157", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 77, "avg_line_length": 41.629629629629626, "alnum_prop": 0.7633451957295374, "repo_name": "bminer/node-module-concat", "id": "435e4385a14033f77985994ba283900f6eecd1f3", "size": "1133", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CHANGELOG.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "16544" } ], "symlink_target": "" }
class GCHandleStore : public IGCHandleStore
{
public:
    virtual void Uproot();
    virtual bool ContainsHandle(OBJECTHANDLE handle);
    virtual OBJECTHANDLE CreateHandleOfType(Object* object, HandleType type);
    virtual OBJECTHANDLE CreateHandleOfType(Object* object, HandleType type, int heapToAffinitizeTo);
    virtual OBJECTHANDLE CreateHandleWithExtraInfo(Object* object, HandleType type, void* pExtraInfo);
    virtual OBJECTHANDLE CreateDependentHandle(Object* primary, Object* secondary);

    virtual ~GCHandleStore();

    HandleTableBucket _underlyingBucket;
};

extern GCHandleStore* g_gcGlobalHandleStore;

class GCHandleManager : public IGCHandleManager
{
public:
    virtual bool Initialize();
    virtual void Shutdown();
    virtual IGCHandleStore* GetGlobalHandleStore();
    virtual IGCHandleStore* CreateHandleStore();
    virtual void DestroyHandleStore(IGCHandleStore* store);
    virtual OBJECTHANDLE CreateGlobalHandleOfType(Object* object, HandleType type);
    virtual OBJECTHANDLE CreateDuplicateHandle(OBJECTHANDLE handle);
    virtual void DestroyHandleOfType(OBJECTHANDLE handle, HandleType type);
    virtual void DestroyHandleOfUnknownType(OBJECTHANDLE handle);
    virtual void SetExtraInfoForHandle(OBJECTHANDLE handle, HandleType type, void* pExtraInfo);
    virtual void* GetExtraInfoFromHandle(OBJECTHANDLE handle);
    virtual void StoreObjectInHandle(OBJECTHANDLE handle, Object* object);
    virtual bool StoreObjectInHandleIfNull(OBJECTHANDLE handle, Object* object);
    virtual void SetDependentHandleSecondary(OBJECTHANDLE handle, Object* object);
    virtual Object* GetDependentHandleSecondary(OBJECTHANDLE handle);
    virtual Object* InterlockedCompareExchangeObjectInHandle(OBJECTHANDLE handle, Object* object, Object* comparandObject);
    virtual HandleType HandleFetchType(OBJECTHANDLE handle);
    virtual void TraceRefCountedHandles(HANDLESCANPROC callback, uintptr_t param1, uintptr_t param2);
};

#endif // GCHANDLETABLE_H_
{ "content_hash": "ffeed2001eb9ca3b823787e806d4cf70", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 123, "avg_line_length": 31.873015873015873, "alnum_prop": 0.7923306772908366, "repo_name": "poizan42/coreclr", "id": "48eb2ab17dfa05cfc646246a5a4fa481daf40861", "size": "2315", "binary": false, "copies": "48", "ref": "refs/heads/master", "path": "src/gc/gchandletableimpl.h", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "939689" }, { "name": "Awk", "bytes": "5861" }, { "name": "Batchfile", "bytes": "149949" }, { "name": "C", "bytes": "3034023" }, { "name": "C#", "bytes": "134177151" }, { "name": "C++", "bytes": "68856945" }, { "name": "CMake", "bytes": "641497" }, { "name": "Groovy", "bytes": "202934" }, { "name": "Makefile", "bytes": "2736" }, { "name": "Objective-C", "bytes": "471678" }, { "name": "PAWN", "bytes": "903" }, { "name": "Perl", "bytes": "23640" }, { "name": "PowerShell", "bytes": "9319" }, { "name": "Python", "bytes": "242544" }, { "name": "Roff", "bytes": "529523" }, { "name": "Shell", "bytes": "223037" }, { "name": "Smalltalk", "bytes": "1162648" }, { "name": "SuperCollider", "bytes": "4752" }, { "name": "XSLT", "bytes": "1016" }, { "name": "Yacc", "bytes": "157348" } ], "symlink_target": "" }
---
layout: post
category: [algorithm]
title: strStr
tags: [string, for loop]
---

For a given source string and a target string, you should output the first index (from 0) of the target string in the source string. If the target does not exist in the source, just return -1.

O(n2) is acceptable. Can you implement an O(n) algorithm?

When you meet this problem in a real interview, the interviewer may just want to test your basic implementation ability. But make sure you confirm with the interviewer first.

<!--more-->

    class Solution {
    public:
        /**
         * Returns an index to the first occurrence of target in source,
         * or -1 if target is not part of source.
         * @param source string to be scanned.
         * @param target string containing the sequence of characters to match.
         */
        int strStr(const char *source, const char *target) {
            if (source == NULL || target == NULL) {
                return -1;
            }
            int len_source = strlen(source);
            int len_target = strlen(target);
            for (int i = 0; i < len_source - len_target + 1; i++) {
                int j = 0;
                for (j = 0; j < len_target; j++) {
                    if (target[j] != source[i + j])
                        break;
                }
                if (j == len_target)
                    return i;
            }
            return -1;
        }
    };

---
{ "content_hash": "66d6bde35cc6b0519d300d9d87fec0bd", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 175, "avg_line_length": 30.186046511627907, "alnum_prop": 0.5993836671802774, "repo_name": "jianw851/jianw851.github.io", "id": "8cee94a6e8c982a9316ab37f7fd8446677da35b6", "size": "1298", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2015-12-11-strStr.md", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "85506" }, { "name": "HTML", "bytes": "769244" }, { "name": "JavaScript", "bytes": "7472" }, { "name": "Ruby", "bytes": "11807" } ], "symlink_target": "" }
using System; using Mono.Unix; using Gtk; using Hyena; using Hyena.Data; using Hyena.Widgets; using Banshee.Gui; using Banshee.Widgets; using Banshee.Gui.Dialogs; using Banshee.Playlist; using Banshee.Collection; using Banshee.Collection.Gui; using Banshee.ServiceStack; using Banshee.Configuration; using Banshee.Collection.Database; using Banshee.PlaybackController; using Banshee.MediaEngine; namespace Muinshee { internal class MuinsheeSearchEntry : SearchEntry { protected override bool OnKeyPressEvent (Gdk.EventKey evnt) { // The default behavior is to have Esc clear the search entry // Close the dialog if there is already nothing in the search entry if (evnt.Key == Gdk.Key.Escape && String.IsNullOrEmpty (Query)) { return false; } return base.OnKeyPressEvent (evnt); } } public abstract class BaseDialog : BansheeDialog { private SearchEntry search_entry; private PlaylistSource queue; private PersistentWindowController window_controller; public BaseDialog (PlaylistSource queue, string title, string addType) : base (title) { this.queue = queue; VBox.Spacing = 6; HBox filter_box = new HBox (); filter_box.Spacing = 6; Label search_label = new Label ("_Search:"); filter_box.PackStart (search_label, false, false, 0); search_entry = new MuinsheeSearchEntry (); search_entry.Show (); search_entry.Changed += OnFilterChanged; search_entry.Ready = true; OnFilterChanged (null, null); filter_box.PackStart (search_entry, true, true, 0); VBox.PackStart (filter_box, false, false, 0); Hyena.Widgets.ScrolledWindow sw = new Hyena.Widgets.ScrolledWindow (); sw.Add (GetItemWidget ()); VBox.PackStart (sw, true, true, 0); AddDefaultCloseButton (); Button queue_button = new ImageButton (Catalog.GetString ("En_queue"), "gtk-add"); AddActionWidget (queue_button, Gtk.ResponseType.Apply); Button play_button = new ImageButton (Catalog.GetString ("_Play"), "media-playback-start"); AddButton (play_button, Gtk.ResponseType.Ok, true); window_controller = new PersistentWindowController (this, String.Format ("muinshee.{0}", addType), 500, 475, WindowPersistOptions.Size); window_controller.Restore (); ShowAll (); Response += OnResponse; } private void OnResponse (object o, ResponseArgs args) { ResponseType response = args.ResponseId; if (response == ResponseType.Apply) { Queue (); return; } if (response == ResponseType.Ok) { Play (); } Destroy (); } private void OnFilterChanged (object o, EventArgs args) { Music.FilterQuery = search_entry.Query; } protected void Play () { TrackInfo to_play = FirstTrack; Hyena.Log.InformationFormat ("first to play is {0}", to_play); Queue (); if (to_play != null) { int i = QueueSource.DatabaseTrackModel.IndexOfFirst (to_play); Hyena.Log.InformationFormat ("but in queue is index {0}", i); if (i != -1) { ServiceManager.PlayerEngine.OpenPlay (QueueSource.TrackModel[i]); } } } protected abstract void Queue (); protected abstract Widget GetItemWidget (); protected abstract TrackInfo FirstTrack { get; } protected PlaylistSource QueueSource { get { return queue; } } protected static Banshee.Library.MusicLibrarySource Music { get { return ServiceManager.SourceManager.MusicLibrary; } } public override void Destroy () { OnFilterChanged (null, null); base.Destroy (); } } }
{ "content_hash": "af3ff5b4b61fca9a331b738113d98dcb", "timestamp": "", "source": "github", "line_count": 129, "max_line_length": 148, "avg_line_length": 32.17054263565891, "alnum_prop": 0.5908433734939759, "repo_name": "Dynalon/banshee-osx", "id": "261ae86a350d64b1f9be7742e7f42cb4a1b8368d", "size": "5379", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Clients/Muinshee/Muinshee/BaseDialog.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Boo", "bytes": "3305" }, { "name": "C", "bytes": "228979" }, { "name": "C#", "bytes": "5860829" }, { "name": "C++", "bytes": "13690" }, { "name": "JavaScript", "bytes": "7633" }, { "name": "Perl", "bytes": "6338" }, { "name": "Python", "bytes": "8488" }, { "name": "R", "bytes": "3362" }, { "name": "Shell", "bytes": "33196" } ], "symlink_target": "" }
#define AVMB1_COMPAT #include "kcapi.h" #include <linux/module.h> #include <linux/mm.h> #include <linux/interrupt.h> #include <linux/ioport.h> #include <linux/proc_fs.h> #include <linux/sched.h> #include <linux/seq_file.h> #include <linux/skbuff.h> #include <linux/workqueue.h> #include <linux/capi.h> #include <linux/kernelcapi.h> #include <linux/init.h> #include <linux/moduleparam.h> #include <linux/delay.h> #include <linux/slab.h> #include <asm/uaccess.h> #include <linux/isdn/capicmd.h> #include <linux/isdn/capiutil.h> #ifdef AVMB1_COMPAT #include <linux/b1lli.h> #endif #include <linux/mutex.h> #include <linux/rcupdate.h> static int showcapimsgs = 0; static struct workqueue_struct *kcapi_wq; MODULE_DESCRIPTION("CAPI4Linux: kernel CAPI layer"); MODULE_AUTHOR("Carsten Paeth"); MODULE_LICENSE("GPL"); module_param(showcapimsgs, uint, 0); /* ------------------------------------------------------------- */ struct capictr_event { struct work_struct work; unsigned int type; u32 controller; }; /* ------------------------------------------------------------- */ static struct capi_version driver_version = {2, 0, 1, 1 << 4}; static char driver_serial[CAPI_SERIAL_LEN] = "0004711"; static char capi_manufakturer[64] = "AVM Berlin"; #define NCCI2CTRL(ncci) (((ncci) >> 24) & 0x7f) LIST_HEAD(capi_drivers); DEFINE_MUTEX(capi_drivers_lock); struct capi_ctr *capi_controller[CAPI_MAXCONTR]; DEFINE_MUTEX(capi_controller_lock); struct capi20_appl *capi_applications[CAPI_MAXAPPL]; static int ncontrollers; static BLOCKING_NOTIFIER_HEAD(ctr_notifier_list); /* -------- controller ref counting -------------------------------------- */ static inline struct capi_ctr * capi_ctr_get(struct capi_ctr *ctr) { if (!try_module_get(ctr->owner)) return NULL; return ctr; } static inline void capi_ctr_put(struct capi_ctr *ctr) { module_put(ctr->owner); } /* ------------------------------------------------------------- */ static inline struct capi_ctr *get_capi_ctr_by_nr(u16 contr) { if (contr < 1 || contr - 1 >= CAPI_MAXCONTR) return NULL; return capi_controller[contr - 1]; } static inline struct capi20_appl *__get_capi_appl_by_nr(u16 applid) { lockdep_assert_held(&capi_controller_lock); if (applid < 1 || applid - 1 >= CAPI_MAXAPPL) return NULL; return capi_applications[applid - 1]; } static inline struct capi20_appl *get_capi_appl_by_nr(u16 applid) { if (applid < 1 || applid - 1 >= CAPI_MAXAPPL) return NULL; return rcu_dereference(capi_applications[applid - 1]); } /* -------- util functions ------------------------------------ */ static inline int capi_cmd_valid(u8 cmd) { switch (cmd) { case CAPI_ALERT: case CAPI_CONNECT: case CAPI_CONNECT_ACTIVE: case CAPI_CONNECT_B3_ACTIVE: case CAPI_CONNECT_B3: case CAPI_CONNECT_B3_T90_ACTIVE: case CAPI_DATA_B3: case CAPI_DISCONNECT_B3: case CAPI_DISCONNECT: case CAPI_FACILITY: case CAPI_INFO: case CAPI_LISTEN: case CAPI_MANUFACTURER: case CAPI_RESET_B3: case CAPI_SELECT_B_PROTOCOL: return 1; } return 0; } static inline int capi_subcmd_valid(u8 subcmd) { switch (subcmd) { case CAPI_REQ: case CAPI_CONF: case CAPI_IND: case CAPI_RESP: return 1; } return 0; } /* ------------------------------------------------------------ */ static void register_appl(struct capi_ctr *ctr, u16 applid, capi_register_params *rparam) { ctr = capi_ctr_get(ctr); if (ctr) ctr->register_appl(ctr, applid, rparam); else printk(KERN_WARNING "%s: cannot get controller resources\n", __func__); } static void release_appl(struct capi_ctr *ctr, u16 applid) { DBG("applid %#x", applid); ctr->release_appl(ctr, applid); 
capi_ctr_put(ctr); } static void notify_up(u32 contr) { struct capi20_appl *ap; struct capi_ctr *ctr; u16 applid; mutex_lock(&capi_controller_lock); if (showcapimsgs & 1) printk(KERN_DEBUG "kcapi: notify up contr %d\n", contr); ctr = get_capi_ctr_by_nr(contr); if (ctr) { if (ctr->state == CAPI_CTR_RUNNING) goto unlock_out; ctr->state = CAPI_CTR_RUNNING; for (applid = 1; applid <= CAPI_MAXAPPL; applid++) { ap = __get_capi_appl_by_nr(applid); if (ap) register_appl(ctr, applid, &ap->rparam); } wake_up_interruptible_all(&ctr->state_wait_queue); } else printk(KERN_WARNING "%s: invalid contr %d\n", __func__, contr); unlock_out: mutex_unlock(&capi_controller_lock); } static void ctr_down(struct capi_ctr *ctr, int new_state) { struct capi20_appl *ap; u16 applid; if (ctr->state == CAPI_CTR_DETECTED || ctr->state == CAPI_CTR_DETACHED) return; ctr->state = new_state; memset(ctr->manu, 0, sizeof(ctr->manu)); memset(&ctr->version, 0, sizeof(ctr->version)); memset(&ctr->profile, 0, sizeof(ctr->profile)); memset(ctr->serial, 0, sizeof(ctr->serial)); for (applid = 1; applid <= CAPI_MAXAPPL; applid++) { ap = __get_capi_appl_by_nr(applid); if (ap) capi_ctr_put(ctr); } wake_up_interruptible_all(&ctr->state_wait_queue); } static void notify_down(u32 contr) { struct capi_ctr *ctr; mutex_lock(&capi_controller_lock); if (showcapimsgs & 1) printk(KERN_DEBUG "kcapi: notify down contr %d\n", contr); ctr = get_capi_ctr_by_nr(contr); if (ctr) ctr_down(ctr, CAPI_CTR_DETECTED); else printk(KERN_WARNING "%s: invalid contr %d\n", __func__, contr); mutex_unlock(&capi_controller_lock); } static int notify_handler(struct notifier_block *nb, unsigned long val, void *v) { u32 contr = (long)v; switch (val) { case CAPICTR_UP: notify_up(contr); break; case CAPICTR_DOWN: notify_down(contr); break; } return NOTIFY_OK; } static void do_notify_work(struct work_struct *work) { struct capictr_event *event = container_of(work, struct capictr_event, work); blocking_notifier_call_chain(&ctr_notifier_list, event->type, (void *)(long)event->controller); kfree(event); } /* * The notifier will result in adding/deleteing of devices. Devices can * only removed in user process, not in bh. */ static int notify_push(unsigned int event_type, u32 controller) { struct capictr_event *event = kmalloc(sizeof(*event), GFP_ATOMIC); if (!event) return -ENOMEM; INIT_WORK(&event->work, do_notify_work); event->type = event_type; event->controller = controller; queue_work(kcapi_wq, &event->work); return 0; } int register_capictr_notifier(struct notifier_block *nb) { return blocking_notifier_chain_register(&ctr_notifier_list, nb); } EXPORT_SYMBOL_GPL(register_capictr_notifier); int unregister_capictr_notifier(struct notifier_block *nb) { return blocking_notifier_chain_unregister(&ctr_notifier_list, nb); } EXPORT_SYMBOL_GPL(unregister_capictr_notifier); /* -------- Receiver ------------------------------------------ */ static void recv_handler(struct work_struct *work) { struct sk_buff *skb; struct capi20_appl *ap = container_of(work, struct capi20_appl, recv_work); if ((!ap) || (ap->release_in_progress)) return; mutex_lock(&ap->recv_mtx); while ((skb = skb_dequeue(&ap->recv_queue))) { if (CAPIMSG_CMD(skb->data) == CAPI_DATA_B3_IND) ap->nrecvdatapkt++; else ap->nrecvctlpkt++; ap->recv_message(ap, skb); } mutex_unlock(&ap->recv_mtx); } /** * capi_ctr_handle_message() - handle incoming CAPI message * @ctr: controller descriptor structure. * @appl: application ID. * @skb: message. * * Called by hardware driver to pass a CAPI message to the application. 
*/ void capi_ctr_handle_message(struct capi_ctr *ctr, u16 appl, struct sk_buff *skb) { struct capi20_appl *ap; int showctl = 0; u8 cmd, subcmd; _cdebbuf *cdb; if (ctr->state != CAPI_CTR_RUNNING) { cdb = capi_message2str(skb->data); if (cdb) { printk(KERN_INFO "kcapi: controller [%03d] not active, got: %s", ctr->cnr, cdb->buf); cdebbuf_free(cdb); } else printk(KERN_INFO "kcapi: controller [%03d] not active, cannot trace\n", ctr->cnr); goto error; } cmd = CAPIMSG_COMMAND(skb->data); subcmd = CAPIMSG_SUBCOMMAND(skb->data); if (cmd == CAPI_DATA_B3 && subcmd == CAPI_IND) { ctr->nrecvdatapkt++; if (ctr->traceflag > 2) showctl |= 2; } else { ctr->nrecvctlpkt++; if (ctr->traceflag) showctl |= 2; } showctl |= (ctr->traceflag & 1); if (showctl & 2) { if (showctl & 1) { printk(KERN_DEBUG "kcapi: got [%03d] id#%d %s len=%u\n", ctr->cnr, CAPIMSG_APPID(skb->data), capi_cmd2str(cmd, subcmd), CAPIMSG_LEN(skb->data)); } else { cdb = capi_message2str(skb->data); if (cdb) { printk(KERN_DEBUG "kcapi: got [%03d] %s\n", ctr->cnr, cdb->buf); cdebbuf_free(cdb); } else printk(KERN_DEBUG "kcapi: got [%03d] id#%d %s len=%u, cannot trace\n", ctr->cnr, CAPIMSG_APPID(skb->data), capi_cmd2str(cmd, subcmd), CAPIMSG_LEN(skb->data)); } } rcu_read_lock(); ap = get_capi_appl_by_nr(CAPIMSG_APPID(skb->data)); if (!ap) { rcu_read_unlock(); cdb = capi_message2str(skb->data); if (cdb) { printk(KERN_ERR "kcapi: handle_message: applid %d state released (%s)\n", CAPIMSG_APPID(skb->data), cdb->buf); cdebbuf_free(cdb); } else printk(KERN_ERR "kcapi: handle_message: applid %d state released (%s) cannot trace\n", CAPIMSG_APPID(skb->data), capi_cmd2str(cmd, subcmd)); goto error; } skb_queue_tail(&ap->recv_queue, skb); queue_work(kcapi_wq, &ap->recv_work); rcu_read_unlock(); return; error: kfree_skb(skb); } EXPORT_SYMBOL(capi_ctr_handle_message); /** * capi_ctr_ready() - signal CAPI controller ready * @ctr: controller descriptor structure. * * Called by hardware driver to signal that the controller is up and running. */ void capi_ctr_ready(struct capi_ctr *ctr) { printk(KERN_NOTICE "kcapi: controller [%03d] \"%s\" ready.\n", ctr->cnr, ctr->name); notify_push(CAPICTR_UP, ctr->cnr); } EXPORT_SYMBOL(capi_ctr_ready); /** * capi_ctr_down() - signal CAPI controller not ready * @ctr: controller descriptor structure. * * Called by hardware driver to signal that the controller is down and * unavailable for use. */ void capi_ctr_down(struct capi_ctr *ctr) { printk(KERN_NOTICE "kcapi: controller [%03d] down.\n", ctr->cnr); notify_push(CAPICTR_DOWN, ctr->cnr); } EXPORT_SYMBOL(capi_ctr_down); /** * capi_ctr_suspend_output() - suspend controller * @ctr: controller descriptor structure. * * Called by hardware driver to stop data flow. * * Note: The caller is responsible for synchronizing concurrent state changes * as well as invocations of capi_ctr_handle_message. */ void capi_ctr_suspend_output(struct capi_ctr *ctr) { if (!ctr->blocked) { printk(KERN_DEBUG "kcapi: controller [%03d] suspend\n", ctr->cnr); ctr->blocked = 1; } } EXPORT_SYMBOL(capi_ctr_suspend_output); /** * capi_ctr_resume_output() - resume controller * @ctr: controller descriptor structure. * * Called by hardware driver to resume data flow. * * Note: The caller is responsible for synchronizing concurrent state changes * as well as invocations of capi_ctr_handle_message. 
*/ void capi_ctr_resume_output(struct capi_ctr *ctr) { if (ctr->blocked) { printk(KERN_DEBUG "kcapi: controller [%03d] resumed\n", ctr->cnr); ctr->blocked = 0; } } EXPORT_SYMBOL(capi_ctr_resume_output); /* ------------------------------------------------------------- */ /** * attach_capi_ctr() - register CAPI controller * @ctr: controller descriptor structure. * * Called by hardware driver to register a controller with the CAPI subsystem. * Return value: 0 on success, error code < 0 on error */ int attach_capi_ctr(struct capi_ctr *ctr) { int i; mutex_lock(&capi_controller_lock); for (i = 0; i < CAPI_MAXCONTR; i++) { if (!capi_controller[i]) break; } if (i == CAPI_MAXCONTR) { mutex_unlock(&capi_controller_lock); printk(KERN_ERR "kcapi: out of controller slots\n"); return -EBUSY; } capi_controller[i] = ctr; ctr->nrecvctlpkt = 0; ctr->nrecvdatapkt = 0; ctr->nsentctlpkt = 0; ctr->nsentdatapkt = 0; ctr->cnr = i + 1; ctr->state = CAPI_CTR_DETECTED; ctr->blocked = 0; ctr->traceflag = showcapimsgs; init_waitqueue_head(&ctr->state_wait_queue); sprintf(ctr->procfn, "capi/controllers/%d", ctr->cnr); ctr->procent = proc_create_data(ctr->procfn, 0, NULL, ctr->proc_fops, ctr); ncontrollers++; mutex_unlock(&capi_controller_lock); printk(KERN_NOTICE "kcapi: controller [%03d]: %s attached\n", ctr->cnr, ctr->name); return 0; } EXPORT_SYMBOL(attach_capi_ctr); /** * detach_capi_ctr() - unregister CAPI controller * @ctr: controller descriptor structure. * * Called by hardware driver to remove the registration of a controller * with the CAPI subsystem. * Return value: 0 on success, error code < 0 on error */ int detach_capi_ctr(struct capi_ctr *ctr) { int err = 0; mutex_lock(&capi_controller_lock); ctr_down(ctr, CAPI_CTR_DETACHED); if (capi_controller[ctr->cnr - 1] != ctr) { err = -EINVAL; goto unlock_out; } capi_controller[ctr->cnr - 1] = NULL; ncontrollers--; if (ctr->procent) remove_proc_entry(ctr->procfn, NULL); printk(KERN_NOTICE "kcapi: controller [%03d]: %s unregistered\n", ctr->cnr, ctr->name); unlock_out: mutex_unlock(&capi_controller_lock); return err; } EXPORT_SYMBOL(detach_capi_ctr); /** * register_capi_driver() - register CAPI driver * @driver: driver descriptor structure. * * Called by hardware driver to register itself with the CAPI subsystem. */ void register_capi_driver(struct capi_driver *driver) { mutex_lock(&capi_drivers_lock); list_add_tail(&driver->list, &capi_drivers); mutex_unlock(&capi_drivers_lock); } EXPORT_SYMBOL(register_capi_driver); /** * unregister_capi_driver() - unregister CAPI driver * @driver: driver descriptor structure. * * Called by hardware driver to unregister itself from the CAPI subsystem. 
*/ void unregister_capi_driver(struct capi_driver *driver) { mutex_lock(&capi_drivers_lock); list_del(&driver->list); mutex_unlock(&capi_drivers_lock); } EXPORT_SYMBOL(unregister_capi_driver); /* ------------------------------------------------------------- */ /* -------- CAPI2.0 Interface ---------------------------------- */ /* ------------------------------------------------------------- */ /** * capi20_isinstalled() - CAPI 2.0 operation CAPI_INSTALLED * * Return value: CAPI result code (CAPI_NOERROR if at least one ISDN controller * is ready for use, CAPI_REGNOTINSTALLED otherwise) */ u16 capi20_isinstalled(void) { u16 ret = CAPI_REGNOTINSTALLED; int i; mutex_lock(&capi_controller_lock); for (i = 0; i < CAPI_MAXCONTR; i++) if (capi_controller[i] && capi_controller[i]->state == CAPI_CTR_RUNNING) { ret = CAPI_NOERROR; break; } mutex_unlock(&capi_controller_lock); return ret; } EXPORT_SYMBOL(capi20_isinstalled); /** * capi20_register() - CAPI 2.0 operation CAPI_REGISTER * @ap: CAPI application descriptor structure. * * Register an application's presence with CAPI. * A unique application ID is assigned and stored in @ap->applid. * After this function returns successfully, the message receive * callback function @ap->recv_message() may be called at any time * until capi20_release() has been called for the same @ap. * Return value: CAPI result code */ u16 capi20_register(struct capi20_appl *ap) { int i; u16 applid; DBG(""); if (ap->rparam.datablklen < 128) return CAPI_LOGBLKSIZETOSMALL; ap->nrecvctlpkt = 0; ap->nrecvdatapkt = 0; ap->nsentctlpkt = 0; ap->nsentdatapkt = 0; mutex_init(&ap->recv_mtx); skb_queue_head_init(&ap->recv_queue); INIT_WORK(&ap->recv_work, recv_handler); ap->release_in_progress = 0; mutex_lock(&capi_controller_lock); for (applid = 1; applid <= CAPI_MAXAPPL; applid++) { if (capi_applications[applid - 1] == NULL) break; } if (applid > CAPI_MAXAPPL) { mutex_unlock(&capi_controller_lock); return CAPI_TOOMANYAPPLS; } ap->applid = applid; capi_applications[applid - 1] = ap; for (i = 0; i < CAPI_MAXCONTR; i++) { if (!capi_controller[i] || capi_controller[i]->state != CAPI_CTR_RUNNING) continue; register_appl(capi_controller[i], applid, &ap->rparam); } mutex_unlock(&capi_controller_lock); if (showcapimsgs & 1) { printk(KERN_DEBUG "kcapi: appl %d up\n", applid); } return CAPI_NOERROR; } EXPORT_SYMBOL(capi20_register); /** * capi20_release() - CAPI 2.0 operation CAPI_RELEASE * @ap: CAPI application descriptor structure. * * Terminate an application's registration with CAPI. * After this function returns successfully, the message receive * callback function @ap->recv_message() will no longer be called. * Return value: CAPI result code */ u16 capi20_release(struct capi20_appl *ap) { int i; DBG("applid %#x", ap->applid); mutex_lock(&capi_controller_lock); ap->release_in_progress = 1; capi_applications[ap->applid - 1] = NULL; synchronize_rcu(); for (i = 0; i < CAPI_MAXCONTR; i++) { if (!capi_controller[i] || capi_controller[i]->state != CAPI_CTR_RUNNING) continue; release_appl(capi_controller[i], ap->applid); } mutex_unlock(&capi_controller_lock); flush_workqueue(kcapi_wq); skb_queue_purge(&ap->recv_queue); if (showcapimsgs & 1) { printk(KERN_DEBUG "kcapi: appl %d down\n", ap->applid); } return CAPI_NOERROR; } EXPORT_SYMBOL(capi20_release); /** * capi20_put_message() - CAPI 2.0 operation CAPI_PUT_MESSAGE * @ap: CAPI application descriptor structure. * @skb: CAPI message. * * Transfer a single message to CAPI. 
* Return value: CAPI result code */ u16 capi20_put_message(struct capi20_appl *ap, struct sk_buff *skb) { struct capi_ctr *ctr; int showctl = 0; u8 cmd, subcmd; DBG("applid %#x", ap->applid); if (ncontrollers == 0) return CAPI_REGNOTINSTALLED; if ((ap->applid == 0) || ap->release_in_progress) return CAPI_ILLAPPNR; if (skb->len < 12 || !capi_cmd_valid(CAPIMSG_COMMAND(skb->data)) || !capi_subcmd_valid(CAPIMSG_SUBCOMMAND(skb->data))) return CAPI_ILLCMDORSUBCMDORMSGTOSMALL; /* * The controller reference is protected by the existence of the * application passed to us. We assume that the caller properly * synchronizes this service with capi20_release. */ ctr = get_capi_ctr_by_nr(CAPIMSG_CONTROLLER(skb->data)); if (!ctr || ctr->state != CAPI_CTR_RUNNING) return CAPI_REGNOTINSTALLED; if (ctr->blocked) return CAPI_SENDQUEUEFULL; cmd = CAPIMSG_COMMAND(skb->data); subcmd = CAPIMSG_SUBCOMMAND(skb->data); if (cmd == CAPI_DATA_B3 && subcmd == CAPI_REQ) { ctr->nsentdatapkt++; ap->nsentdatapkt++; if (ctr->traceflag > 2) showctl |= 2; } else { ctr->nsentctlpkt++; ap->nsentctlpkt++; if (ctr->traceflag) showctl |= 2; } showctl |= (ctr->traceflag & 1); if (showctl & 2) { if (showctl & 1) { printk(KERN_DEBUG "kcapi: put [%03d] id#%d %s len=%u\n", CAPIMSG_CONTROLLER(skb->data), CAPIMSG_APPID(skb->data), capi_cmd2str(cmd, subcmd), CAPIMSG_LEN(skb->data)); } else { _cdebbuf *cdb = capi_message2str(skb->data); if (cdb) { printk(KERN_DEBUG "kcapi: put [%03d] %s\n", CAPIMSG_CONTROLLER(skb->data), cdb->buf); cdebbuf_free(cdb); } else printk(KERN_DEBUG "kcapi: put [%03d] id#%d %s len=%u cannot trace\n", CAPIMSG_CONTROLLER(skb->data), CAPIMSG_APPID(skb->data), capi_cmd2str(cmd, subcmd), CAPIMSG_LEN(skb->data)); } } return ctr->send_message(ctr, skb); } EXPORT_SYMBOL(capi20_put_message); /** * capi20_get_manufacturer() - CAPI 2.0 operation CAPI_GET_MANUFACTURER * @contr: controller number. * @buf: result buffer (64 bytes). * * Retrieve information about the manufacturer of the specified ISDN controller * or (for @contr == 0) the driver itself. * Return value: CAPI result code */ u16 capi20_get_manufacturer(u32 contr, u8 *buf) { struct capi_ctr *ctr; u16 ret; if (contr == 0) { strlcpy(buf, capi_manufakturer, CAPI_MANUFACTURER_LEN); return CAPI_NOERROR; } mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(contr); if (ctr && ctr->state == CAPI_CTR_RUNNING) { strlcpy(buf, ctr->manu, CAPI_MANUFACTURER_LEN); ret = CAPI_NOERROR; } else ret = CAPI_REGNOTINSTALLED; mutex_unlock(&capi_controller_lock); return ret; } EXPORT_SYMBOL(capi20_get_manufacturer); /** * capi20_get_version() - CAPI 2.0 operation CAPI_GET_VERSION * @contr: controller number. * @verp: result structure. * * Retrieve version information for the specified ISDN controller * or (for @contr == 0) the driver itself. * Return value: CAPI result code */ u16 capi20_get_version(u32 contr, struct capi_version *verp) { struct capi_ctr *ctr; u16 ret; if (contr == 0) { *verp = driver_version; return CAPI_NOERROR; } mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(contr); if (ctr && ctr->state == CAPI_CTR_RUNNING) { memcpy(verp, &ctr->version, sizeof(capi_version)); ret = CAPI_NOERROR; } else ret = CAPI_REGNOTINSTALLED; mutex_unlock(&capi_controller_lock); return ret; } EXPORT_SYMBOL(capi20_get_version); /** * capi20_get_serial() - CAPI 2.0 operation CAPI_GET_SERIAL_NUMBER * @contr: controller number. * @serial: result buffer (8 bytes). * * Retrieve the serial number of the specified ISDN controller * or (for @contr == 0) the driver itself. 
* Return value: CAPI result code */ u16 capi20_get_serial(u32 contr, u8 *serial) { struct capi_ctr *ctr; u16 ret; if (contr == 0) { strlcpy(serial, driver_serial, CAPI_SERIAL_LEN); return CAPI_NOERROR; } mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(contr); if (ctr && ctr->state == CAPI_CTR_RUNNING) { strlcpy(serial, ctr->serial, CAPI_SERIAL_LEN); ret = CAPI_NOERROR; } else ret = CAPI_REGNOTINSTALLED; mutex_unlock(&capi_controller_lock); return ret; } EXPORT_SYMBOL(capi20_get_serial); /** * capi20_get_profile() - CAPI 2.0 operation CAPI_GET_PROFILE * @contr: controller number. * @profp: result structure. * * Retrieve capability information for the specified ISDN controller * or (for @contr == 0) the number of installed controllers. * Return value: CAPI result code */ u16 capi20_get_profile(u32 contr, struct capi_profile *profp) { struct capi_ctr *ctr; u16 ret; if (contr == 0) { profp->ncontroller = ncontrollers; return CAPI_NOERROR; } mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(contr); if (ctr && ctr->state == CAPI_CTR_RUNNING) { memcpy(profp, &ctr->profile, sizeof(struct capi_profile)); ret = CAPI_NOERROR; } else ret = CAPI_REGNOTINSTALLED; mutex_unlock(&capi_controller_lock); return ret; } EXPORT_SYMBOL(capi20_get_profile); /* Must be called with capi_controller_lock held. */ static int wait_on_ctr_state(struct capi_ctr *ctr, unsigned int state) { DEFINE_WAIT(wait); int retval = 0; ctr = capi_ctr_get(ctr); if (!ctr) return -ESRCH; for (;;) { prepare_to_wait(&ctr->state_wait_queue, &wait, TASK_INTERRUPTIBLE); if (ctr->state == state) break; if (ctr->state == CAPI_CTR_DETACHED) { retval = -ESRCH; break; } if (signal_pending(current)) { retval = -EINTR; break; } mutex_unlock(&capi_controller_lock); schedule(); mutex_lock(&capi_controller_lock); } finish_wait(&ctr->state_wait_queue, &wait); capi_ctr_put(ctr); return retval; } #ifdef AVMB1_COMPAT static int old_capi_manufacturer(unsigned int cmd, void __user *data) { avmb1_loadandconfigdef ldef; avmb1_extcarddef cdef; avmb1_resetdef rdef; capicardparams cparams; struct capi_ctr *ctr; struct capi_driver *driver = NULL; capiloaddata ldata; struct list_head *l; int retval; switch (cmd) { case AVMB1_ADDCARD: case AVMB1_ADDCARD_WITH_TYPE: if (cmd == AVMB1_ADDCARD) { if ((retval = copy_from_user(&cdef, data, sizeof(avmb1_carddef)))) return -EFAULT; cdef.cardtype = AVM_CARDTYPE_B1; } else { if ((retval = copy_from_user(&cdef, data, sizeof(avmb1_extcarddef)))) return -EFAULT; } cparams.port = cdef.port; cparams.irq = cdef.irq; cparams.cardnr = cdef.cardnr; mutex_lock(&capi_drivers_lock); switch (cdef.cardtype) { case AVM_CARDTYPE_B1: list_for_each(l, &capi_drivers) { driver = list_entry(l, struct capi_driver, list); if (strcmp(driver->name, "b1isa") == 0) break; } break; case AVM_CARDTYPE_T1: list_for_each(l, &capi_drivers) { driver = list_entry(l, struct capi_driver, list); if (strcmp(driver->name, "t1isa") == 0) break; } break; default: driver = NULL; break; } if (!driver) { printk(KERN_ERR "kcapi: driver not loaded.\n"); retval = -EIO; } else if (!driver->add_card) { printk(KERN_ERR "kcapi: driver has no add card function.\n"); retval = -EIO; } else retval = driver->add_card(driver, &cparams); mutex_unlock(&capi_drivers_lock); return retval; case AVMB1_LOAD: case AVMB1_LOAD_AND_CONFIG: if (cmd == AVMB1_LOAD) { if (copy_from_user(&ldef, data, sizeof(avmb1_loaddef))) return -EFAULT; ldef.t4config.len = 0; ldef.t4config.data = NULL; } else { if (copy_from_user(&ldef, data, sizeof(avmb1_loadandconfigdef))) return -EFAULT; 
} mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(ldef.contr); if (!ctr) { retval = -EINVAL; goto load_unlock_out; } if (ctr->load_firmware == NULL) { printk(KERN_DEBUG "kcapi: load: no load function\n"); retval = -ESRCH; goto load_unlock_out; } if (ldef.t4file.len <= 0) { printk(KERN_DEBUG "kcapi: load: invalid parameter: length of t4file is %d ?\n", ldef.t4file.len); retval = -EINVAL; goto load_unlock_out; } if (ldef.t4file.data == NULL) { printk(KERN_DEBUG "kcapi: load: invalid parameter: dataptr is 0\n"); retval = -EINVAL; goto load_unlock_out; } ldata.firmware.user = 1; ldata.firmware.data = ldef.t4file.data; ldata.firmware.len = ldef.t4file.len; ldata.configuration.user = 1; ldata.configuration.data = ldef.t4config.data; ldata.configuration.len = ldef.t4config.len; if (ctr->state != CAPI_CTR_DETECTED) { printk(KERN_INFO "kcapi: load: contr=%d not in detect state\n", ldef.contr); retval = -EBUSY; goto load_unlock_out; } ctr->state = CAPI_CTR_LOADING; retval = ctr->load_firmware(ctr, &ldata); if (retval) { ctr->state = CAPI_CTR_DETECTED; goto load_unlock_out; } retval = wait_on_ctr_state(ctr, CAPI_CTR_RUNNING); load_unlock_out: mutex_unlock(&capi_controller_lock); return retval; case AVMB1_RESETCARD: if (copy_from_user(&rdef, data, sizeof(avmb1_resetdef))) return -EFAULT; retval = 0; mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(rdef.contr); if (!ctr) { retval = -ESRCH; goto reset_unlock_out; } if (ctr->state == CAPI_CTR_DETECTED) goto reset_unlock_out; if (ctr->reset_ctr == NULL) { printk(KERN_DEBUG "kcapi: reset: no reset function\n"); retval = -ESRCH; goto reset_unlock_out; } ctr->reset_ctr(ctr); retval = wait_on_ctr_state(ctr, CAPI_CTR_DETECTED); reset_unlock_out: mutex_unlock(&capi_controller_lock); return retval; } return -EINVAL; } #endif /** * capi20_manufacturer() - CAPI 2.0 operation CAPI_MANUFACTURER * @cmd: command. * @data: parameter. * * Perform manufacturer specific command. 
* Return value: CAPI result code */ int capi20_manufacturer(unsigned long cmd, void __user *data) { struct capi_ctr *ctr; int retval; switch (cmd) { #ifdef AVMB1_COMPAT case AVMB1_LOAD: case AVMB1_LOAD_AND_CONFIG: case AVMB1_RESETCARD: case AVMB1_GET_CARDINFO: case AVMB1_REMOVECARD: return old_capi_manufacturer(cmd, data); #endif case KCAPI_CMD_TRACE: { kcapi_flagdef fdef; if (copy_from_user(&fdef, data, sizeof(kcapi_flagdef))) return -EFAULT; mutex_lock(&capi_controller_lock); ctr = get_capi_ctr_by_nr(fdef.contr); if (ctr) { ctr->traceflag = fdef.flag; printk(KERN_INFO "kcapi: contr [%03d] set trace=%d\n", ctr->cnr, ctr->traceflag); retval = 0; } else retval = -ESRCH; mutex_unlock(&capi_controller_lock); return retval; } case KCAPI_CMD_ADDCARD: { struct list_head *l; struct capi_driver *driver = NULL; capicardparams cparams; kcapi_carddef cdef; if ((retval = copy_from_user(&cdef, data, sizeof(cdef)))) return -EFAULT; cparams.port = cdef.port; cparams.irq = cdef.irq; cparams.membase = cdef.membase; cparams.cardnr = cdef.cardnr; cparams.cardtype = 0; cdef.driver[sizeof(cdef.driver) - 1] = 0; mutex_lock(&capi_drivers_lock); list_for_each(l, &capi_drivers) { driver = list_entry(l, struct capi_driver, list); if (strcmp(driver->name, cdef.driver) == 0) break; } if (driver == NULL) { printk(KERN_ERR "kcapi: driver \"%s\" not loaded.\n", cdef.driver); retval = -ESRCH; } else if (!driver->add_card) { printk(KERN_ERR "kcapi: driver \"%s\" has no add card function.\n", cdef.driver); retval = -EIO; } else retval = driver->add_card(driver, &cparams); mutex_unlock(&capi_drivers_lock); return retval; } default: printk(KERN_ERR "kcapi: manufacturer command %lu unknown.\n", cmd); break; } return -EINVAL; } EXPORT_SYMBOL(capi20_manufacturer); /* ------------------------------------------------------------- */ /* -------- Init & Cleanup ------------------------------------- */ /* ------------------------------------------------------------- */ /* * init / exit functions */ static struct notifier_block capictr_nb = { .notifier_call = notify_handler, .priority = INT_MAX, }; static int __init kcapi_init(void) { int err; kcapi_wq = alloc_workqueue("kcapi", 0, 0); if (!kcapi_wq) return -ENOMEM; register_capictr_notifier(&capictr_nb); err = cdebug_init(); if (err) { unregister_capictr_notifier(&capictr_nb); destroy_workqueue(kcapi_wq); return err; } kcapi_proc_init(); return 0; } static void __exit kcapi_exit(void) { kcapi_proc_exit(); unregister_capictr_notifier(&capictr_nb); cdebug_exit(); destroy_workqueue(kcapi_wq); } module_init(kcapi_init); module_exit(kcapi_exit);
{ "content_hash": "f4515e7fa379d607b9530661dbefecca", "timestamp": "", "source": "github", "line_count": 1306, "max_line_length": 100, "avg_line_length": 23.031393568147013, "alnum_prop": 0.6490907277502577, "repo_name": "AlbandeCrevoisier/ldd-athens", "id": "823f6985b2603198ff7b7ea7c8869db6032ba168", "size": "30434", "binary": false, "copies": "1513", "ref": "refs/heads/master", "path": "linux-socfpga/drivers/isdn/capi/kcapi.c", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "10184236" }, { "name": "Awk", "bytes": "40418" }, { "name": "Batchfile", "bytes": "81753" }, { "name": "C", "bytes": "566858455" }, { "name": "C++", "bytes": "21399133" }, { "name": "Clojure", "bytes": "971" }, { "name": "Cucumber", "bytes": "5998" }, { "name": "FORTRAN", "bytes": "11832" }, { "name": "GDB", "bytes": "18113" }, { "name": "Groff", "bytes": "2686457" }, { "name": "HTML", "bytes": "34688334" }, { "name": "Lex", "bytes": "56961" }, { "name": "Logos", "bytes": "133810" }, { "name": "M4", "bytes": "3325" }, { "name": "Makefile", "bytes": "1685015" }, { "name": "Objective-C", "bytes": "920162" }, { "name": "Perl", "bytes": "752477" }, { "name": "Perl6", "bytes": "3783" }, { "name": "Python", "bytes": "533352" }, { "name": "Shell", "bytes": "468244" }, { "name": "SourcePawn", "bytes": "2711" }, { "name": "UnrealScript", "bytes": "12824" }, { "name": "XC", "bytes": "33970" }, { "name": "XS", "bytes": "34909" }, { "name": "Yacc", "bytes": "113516" } ], "symlink_target": "" }
<h3>Image Crop <small>Simple Image Crop directive for AngularJS</small> </h3> <div class="container-fluid" ng-controller="ImageCropController as imgcrop"> <div class="row"> <div class="col-md-3"> <div class="panel"> <div class="panel-heading"> <a class="pull-right" href="#" ng-click="imgcrop.reset()"> <small class="fa fa-refresh text-muted"></small> </a>Select an image file</div> <div class="panel-body"> <div class="form-group"> <input class="form-control" id="fileInput" filestyle="" type="file" data-class-button="btn btn-default" data-class-input="form-control" data-button-text="" /> </div> <p class="pv">Crop type:</p> <div class="btn-group btn-group-justified mb"> <label class="btn btn-default" ng-model="imgcrop.imgcropType" uib-btn-radio="'square'">Square</label> <label class="btn btn-default" ng-model="imgcrop.imgcropType" uib-btn-radio="'circle'">Circle</label> </div> <br/> <div class="imgcrop-preview" data-text="Cropped Image"> <img ng-src="{{imgcrop.myCroppedImage}}" /> </div> </div> </div> </div> <div class="col-md-9"> <div class="panel"> <div class="panel-body"> <div class="imgcrop-area"> <img-crop image="imgcrop.myImage" result-image="imgcrop.myCroppedImage" area-type="{{imgcrop.imgcropType}}"></img-crop> </div> </div> </div> </div> </div> </div>
{ "content_hash": "91caaba333086faf7437c9e5b45373bf", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 176, "avg_line_length": 45.526315789473685, "alnum_prop": 0.5144508670520231, "repo_name": "gbhu/studynotes-projects", "id": "dd5fa64fd63d6dce85d8137bf57f03fc89fdb955", "size": "1730", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "AngularjsAndAngle/webapp/app/views/form-imagecrop.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "4157034" }, { "name": "FreeMarker", "bytes": "137530" }, { "name": "HTML", "bytes": "6046110" }, { "name": "Java", "bytes": "310388" }, { "name": "JavaScript", "bytes": "17188564" }, { "name": "PHP", "bytes": "1792" } ], "symlink_target": "" }
class Cacher
  # Example:
  # Cacher.fetch("user_#{user_id}_entry_#{id}_prior_entry", expires_in: 5.minutes) do
  #   ...
  # end
  def self.fetch(key, opts = {}, &block)
    Rails.cache.fetch(key, opts) do
      Rails.logger.info "Cacher: Generating cache for key \"#{key}\"."
      yield
    end
  end

  def self.delete_matched(pattern)
    Rails.logger.info "Cacher: Deleting entries matching pattern \"#{pattern}\"."
    Rails.cache.delete_matched(pattern)
  end
end
{ "content_hash": "6a6e523a0519c3e27478192f0766d78e", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 85, "avg_line_length": 27.764705882352942, "alnum_prop": 0.6419491525423728, "repo_name": "topherhunt/timesheet", "id": "88f523585dbc32c3ba42d5e21dbc68bae2224cc7", "size": "472", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "app/logic/cacher.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2613" }, { "name": "HTML", "bytes": "43268" }, { "name": "JavaScript", "bytes": "7248" }, { "name": "Ruby", "bytes": "105510" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>file-sync: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.5.0~camlp4 / file-sync - 0.1.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> file-sync <small> 0.1.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-11-04 11:54:16 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-11-04 11:54:16 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-num base Num library distributed with the OCaml compiler base-threads base base-unix base camlp4 4.04+1 Camlp4 is a system for writing extensible parsers for programming languages conf-findutils 1 Virtual package relying on findutils coq 8.5.0~camlp4 Formal proof management system num 0 The Num library for arbitrary-precision integer and rational arithmetic ocaml 4.04.2 The OCaml compiler (virtual package) ocaml-base-compiler 4.04.2 Official 4.04.2 release ocaml-config 1 OCaml Switch Configuration ocamlbuild 0.14.2 OCamlbuild is a build system with builtin rules to easily build most OCaml projects # opam file: opam-version: &quot;2.0&quot; synopsis: &quot;File synchroniser specification in Coq&quot; description: &quot;Coq formalisation of the Unison file synchroniser&quot; maintainer: &quot;Yishuai Li &lt;[email protected]&gt;&quot; authors: &quot;Yishuai Li &lt;[email protected]&gt;&quot; license: &quot;MPL-2.0&quot; tags: &quot;logpath:FileSync&quot; homepage: &quot;https://github.com/liyishuai/file-sync&quot; bug-reports: &quot;https://github.com/liyishuai/file-sync/issues&quot; depends: [ &quot;ocaml&quot; {&gt;= &quot;4.12&quot;} &quot;coq&quot; {&gt;= &quot;8.14~&quot;} &quot;coq-simple-io&quot; {&gt;= &quot;1.8.0&quot;} &quot;coq-async-test&quot; &quot;ocamlbuild&quot; &quot;fileutils&quot; ] build: [make &quot;-j%{jobs}%&quot;] run-test: [make &quot;-j%{jobs}%&quot; &quot;test&quot;] install: [make &quot;install&quot; &quot;INSTALLDIR=%{bin}%&quot;] dev-repo: &quot;git+https://github.com/liyishuai/file-sync.git&quot; url { src: &quot;https://github.com/liyishuai/file-sync/archive/v0.1.0.tar.gz&quot; checksum: [ &quot;md5=1f86c96adea2a14c8f622298fda3dbfb&quot; 
&quot;sha512=416c1a4356126545ba08532b412ce83347a2104204d93344a9ab4f1b02db518faf8128d0d86b2c01073fade543cc02319e2f17dbab04c2b21115a75da21afc2c&quot; ] } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-file-sync.0.1.0 coq.8.5.0~camlp4</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.5.0~camlp4). The following dependencies couldn&#39;t be met: - coq-file-sync -&gt; ocaml &gt;= 4.12 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-file-sync.0.1.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "bf63d605a22dacd3a52676d6b835cb61", "timestamp": "", "source": "github", "line_count": 168, "max_line_length": 159, "avg_line_length": 42.023809523809526, "alnum_prop": 0.5481586402266289, "repo_name": "coq-bench/coq-bench.github.io", "id": "a472735b8cfb496e2b9425b1e76bc9786aca0b8b", "size": "7085", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.04.2-2.0.5/released/8.5.0~camlp4/file-sync/0.1.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
herokuExpressTest
=================
{ "content_hash": "aef747913bf1886160cfcef18849df5a", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 17, "avg_line_length": 18, "alnum_prop": 0.4722222222222222, "repo_name": "saitodisse/herokuExpressTest", "id": "65cc3df4de2c00bce492b142d895c1b8f7092589", "size": "36", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "308" } ], "symlink_target": "" }
""" kinto Kinto is a minimalist JSON storage service with synchronisation and sharing abilities. It is meant to be easy to use and easy to self-host. **Limitations of this OpenAPI specification:** 1. Validation on OR clauses is not supported (e.g. provide `data` or `permissions` in patch operations). 2. [Filtering](http://kinto.readthedocs.io/en/stable/api/1.x/filtering.html) is supported on any field by using `?{prefix}{field_name}={value}`. 3. [Backoff headers](http://kinto.readthedocs.io/en/stable/api/1.x/backoff.html) may occur with any response, but they are only present if the server is under in heavy load, so we cannot validate them on every request. They are listed only on the default error message. 4. [Collection schemas](http://kinto.readthedocs.io/en/stable/api/1.x/collections.html#collection-json-schema) can be provided when defining a collection, but they are not validated by this specification. OpenAPI spec version: 1.13 Generated by: https://github.com/swagger-api/swagger-codegen.git Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import sys from setuptools import setup, find_packages NAME = "swagger_client" VERSION = "1.0.0" # To install the library, run the following # # python setup.py install # # prerequisite: setuptools # http://pypi.python.org/pypi/setuptools REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"] setup( name=NAME, version=VERSION, description="kinto", author_email="", url="", keywords=["Swagger", "kinto"], install_requires=REQUIRES, packages=find_packages(), include_package_data=True, long_description="""\ Kinto is a minimalist JSON storage service with synchronisation and sharing abilities. It is meant to be easy to use and easy to self-host. **Limitations of this OpenAPI specification:** 1. Validation on OR clauses is not supported (e.g. provide &#x60;data&#x60; or &#x60;permissions&#x60; in patch operations). 2. [Filtering](http://kinto.readthedocs.io/en/stable/api/1.x/filtering.html) is supported on any field by using &#x60;?{prefix}{field_name}&#x3D;{value}&#x60;. 3. [Backoff headers](http://kinto.readthedocs.io/en/stable/api/1.x/backoff.html) may occur with any response, but they are only present if the server is under in heavy load, so we cannot validate them on every request. They are listed only on the default error message. 4. [Collection schemas](http://kinto.readthedocs.io/en/stable/api/1.x/collections.html#collection-json-schema) can be provided when defining a collection, but they are not validated by this specification. """ )
{ "content_hash": "67a23858cf93082e79a80373a4aba0f0", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 973, "avg_line_length": 61.23076923076923, "alnum_prop": 0.7283291457286433, "repo_name": "gabisurita/kinto-codegen-tutorial", "id": "a094f2ec051d01ba496489b2bddbc6472c2e6bcf", "size": "3201", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python-client/setup.py", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "95504" }, { "name": "Python", "bytes": "662063" }, { "name": "Shell", "bytes": "3259" } ], "symlink_target": "" }
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Struct template basic_expr</title> <link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../../proto/reference.html#header.boost.proto.expr_hpp" title="Header &lt;boost/proto/expr.hpp&gt;"> <link rel="prev" href="eval_idp204962752.html" title="Function eval"> <link rel="next" href="expr.html" title="Struct template expr"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td> <td align="center"><a href="../../../../index.html">Home</a></td> <td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="eval_idp204962752.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../proto/reference.html#header.boost.proto.expr_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="expr.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="refentry"> <a name="boost.proto.basic_expr"></a><div class="titlepage"></div> <div class="refnamediv"> <h2><span class="refentrytitle">Struct template basic_expr</span></h2> <p>boost::proto::basic_expr &#8212; Simplified representation of a node in an expression tree.</p> </div> <h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2> <div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../proto/reference.html#header.boost.proto.expr_hpp" title="Header &lt;boost/proto/expr.hpp&gt;">boost/proto/expr.hpp</a>&gt; </span><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> Tag<span class="special">,</span> <span class="keyword">typename</span> Args<span class="special">,</span> <span class="keyword">long</span> Arity <span class="special">=</span> <span class="identifier">Args</span><span class="special">::</span><span class="identifier">arity</span><span class="special">&gt;</span> <span class="keyword">struct</span> <a class="link" href="basic_expr.html" title="Struct template basic_expr">basic_expr</a> <span class="special">{</span> <span class="comment">// types</span> <span class="keyword">typedef</span> <span class="identifier">Tag</span> <a name="boost.proto.basic_expr.proto_tag"></a><span class="identifier">proto_tag</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">Args</span> <a name="boost.proto.basic_expr.proto_args"></a><span class="identifier">proto_args</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">mpl</span><span 
class="special">::</span><span class="identifier">long_</span><span class="special">&lt;</span> <span class="identifier">Arity</span> <span class="special">&gt;</span> <a name="boost.proto.basic_expr.proto_arity"></a><span class="identifier">proto_arity</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="basic_default_domain.html" title="Struct basic_default_domain">proto::basic_default_domain</a> <a name="boost.proto.basic_expr.proto_domain"></a><span class="identifier">proto_domain</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">basic_expr</span> <a name="boost.proto.basic_expr.proto_grammar"></a><span class="identifier">proto_grammar</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">basic_expr</span> <a name="boost.proto.basic_expr.proto_base_expr"></a><span class="identifier">proto_base_expr</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">basic_expr</span> <a name="boost.proto.basic_expr.proto_derived_expr"></a><span class="identifier">proto_derived_expr</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="keyword">typename</span> <span class="identifier">Args</span><span class="special">::</span><span class="identifier">child</span><em class="replaceable"><code><span class="identifier">N</span></code></em> <a name="boost.proto.basic_expr.proto_childN"></a><span class="identifier">proto_childN</span><span class="special">;</span> <span class="comment">// For each <em class="replaceable"><code>N</code></em> in <em class="replaceable"><code>[0,max(Arity,1))</code></em>.</span> <span class="comment">// <a class="link" href="basic_expr.html#idp205007968-bb">public static functions</a></span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span><span class="special">...</span> A<span class="special">&gt;</span> <span class="keyword">static</span> <span class="identifier">basic_expr</span> <span class="keyword">const</span> <a class="link" href="basic_expr.html#idp205008848-bb"><span class="identifier">make</span></a><span class="special">(</span><span class="identifier">A</span> <span class="keyword">const</span> <span class="special">&amp;</span><span class="special">...</span><span class="special">)</span><span class="special">;</span> <span class="comment">// <a class="link" href="basic_expr.html#idp205015952-bb">public member functions</a></span> <span class="identifier">basic_expr</span> <span class="special">&amp;</span> <a class="link" href="basic_expr.html#idp205016512-bb"><span class="identifier">proto_base</span></a><span class="special">(</span><span class="special">)</span><span class="special">;</span> <span class="identifier">basic_expr</span> <span class="keyword">const</span> <span class="special">&amp;</span> <a class="link" href="basic_expr.html#idp205019024-bb"><span class="identifier">proto_base</span></a><span class="special">(</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> <span class="special">}</span><span class="special">;</span></pre></div> <div class="refsect1"> <a name="idp369905808"></a><h2>Description</h2> <p> <code class="computeroutput">proto::basic_expr&lt;&gt;</code> is a node in an expression template tree. It is a container for its child sub-trees. It also serves as the terminal nodes of the tree. 
</p> <p> <code class="computeroutput">Tag</code> is type that represents the operation encoded by this expression. It is typically one of the structs in the <code class="computeroutput">boost::proto::tag</code> namespace, but it doesn't have to be. If <code class="computeroutput">Arity</code> is 0 then this <code class="computeroutput">expr&lt;&gt;</code> type represents a leaf in the expression tree. </p> <p> <code class="computeroutput">Args</code> is a list of types representing the children of this expression. It is an instantiation of one of <code class="computeroutput"><a class="link" href="listN.html" title="Struct template listN">proto::list1&lt;&gt;</a></code>, <code class="computeroutput"><a class="link" href="listN.html" title="Struct template listN">proto::list2&lt;&gt;</a></code>, etc. The child types must all themselves be either <code class="computeroutput">proto::expr&lt;&gt;</code> or <code class="computeroutput">proto::basic_expr&lt;&gt;&amp;</code> (or extensions thereof via <code class="computeroutput"><a class="link" href="extends.html" title="Struct template extends">proto::extends&lt;&gt;</a></code> or <code class="computeroutput"><a class="link" href="../../BOOST_PROTO_EXTENDS.html" title="Macro BOOST_PROTO_EXTENDS">BOOST_PROTO_EXTENDS</a>()</code>), unless <code class="computeroutput">Arity</code> is 0, in which case <code class="computeroutput">Args</code> must be <code class="computeroutput">proto::term&lt;T&gt;</code>, where <code class="computeroutput">T</code> can be any type. </p> <p> <code class="computeroutput">proto::basic_expr&lt;&gt;</code> is a valid Fusion random-access sequence, where the elements of the sequence are the child expressions. </p> <div class="refsect2"> <a name="idp369923104"></a><h3> <a name="idp205007968-bb"></a><code class="computeroutput">basic_expr</code> public static functions</h3> <div class="orderedlist"><ol class="orderedlist" type="1"><li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span><span class="special">...</span> A<span class="special">&gt;</span> <span class="keyword">static</span> <span class="identifier">basic_expr</span> <span class="keyword">const</span> <a name="idp205008848-bb"></a><span class="identifier">make</span><span class="special">(</span><span class="identifier">A</span> <span class="keyword">const</span> <span class="special">&amp;</span><span class="special">...</span> a<span class="special">)</span><span class="special">;</span></pre> <div class="variablelist"><table border="0" class="variablelist compact"> <colgroup> <col align="left" valign="top"> <col> </colgroup> <tbody> <tr> <td><p><span class="term">Requires:</span></p></td> <td><p> The number of supplied arguments must be <code class="computeroutput">max(Arity,1)</code>. </p></td> </tr> <tr> <td><p><span class="term">Returns:</span></p></td> <td><p> A new <code class="computeroutput">basic_expr</code> object initialized with the specified arguments. 
</p></td> </tr> </tbody> </table></div> </li></ol></div> </div> <div class="refsect2"> <a name="idp369940320"></a><h3> <a name="idp205015952-bb"></a><code class="computeroutput">basic_expr</code> public member functions</h3> <div class="orderedlist"><ol class="orderedlist" type="1"> <li class="listitem"> <pre class="literallayout"><span class="identifier">basic_expr</span> <span class="special">&amp;</span> <a name="idp205016512-bb"></a><span class="identifier">proto_base</span><span class="special">(</span><span class="special">)</span><span class="special">;</span></pre> <div class="variablelist"><table border="0" class="variablelist compact"> <colgroup> <col align="left" valign="top"> <col> </colgroup> <tbody><tr> <td><p><span class="term">Returns:</span></p></td> <td><p> <code class="computeroutput">*this</code> </p></td> </tr></tbody> </table></div> </li> <li class="listitem"> <pre class="literallayout"><span class="identifier">basic_expr</span> <span class="keyword">const</span> <span class="special">&amp;</span> <a name="idp205019024-bb"></a><span class="identifier">proto_base</span><span class="special">(</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre> <p> This is an overloaded member function, provided for convenience. It differs from the above function only in what argument(s) it accepts. </p> </li> </ol></div> </div> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2008 Eric Niebler<p> Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="eval_idp204962752.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../proto/reference.html#header.boost.proto.expr_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="expr.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
{ "content_hash": "746ec0de493123159fda2c06a16e6d49", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 615, "avg_line_length": 83.4367088607595, "alnum_prop": 0.658044451187135, "repo_name": "Ant-OS/android_packages_apps_OTAUpdates", "id": "6ca62814f3b5d968f8b4ff3f0fd69c0ab8f22f59", "size": "13183", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "jni/boost_1_57_0/doc/html/boost/proto/basic_expr.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "173932" }, { "name": "Batchfile", "bytes": "31151" }, { "name": "C", "bytes": "2920428" }, { "name": "C#", "bytes": "40804" }, { "name": "C++", "bytes": "156317088" }, { "name": "CMake", "bytes": "13760" }, { "name": "CSS", "bytes": "229619" }, { "name": "Cuda", "bytes": "26521" }, { "name": "FORTRAN", "bytes": "1387" }, { "name": "Gnuplot", "bytes": "2361" }, { "name": "Groff", "bytes": "8039" }, { "name": "HTML", "bytes": "144391206" }, { "name": "IDL", "bytes": "14" }, { "name": "Java", "bytes": "160971" }, { "name": "JavaScript", "bytes": "132031" }, { "name": "Lex", "bytes": "1231" }, { "name": "Makefile", "bytes": "1026404" }, { "name": "Max", "bytes": "36857" }, { "name": "Objective-C", "bytes": "3127" }, { "name": "Objective-C++", "bytes": "207" }, { "name": "PHP", "bytes": "59030" }, { "name": "Perl", "bytes": "29502" }, { "name": "Perl6", "bytes": "2053" }, { "name": "Python", "bytes": "1859144" }, { "name": "QML", "bytes": "593" }, { "name": "QMake", "bytes": "6974" }, { "name": "Rebol", "bytes": "354" }, { "name": "Shell", "bytes": "365897" }, { "name": "Tcl", "bytes": "1172" }, { "name": "TeX", "bytes": "13404" }, { "name": "XSLT", "bytes": "751497" }, { "name": "Yacc", "bytes": "18910" } ], "symlink_target": "" }
News for type-storage
=============

This file lists the major changes between versions. For a more detailed list of every change, see the Git log.

Latest
------
* tbd

3.0.0
-----
* Major: Upgrade to waf-tools 4
* Minor: Upgrade to gtest 4

2.2.0
-----
* Minor: Now ignoring ``const`` and ``volatile`` qualifiers on types.
* Patch: Fixed bug in const version of ``type_storage::baget()``

2.1.0
-----
* Added baget function to get an object based on a specified base type.

2.0.2
-----
* Added const get function

2.0.1
-----
* Added const getter function for wrapped type storage

2.0.0
-----
* Change type_storage to raw tuple and made type_storage get() function a free function within the type_storage namespace

1.0.0
-----
* Initial release of the type-storage library.
* Included examples in example/ directory
{ "content_hash": "960bfd92c2b6f4ea183799e8a743710a", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 121, "avg_line_length": 20.5, "alnum_prop": 0.6878048780487804, "repo_name": "steinwurf/type-storage", "id": "af8fdcf2c03466e924e54413168683b5944581f7", "size": "820", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "NEWS.rst", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "13943" }, { "name": "Python", "bytes": "4666" } ], "symlink_target": "" }
package br.gov.sp.fatec.security;

import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.SignatureAlgorithm;

import java.io.IOException;
import java.util.Date;

import br.gov.sp.fatec.model.Usuario;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class JwtUtils {

    private static final String secretKey = "spring.jwt.sec";

    public static String generateToken(Usuario usuario) throws JsonProcessingException {
        ObjectMapper mapper = new ObjectMapper();
        String usuarioJson = mapper.writeValueAsString(usuario);
        Date agora = new Date();
        Long hora = 1000L * 60L * 60L;
        return Jwts.builder().claim("userDetails", usuarioJson)
                .setIssuer("br.gov.sp.fatec")
                .setSubject(usuario.getNome())
                .setExpiration(new Date(agora.getTime() + hora))
                .signWith(SignatureAlgorithm.HS512, secretKey)
                .compact();
    }

    public static Usuario parseToken(String token)
            throws JsonParseException, JsonMappingException, IOException {
        ObjectMapper mapper = new ObjectMapper();
        String credentialsJson = Jwts.parser()
                .setSigningKey(secretKey)
                .parseClaimsJws(token)
                .getBody()
                .get("userDetails", String.class);
        return mapper.readValue(credentialsJson, Usuario.class);
    }
}
{ "content_hash": "5ce5707e961c6e7dd778ef962594700d", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 113, "avg_line_length": 36.93023255813954, "alnum_prop": 0.6693954659949622, "repo_name": "sHilgert/webcars", "id": "569b9466ac2387e5371068261db8a9d6a6df376f", "size": "1588", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "webcars-back/src/main/java/br/gov/sp/fatec/security/JwtUtils.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "168" }, { "name": "HTML", "bytes": "6936" }, { "name": "Java", "bytes": "31151" }, { "name": "JavaScript", "bytes": "7723" } ], "symlink_target": "" }
declare enum RequestContext { "audio", "beacon", "cspreport", "download", "embed", "eventsource", "favicon", "fetch", "font", "form", "frame", "hyperlink", "iframe", "image", "imageset", "import", "internal", "location", "manifest", "object", "ping", "plugin", "prefetch", "script", "serviceworker", "sharedworker", "subresource", "style", "track", "video", "worker", "xmlhttprequest", "xslt" } declare enum RequestMode { "same-origin", "no-cors", "cors" } declare enum RequestCredentials { "omit", "same-origin", "include" } declare enum RequestCache { "default", "no-store", "reload", "no-cache", "force-cache", "only-if-cached" } declare enum ResponseType { "basic", "cors", "default", "error", "opaque" } declare type HeaderInit = Headers | Array<string>; declare type BodyInit = Blob | FormData | string; declare type RequestInfo = Request | string; interface RequestInit { method?: string; headers?: HeaderInit | { [index: string]: string }; body?: BodyInit; mode?: string | RequestMode; credentials?: string | RequestCredentials; cache?: string | RequestCache; } interface IHeaders { get(name: string): string; getAll(name: string): Array<string>; has(name: string): boolean; } declare class Headers implements IHeaders { append(name: string, value: string): void; delete(name: string):void; get(name: string): string; getAll(name: string): Array<string>; has(name: string): boolean; set(name: string, value: string): void; } interface IBody { bodyUsed: boolean; arrayBuffer(): Promise<ArrayBuffer>; blob(): Promise<Blob>; formData(): Promise<FormData>; json(): Promise<any>; json<T>(): Promise<T>; text(): Promise<string>; } declare class Body implements IBody { bodyUsed: boolean; arrayBuffer(): Promise<ArrayBuffer>; blob(): Promise<Blob>; formData(): Promise<FormData>; json(): Promise<any>; json<T>(): Promise<T>; text(): Promise<string>; } interface IRequest extends IBody { method: string; url: string; headers: Headers; context: string | RequestContext; referrer: string; mode: string | RequestMode; credentials: string | RequestCredentials; cache: string | RequestCache; } declare class Request extends Body implements IRequest { constructor(input: string | Request, init?: RequestInit); method: string; url: string; headers: Headers; context: string | RequestContext; referrer: string; mode: string | RequestMode; credentials: string | RequestCredentials; cache: string | RequestCache; } interface IResponse extends IBody { url: string; status: number; statusText: string; ok: boolean; headers: IHeaders; type: string | ResponseType; size: number; timeout: number; redirect(url: string, status: number): IResponse; error(): IResponse; clone(): IResponse; } interface IFetchStatic { Promise: any; Headers: IHeaders Request: IRequest; Response: IResponse; (url: string | IRequest, init?: RequestInit): Promise<IResponse>; } declare module "isomorphic-fetch" { export default IFetchStatic; } declare var fetch: IFetchStatic;
{ "content_hash": "b8279df9854c60658776ce16728a9506", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 75, "avg_line_length": 28.42982456140351, "alnum_prop": 0.660290033940142, "repo_name": "trystanclarke/DefinitelyTyped", "id": "824c8a8ae012e9527b13d04f6b1288babb361513", "size": "3475", "binary": false, "copies": "14", "ref": "refs/heads/master", "path": "isomorphic-fetch/isomorphic-fetch.d.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "15" }, { "name": "HTML", "bytes": "308" }, { "name": "Protocol Buffer", "bytes": "678" }, { "name": "TypeScript", "bytes": "20329243" } ], "symlink_target": "" }
// ----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// ----------------------------------------------------------------------------

using System.Collections.Generic;
using System.Threading.Tasks;

namespace Microsoft.WindowsAzure.MobileServices
{
    internal class MobileServiceUIAuthentication : MobileServiceAuthentication
    {
        /// <summary>
        /// Instantiates a new instance of <see cref="MobileServiceUIAuthentication"/>.
        /// </summary>
        /// <param name="client">
        /// The client.
        /// </param>
        /// <param name="provider">
        /// The authentication provider.
        /// </param>
        /// <param name="parameters">
        /// Provider specific extra parameters that are sent as query string parameters to login endpoint.
        /// </param>
        public MobileServiceUIAuthentication(IMobileServiceClient client, string provider, IDictionary<string, string> parameters)
            : base(client, provider, parameters)
        {
        }

        /// <summary>
        /// Provides Login logic by showing a login UI.
        /// </summary>
        /// <returns>
        /// Task that will complete with the response string when the user has finished authentication.
        /// </returns>
        protected override Task<string> LoginAsyncOverride()
        {
            AuthenticationBroker broker = new AuthenticationBroker();
            return broker.AuthenticateAsync(this.StartUri, this.EndUri, false);
        }
    }
}
{ "content_hash": "d38ec4e3203f1b9ed7088742bf91a976", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 130, "avg_line_length": 38.142857142857146, "alnum_prop": 0.5593008739076155, "repo_name": "apuyana/azure-mobile-services", "id": "4eb4023dee0179b7385405c554ba1c13b5e777a2", "size": "1604", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sdk/Managed/src/Microsoft.WindowsAzure.MobileServices.WindowsStore/Authentication/MobileServiceUIAuthentication.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "110" }, { "name": "Batchfile", "bytes": "16004" }, { "name": "C#", "bytes": "3294461" }, { "name": "CSS", "bytes": "37465" }, { "name": "Groff", "bytes": "32858" }, { "name": "HTML", "bytes": "1905697" }, { "name": "Java", "bytes": "1346475" }, { "name": "JavaScript", "bytes": "2664514" }, { "name": "Objective-C", "bytes": "1319249" }, { "name": "PowerShell", "bytes": "5873" }, { "name": "Shell", "bytes": "15643" }, { "name": "Swift", "bytes": "11165" }, { "name": "Visual Basic", "bytes": "34079" } ], "symlink_target": "" }
* {
    -webkit-tap-highlight-color: rgba(0,0,0,0);
}

body {
    -webkit-touch-callout: none;    /* prevent callout to copy image, etc when tap to hold */
    -webkit-text-size-adjust: none; /* prevent webkit from resizing text to fit */
    -webkit-user-select: none;      /* prevent copy paste, to allow, change 'none' to 'text' */
    background-color: #E4E4E4;
    font-family: 'Open Sans', sans-serif;
    font-size: 48px;
    width: 100%;
    height: 100%;
    margin: 0px;
    padding: 0px;
    text-align: center;
}

button {
    font-size: 1.2em;
    padding: 0.6em 0.8em;
    margin-bottom: 1em;
}

select {
    font-size: 1.2em;
}

/* landscape */
@media screen and (min-aspect-ratio: 1/1) and (min-width:400px) {
}
{ "content_hash": "74c25282339f0fcea03135bc1a91d5b5", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 87, "avg_line_length": 21.5, "alnum_prop": 0.6497093023255814, "repo_name": "48design/phonegap-test", "id": "1de1f2c0b0c1c7ff441886f28588f6f9e1f9b3f7", "size": "688", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "css/reset-android.css", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2090" }, { "name": "JavaScript", "bytes": "90820" } ], "symlink_target": "" }
// @flow import Icon from '@conveyal/woonerf/components/icon' import React, {Component} from 'react' import {Alert, Button, ButtonGroup, ButtonToolbar, OverlayTrigger, Tooltip} from 'react-bootstrap' import ll from '@conveyal/lonlat' import numeral from 'numeral' import lineDistance from 'turf-line-distance' import lineString from 'turf-linestring' import * as activeActions from '../../actions/active' import * as mapActions from '../../actions/map' import {ARROW_MAGENTA, PATTERN_TO_STOP_DISTANCE_THRESHOLD_METERS} from '../../constants' import * as tripPatternActions from '../../actions/tripPattern' import OptionButton from '../../../common/components/OptionButton' import * as statusActions from '../../../manager/actions/status' import {polyline as getPolyline} from '../../../scenario-editor/utils/valhalla' import { controlPointsFromSegments, generateControlPointsFromPatternStops, getPatternDistance, isValidStopControlPoint } from '../../util/map' import type {ControlPoint, LatLng, Pattern, GtfsStop} from '../../../types' import type {EditSettingsUndoState} from '../../../types/reducers' import EditSettings from './EditSettings' type Props = { activePattern: Pattern, controlPoints: Array<ControlPoint>, editSettings: EditSettingsUndoState, patternSegment: number, resetActiveGtfsEntity: typeof activeActions.resetActiveGtfsEntity, saveActiveGtfsEntity: typeof activeActions.saveActiveGtfsEntity, setActivePatternSegment: typeof tripPatternActions.setActivePatternSegment, setErrorMessage: typeof statusActions.setErrorMessage, showConfirmModal: any, stops: Array<GtfsStop>, togglePatternEditing: typeof tripPatternActions.togglePatternEditing, undoActiveTripPatternEdits: typeof tripPatternActions.undoActiveTripPatternEdits, updateActiveGtfsEntity: typeof activeActions.updateActiveGtfsEntity, updateEditSetting: typeof activeActions.updateEditSetting, updatePatternGeometry: typeof mapActions.updatePatternGeometry, } export default class EditShapePanel extends Component<Props> { /** * Construct new pattern geometry from the pattern stop locations. */ async drawPatternFromStops (pattern: Pattern, stopsCoordinates: Array<LatLng>, followStreets: boolean): Promise<any> { const {editSettings, saveActiveGtfsEntity, setErrorMessage, updatePatternGeometry} = this.props let patternSegments = [] if (followStreets) { patternSegments = await getPolyline(stopsCoordinates, true, editSettings.present.avoidMotorways) } else { // Construct straight-line segments using stop coordinates stopsCoordinates .forEach((stop, i) => { if (i < stopsCoordinates.length - 1) { const segment = [ll.toCoordinates(stop), ll.toCoordinates(stopsCoordinates[i + 1])] patternSegments.push(segment) } }) } if (patternSegments && patternSegments.length > 0) { const controlPoints = controlPointsFromSegments(pattern.patternStops, patternSegments) updatePatternGeometry({ controlPoints, patternSegments }) saveActiveGtfsEntity('trippattern') return true } else { setErrorMessage({message: 'Error drawing pattern from stops! Some stops may be unreachable by streets.'}) return false } } _cancelEdits = () => { const {activePattern, resetActiveGtfsEntity, togglePatternEditing} = this.props if (this._hasEdits()) { if (!window.confirm('You have unsaved shape edits. 
Are you sure you want to cancel and revert these changes?')) { return } } togglePatternEditing() resetActiveGtfsEntity({ component: 'trippattern', entity: activePattern }) } _generateShapeFromStops = () => { const {activePattern, editSettings, stops} = this.props const stopLocations = stops && activePattern.patternStops && activePattern.patternStops.length ? activePattern.patternStops .map((s, index) => { const stop = stops.find(st => st.stop_id === s.stopId) if (!stop) { console.warn(`Could not locate stop with stop_id=${s.stopId}`) return {lng: 0, lat: 0} } return {lng: stop.stop_lon, lat: stop.stop_lat} }) : [] this.drawPatternFromStops(activePattern, stopLocations, editSettings.present.followStreets) } _confirmCreateFromStops = () => { const title = 'Create pattern shape from stops?' const onConfirm = this._generateShapeFromStops const body = this._hasShapePoints() ? 'Are you sure you want to overwrite the existing shape for this trip pattern?' : 'Are you sure you want to create an auto-generated shape for this trip pattern?' this.props.showConfirmModal({title, body, onConfirm}) } _deleteShape = () => { const { activePattern, saveActiveGtfsEntity, showConfirmModal, stops, updateActiveGtfsEntity, updatePatternGeometry } = this.props const shapeId = activePattern.shapeId || '(undefined)' showConfirmModal({ title: `Delete shape for trip pattern?`, body: `Are you sure you would like to delete this trip pattern shape (shape_id: ${shapeId})?`, onConfirm: () => { // FIXME: Do we need to update pattern geometry, too? updatePatternGeometry(generateControlPointsFromPatternStops(activePattern.patternStops, stops)) updateActiveGtfsEntity({ component: 'trippattern', entity: activePattern, props: {shapePoints: [], shapeId: null} }) saveActiveGtfsEntity('trippattern') } }) } /** * Checks the control points for stop control points that are located too far * from the actual stop location. This is used to give instructions to the * user on resolving the issue. */ _getPatternStopsWithShapeIssues = () => { const {controlPoints, stops} = this.props return controlPoints .filter(isValidStopControlPoint) .map((controlPoint, index) => { const {point, stopId} = controlPoint let exceedsThreshold = false const {coordinates: cpCoord} = point.geometry // Find stop entity for control point. const stop = stops.find(s => s.stop_id === stopId) if (!stop) { // If no stop entity found, do not attempt to draw a line to the // missing stop. return {controlPoint, index, stop: null, distance: 0, exceedsThreshold} } const coordinates = [[cpCoord[1], cpCoord[0]], [stop.stop_lat, stop.stop_lon]] const distance: number = lineDistance(lineString(coordinates), 'meters') exceedsThreshold = distance > PATTERN_TO_STOP_DISTANCE_THRESHOLD_METERS return { controlPoint, distance, exceedsThreshold, index, stop } }) // TODO: This can be removed if at some point we need to show stops where // the distance threshold is not exceeded. 
.filter(item => item.exceedsThreshold) } _beginEditing = () => { const {togglePatternEditing} = this.props togglePatternEditing() } _hasShapePoints = () => this.props.activePattern.shapePoints && this.props.activePattern.shapePoints.length > 0 save = () => { const {editSettings, saveActiveGtfsEntity, updateEditSetting} = this.props saveActiveGtfsEntity('trippattern') // $FlowFixMe action is actually wrapped in promise when connected .then(() => updateEditSetting({ setting: 'editGeometry', value: !editSettings.present.editGeometry })) } _hasEdits = () => this.props.editSettings.past.length > 0 render () { const { activePattern, controlPoints, // FIXME use to describe which segment user is editing patternSegment, editSettings: editSettingsState, setActivePatternSegment, updateEditSetting, undoActiveTripPatternEdits } = this.props const {present: editSettings} = editSettingsState const hasEdits = this._hasEdits() const fromStopsButton = <OverlayTrigger placement='bottom' overlay={ <Tooltip id='from-stops'>Generate pattern shape from stops</Tooltip> }> <Button onClick={this._confirmCreateFromStops} bsSize='small' style={{width: '102px'}}> <span><Icon type='map-marker' /> From stops</span> </Button> </OverlayTrigger> const dist = getPatternDistance(activePattern, controlPoints) const formattedShapeDistance = numeral(dist).format('0,0.00') const nextSegment = (!patternSegment && patternSegment !== 0) ? 0 : patternSegment + 1 const patternStopsWithShapeIssues = this._getPatternStopsWithShapeIssues() return ( <div> <h4 className='line'> Pattern shape {' '} ({formattedShapeDistance} miles) </h4> <div style={{margin: '5px 0'}}> {!activePattern.shapeId ? <small className='text-warning'> <Icon type='exclamation-triangle' />{' '} No shape associated with this pattern. </small> : <small> <span className='overflow' style={{width: '250px'}}> shape_id:{' '} <span title={activePattern.shapeId}>{activePattern.shapeId}</span> </span> <Button bsStyle='link' bsSize='small' style={{padding: '0 2px 10px 2px'}} title='Delete shape for pattern' onClick={this._deleteShape}> <span className='text-danger'><Icon type='trash' /></span> </Button> </small> } </div> {patternStopsWithShapeIssues.length > 0 ? <Alert bsStyle='warning' style={{fontSize: 'small'}}> <h4><Icon type='exclamation-triangle' /> Pattern stop snapping issue</h4> <ul className='list-unstyled' style={{marginBottom: '5px'}}> {patternStopsWithShapeIssues .map(item => { const {distance, index, stop} = item if (!stop) return null const roundedDist = Math.round(distance * 100) / 100 return ( <li key={index}> #{index + 1} {stop.stop_name}{' '} <span style={{color: 'red'}}> {roundedDist} m </span> </li> ) }) } </ul> <p> The stop(s) listed above are located too far (max = {PATTERN_TO_STOP_DISTANCE_THRESHOLD_METERS}{' '} meters) from the pattern shape. </p> <p> This can be resolved by: <ol> <li> moving the stop itself closer to the street's edge; </li> <li> changing where the stop is "snapped" to the shape: click{' '} <strong>Edit pattern geometry</strong>, uncheck{' '} <strong>Hide stop handles</strong>, and move the stop handle closer to the stop. Checking <strong>Hide inactive segments</strong>{' '} can help isolate the problematic stop handle; or </li> <li> regenerating the shape from existing stops: click{' '} <strong>From stops</strong>. </li> </ol> </p> </Alert> : null } {editSettings.editGeometry ? 
<div> <ButtonToolbar> <Button block style={{width: '167px'}} onClick={this._cancelEdits} bsSize='small'> <Icon type='ban' /> Cancel shape editing </Button> {fromStopsButton} </ButtonToolbar> <ButtonGroup style={{margin: '5px 0'}} block> <OptionButton onClick={setActivePatternSegment} value={patternSegment - 1} disabled={!patternSegment || patternSegment < 1} bsSize='xsmall'> <Icon type='caret-left' style={{color: 'blue'}} /> Prev </OptionButton> <OptionButton onClick={setActivePatternSegment} style={{minWidth: '165px', fontSize: '80%', padding: '2px 0'}} disabled={patternSegment >= controlPoints.length - 1} value={nextSegment} bsSize='xsmall'> {!patternSegment && patternSegment !== 0 ? `Click line to begin editing` : `Editing anchor ${patternSegment + 1} of ${controlPoints.length}` } </OptionButton> <OptionButton onClick={setActivePatternSegment} className='pull-right' value={nextSegment} disabled={patternSegment >= controlPoints.length - 1} bsSize='xsmall'> Next <Icon type='caret-right' style={{color: ARROW_MAGENTA}} /> </OptionButton> </ButtonGroup> <ButtonToolbar> <Button bsSize='small' disabled={!hasEdits} onClick={this.save}> <Icon type='check' /> Save </Button> <Button bsSize='small' disabled={!hasEdits} onClick={undoActiveTripPatternEdits}> <Icon type='undo' /> Undo </Button> </ButtonToolbar> <EditSettings editSettings={editSettings} patternSegment={patternSegment} updateEditSetting={updateEditSetting} /> </div> : <ButtonToolbar> <Button onClick={this._beginEditing} bsSize='small' style={{width: '167px'}} bsStyle='warning'> <span><Icon type='pencil' /> Edit pattern geometry</span> </Button> {fromStopsButton} </ButtonToolbar> } </div> ) } }
{ "content_hash": "ef8472e5e2761a93a48230cd02318210", "timestamp": "", "source": "github", "line_count": 381, "max_line_length": 120, "avg_line_length": 37.8503937007874, "alnum_prop": 0.5911517925247902, "repo_name": "conveyal/datatools-manager", "id": "9f72213de2132f6fa730b9989fb0695f0abc66fd", "size": "14421", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "lib/editor/components/pattern/EditShapePanel.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3101" }, { "name": "HTML", "bytes": "800" }, { "name": "Java", "bytes": "703335" }, { "name": "JavaScript", "bytes": "1068102" }, { "name": "Python", "bytes": "1466" } ], "symlink_target": "" }
import Koa from 'koa'; import staticFiles from 'koa-static'; import koaNodeResolve from 'koa-node-resolve'; import {URL} from 'url'; import * as path from 'path'; import {renderModule} from '../../lib/render-module.js'; import {Readable} from 'stream'; const {nodeResolve} = koaNodeResolve; const moduleUrl = new URL(import.meta.url); const packageRoot = path.resolve(moduleUrl.pathname, '../../..'); const port = 8080; // This is a fairly standard Koa server that represents how the SSR API might // be used. const app = new Koa(); app.use(async (ctx: Koa.Context, next: Function) => { // Pass through anything not the root path to static file serving if (ctx.URL.pathname !== '/') { await next(); return; } const ssrResult = await (renderModule( './app-server.js', import.meta.url, 'renderAppWithInitialData', [] ) as Promise<Iterable<unknown>>); ctx.type = 'text/html'; ctx.body = Readable.from(ssrResult); }); app.use(nodeResolve({})); app.use(staticFiles(packageRoot)); app.listen(port, () => { console.log(`Server listening on port ${port}`); });
{ "content_hash": "dc1cb5ae52890a8996d951953c58b5cf", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 77, "avg_line_length": 25.627906976744185, "alnum_prop": 0.6733212341197822, "repo_name": "Polymer/lit-html", "id": "a2eebf284664f38fa2897c9ce6b0f19da26b3745", "size": "1191", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "packages/labs/ssr/src/demo/vm-modules/server.ts", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "12772" }, { "name": "HTML", "bytes": "121659" }, { "name": "JavaScript", "bytes": "2109757" }, { "name": "Shell", "bytes": "648" }, { "name": "TypeScript", "bytes": "2339677" }, { "name": "Vue", "bytes": "6999" } ], "symlink_target": "" }
import { Component, ChangeDetectionStrategy, Output, EventEmitter, ChangeDetectorRef, ViewChild, AfterViewInit, OnDestroy, } from '@angular/core'; import { HttpClient } from '@angular/common/http'; import { Subscription } from 'rxjs'; import { AbstractOverlayContentDirective, InputComponent, OverlayInitService } from 'cd-common'; import * as consts from './sheets-dataset-modal.consts'; import * as cd from 'cd-interfaces'; const SHEETS_ENDPOINT_SHEET_ID = 'sheetId'; const SHEETS_ENDPOINT_TAB_ID = 'tabId'; @Component({ selector: 'app-sheets-dataset-modal', templateUrl: './sheets-dataset-modal.component.html', styleUrls: ['./sheets-dataset-modal.component.scss'], changeDetection: ChangeDetectionStrategy.OnPush, }) export class SheetsDatasetModalComponent extends AbstractOverlayContentDirective implements AfterViewInit, OnDestroy { private _subscriptions = new Subscription(); private _focusTimer = 0; public results: cd.IStringMap<any> = {}; public hasResults = false; // Required fields for fetch public SheetIdHelpText = consts.SHEET_ID_HELP_TEXT; public sheetId = ''; public TabIdHelpText = consts.TAB_ID_HELP_TEXT; public tabId = ''; // State of overlay public RequestState = consts.RequestState; public requestState = consts.RequestState.Default; public errorMessage = ''; @Output() saveData = new EventEmitter<consts.ICreateSheetsDataset>(); get loading() { return this.requestState === consts.RequestState.Loading; } get runDisabled() { const { loading, sheetId, tabId } = this; return loading || !sheetId || !tabId; } @ViewChild('sheetIdInputRef', { read: InputComponent }) sheetIdInputRef!: InputComponent; constructor( public overlayInit: OverlayInitService, private _httpClient: HttpClient, private _cdRef: ChangeDetectorRef ) { super(overlayInit); } ngOnDestroy() { this._subscriptions.unsubscribe(); window.clearTimeout(this._focusTimer); } ngAfterViewInit() { super.ngAfterViewInit(); // setTimeout is required here as other methods of autofocusing elements seem to not work this._focusTimer = window.setTimeout(() => { const input = this.sheetIdInputRef.inputRef.nativeElement; if (!input) return; input.focus(); input.select(); }, 100); } onSheetIdChange(sheetId: string) { this.sheetId = sheetId; this._runQuery(); this._cdRef.markForCheck(); } onTabIdChange(tabId: string) { this.tabId = tabId; this._runQuery(); this._cdRef.markForCheck(); } private _runQuery() { const { sheetId, tabId } = this; if (!sheetId || !tabId) return; this.errorMessage = ''; this.hasResults = false; this.requestState = consts.RequestState.Loading; this._getData(sheetId, tabId); } private _getData(sheetId: string, tabId: string) { const url = new URL(consts.SHEETS_ENDPOINT_BASE); url.searchParams.append(SHEETS_ENDPOINT_SHEET_ID, sheetId); url.searchParams.append(SHEETS_ENDPOINT_TAB_ID, tabId); this._subscriptions.add( this._httpClient .get<consts.SheetsResponse>(url.toString(), { withCredentials: true }) .subscribe(this._onDataResponse, this._onError) ); } private _onDataResponse = (results: consts.SheetsResponse) => { if (!results.ok) return this._onError(results.error); this.results = results.data; this.requestState = consts.RequestState.Default; this.hasResults = true; this._cdRef.markForCheck(); }; private _onError = (error: consts.SheetsError) => { this.requestState = consts.RequestState.Error; this.errorMessage = error.statusMessage; this._cdRef.markForCheck(); }; onRunClick() { this._runQuery(); } onSaveClick() { const { results, sheetId, tabId } = this; if (!results) return; this.saveData.emit({ tabId, sheetId, results 
}); } }
{ "content_hash": "3e31165e5c93f2e4ce841cec36d3f321", "timestamp": "", "source": "github", "line_count": 141, "max_line_length": 96, "avg_line_length": 27.72340425531915, "alnum_prop": 0.6904579176259913, "repo_name": "google/web-prototyping-tool", "id": "5913e6d2cea9499b2bcb0295398c176fea02efaa", "size": "4503", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "src/app/routes/project/components/panels/data-panel/components/sheets-dataset-modal/sheets-dataset-modal.component.ts", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "12684" }, { "name": "HTML", "bytes": "403011" }, { "name": "JavaScript", "bytes": "98960" }, { "name": "SCSS", "bytes": "552209" }, { "name": "TypeScript", "bytes": "5529342" } ], "symlink_target": "" }
@interface PKTCommentAPITests : XCTestCase @end @implementation PKTCommentAPITests - (void)testExample { NSURL *embedURL = [NSURL URLWithString:@"https://www.google.com"]; PKTRequest *request = [PKTCommentsAPI requestToAddCommentToObjectWithReferenceID:123 referenceType:PKTReferenceTypeItem value:@"Some text" files:@[@1, @2] embedID:222 embedURL:embedURL]; expect(request.path).to.equal(@"/comment/item/123/"); expect(request.parameters[@"value"]).to.equal(@"Some text"); expect(request.parameters[@"file_ids"]).to.equal(@[@1, @2]); expect(request.parameters[@"embed_id"]).to.equal(222); expect(request.parameters[@"embed_url"]).to.equal([embedURL absoluteString]); } @end
{ "content_hash": "5d64e8d7a43f057d767f982e26a055f8", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 102, "avg_line_length": 48.18181818181818, "alnum_prop": 0.4839622641509434, "repo_name": "podio/podio-objc", "id": "3a265d9814d1f0fcced94a1c0f8dbf6878b6296b", "size": "1272", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "PodioKitTests/PKTCommentAPITests.m", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "402" }, { "name": "Objective-C", "bytes": "754338" }, { "name": "Python", "bytes": "64138" }, { "name": "Ruby", "bytes": "7357" } ], "symlink_target": "" }
package arm import ( "fmt" "net/url" "strings" "github.com/hashicorp/packer/builder/azure/common/constants" "github.com/hashicorp/packer/packer" "github.com/mitchellh/multistep" ) type StepDeleteOSDisk struct { client *AzureClient delete func(string, string) error say func(message string) error func(e error) } func NewStepDeleteOSDisk(client *AzureClient, ui packer.Ui) *StepDeleteOSDisk { var step = &StepDeleteOSDisk{ client: client, say: func(message string) { ui.Say(message) }, error: func(e error) { ui.Error(e.Error()) }, } step.delete = step.deleteBlob return step } func (s *StepDeleteOSDisk) deleteBlob(storageContainerName string, blobName string) error { blob := s.client.BlobStorageClient.GetContainerReference(storageContainerName).GetBlobReference(blobName) err := blob.Delete(nil) if err != nil { s.say(s.client.LastError.Error()) } return err } func (s *StepDeleteOSDisk) Run(state multistep.StateBag) multistep.StepAction { s.say("Deleting the temporary OS disk ...") var osDisk = state.Get(constants.ArmOSDiskVhd).(string) var isManagedDisk = state.Get(constants.ArmIsManagedImage).(bool) if isManagedDisk { s.say(fmt.Sprintf(" -> OS Disk : skipping, managed disk was used...")) return multistep.ActionContinue } s.say(fmt.Sprintf(" -> OS Disk : '%s'", osDisk)) u, err := url.Parse(osDisk) if err != nil { s.say("Failed to parse the OS Disk's VHD URI!") return multistep.ActionHalt } xs := strings.Split(u.Path, "/") var storageAccountName = xs[1] var blobName = strings.Join(xs[2:], "/") err = s.delete(storageAccountName, blobName) return processStepResult(err, s.error, state) } func (*StepDeleteOSDisk) Cleanup(multistep.StateBag) { }
{ "content_hash": "16fe88c63638e8230821d9b4921bd4a8", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 106, "avg_line_length": 24.422535211267604, "alnum_prop": 0.7185697808535179, "repo_name": "deasmi/terraform-provider-libvirt", "id": "1dd28ec858563d2cfb231f1c20498dc36760a3be", "size": "1734", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "vendor/github.com/mitchellh/packer/builder/azure/arm/step_delete_os_disk.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "178528" }, { "name": "HTML", "bytes": "1555" }, { "name": "Makefile", "bytes": "508" }, { "name": "Shell", "bytes": "2852" } ], "symlink_target": "" }
require File.expand_path(File.dirname(__FILE__) + '/test_helper.rb') class TesetEnUsLocale < Test::Unit::TestCase def setup FactoryHelper::Config.locale = nil end def teardown FactoryHelper::Config.locale = nil end def test_us_phone_methods_return_nil_for_nil_locale assert_nil FactoryHelper::PhoneNumber.area_code assert_nil FactoryHelper::PhoneNumber.exchange_code end def test_subscriber_number_method assert FactoryHelper::PhoneNumber.subscriber_number.is_a? String assert_equal FactoryHelper::PhoneNumber.subscriber_number.length, 4 assert_equal FactoryHelper::PhoneNumber.subscriber_number(10).length, 10 assert_equal FactoryHelper::PhoneNumber.method(:extension), FactoryHelper::PhoneNumber.method(:subscriber_number) end def test_us_phone_methods_with_en_us_locale FactoryHelper::Config.locale = 'en-US' assert FactoryHelper::PhoneNumber.area_code.is_a? String assert FactoryHelper::PhoneNumber.area_code.to_i.is_a? Integer assert_equal FactoryHelper::PhoneNumber.area_code.length, 3 assert FactoryHelper::PhoneNumber.exchange_code.is_a? String assert FactoryHelper::PhoneNumber.exchange_code.to_i.is_a? Integer assert_equal FactoryHelper::PhoneNumber.exchange_code.length, 3 end def test_validity_of_phone_method_output FactoryHelper::Config.locale = 'en-US' # got the following regex from http://stackoverflow.com/a/123666/1210055 as an expression of the NANP standard. us_number_validation_regex = /^(?:(?:\+?1\s*(?:[.-]\s*)?)?(?:\(\s*([2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s*\)|([2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s*(?:[.-]\s*)?)?([2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s*(?:[.-]\s*)?([0-9]{4})(?:\s*(?:#|x\.?|ext\.?|extension)\s*(\d+))?$/ assert_match(us_number_validation_regex, FactoryHelper::PhoneNumber.phone_number) end def test_us_invalid_state_raises_exception FactoryHelper::Config.locale = 'en-US' assert_raise I18n::MissingTranslationData do FactoryHelper::Address.zip_code('NA') end end def test_us_zip_codes_match_state FactoryHelper::Config.locale = 'en-US' state_abbr = 'AZ' expected = /^850\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) # disjointed ranges for these states # http://www.fincen.gov/forms/files/us_state_territory_zip_codes.pdf state_abbr = 'AR' expected = /^717\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) state_abbr = 'GA' expected = /^301\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) state_abbr = 'MA' expected = /^026\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) state_abbr = 'NY' expected = /^122\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) state_abbr = 'TX' expected = /^798\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) state_abbr = 'VA' expected = /^222\d\d$/ assert_match(expected, FactoryHelper::Address.zip_code(state_abbr)) end end
{ "content_hash": "eceb9a62bbc583290da4bac8ef700ad9", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 296, "avg_line_length": 39.35443037974684, "alnum_prop": 0.689289160501769, "repo_name": "razorcd/factory-helper", "id": "e6db79090e0b8524fe3b9792449773ff6c72d005", "size": "3139", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "test/test_en_us_locale.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "138036" } ], "symlink_target": "" }
<?php declare(strict_types=1); /* * This file is part of PHP CS Fixer. * * (c) Fabien Potencier <[email protected]> * Dariusz Rumiński <[email protected]> * * This source file is subject to the MIT license that is bundled * with this source code in the file LICENSE. */ namespace PhpCsFixer\Tokenizer; /** * Representation of single token. * As a token prototype you should understand a single element generated by token_get_all. * * @author Dariusz Rumiński <[email protected]> */ final class Token { /** * Content of token prototype. * * @var string */ private $content; /** * ID of token prototype, if available. * * @var null|int */ private $id; /** * If token prototype is an array. * * @var bool */ private $isArray; /** * Flag is token was changed. * * @var bool */ private $changed = false; /** * @param array|string $token token prototype */ public function __construct($token) { if (\is_array($token)) { if (!\is_int($token[0])) { throw new \InvalidArgumentException(sprintf( 'Id must be an int, got "%s".', \is_object($token[0]) ? \get_class($token[0]) : \gettype($token[0]) )); } if (!\is_string($token[1])) { throw new \InvalidArgumentException(sprintf( 'Content must be a string, got "%s".', \is_object($token[1]) ? \get_class($token[1]) : \gettype($token[1]) )); } if ('' === $token[1]) { throw new \InvalidArgumentException('Cannot set empty content for id-based Token.'); } $this->isArray = true; $this->id = $token[0]; $this->content = $token[1]; } elseif (\is_string($token)) { $this->isArray = false; $this->content = $token; } else { throw new \InvalidArgumentException(sprintf( 'Cannot recognize input value as valid Token prototype, got "%s".', // @phpstan-ignore-next-line due to lack of strong typing of method parameter \is_object($token) ? \get_class($token) : \gettype($token) )); } } /** * @return int[] */ public static function getCastTokenKinds(): array { static $castTokens = [T_ARRAY_CAST, T_BOOL_CAST, T_DOUBLE_CAST, T_INT_CAST, T_OBJECT_CAST, T_STRING_CAST, T_UNSET_CAST]; return $castTokens; } /** * Get classy tokens kinds: T_CLASS, T_INTERFACE and T_TRAIT. * * @return int[] */ public static function getClassyTokenKinds(): array { static $classTokens = [T_CLASS, T_TRAIT, T_INTERFACE]; return $classTokens; } /** * Get object operator tokens kinds: T_OBJECT_OPERATOR and (if available) T_NULLSAFE_OBJECT_OPERATOR. * * @return int[] */ public static function getObjectOperatorKinds(): array { static $objectOperators = null; if (null === $objectOperators) { $objectOperators = [T_OBJECT_OPERATOR]; if (\defined('T_NULLSAFE_OBJECT_OPERATOR')) { $objectOperators[] = T_NULLSAFE_OBJECT_OPERATOR; } } return $objectOperators; } /** * Check if token is equals to given one. * * If tokens are arrays, then only keys defined in parameter token are checked. 
* * @param array|string|Token $other token or it's prototype * @param bool $caseSensitive perform a case sensitive comparison */ public function equals($other, bool $caseSensitive = true): bool { if (\defined('T_AMPERSAND_FOLLOWED_BY_VAR_OR_VARARG')) { // @TODO: drop condition with new MAJOR release 4.0 if ('&' === $other) { return '&' === $this->content && (null === $this->id || $this->isGivenKind([T_AMPERSAND_FOLLOWED_BY_VAR_OR_VARARG, T_AMPERSAND_NOT_FOLLOWED_BY_VAR_OR_VARARG])); } if (null === $this->id && '&' === $this->content) { return $other instanceof self && '&' === $other->content && (null === $other->id || $other->isGivenKind([T_AMPERSAND_FOLLOWED_BY_VAR_OR_VARARG, T_AMPERSAND_NOT_FOLLOWED_BY_VAR_OR_VARARG])); } } if ($other instanceof self) { // Inlined getPrototype() on this very hot path. // We access the private properties of $other directly to save function call overhead. // This is only possible because $other is of the same class as `self`. if (!$other->isArray) { $otherPrototype = $other->content; } else { $otherPrototype = [ $other->id, $other->content, ]; } } else { $otherPrototype = $other; } if ($this->isArray !== \is_array($otherPrototype)) { return false; } if (!$this->isArray) { return $this->content === $otherPrototype; } if ($this->id !== $otherPrototype[0]) { return false; } if (isset($otherPrototype[1])) { if ($caseSensitive) { if ($this->content !== $otherPrototype[1]) { return false; } } elseif (0 !== strcasecmp($this->content, $otherPrototype[1])) { return false; } } // detect unknown keys unset($otherPrototype[0], $otherPrototype[1]); return empty($otherPrototype); } /** * Check if token is equals to one of given. * * @param array $others array of tokens or token prototypes * @param bool $caseSensitive perform a case sensitive comparison */ public function equalsAny(array $others, bool $caseSensitive = true): bool { foreach ($others as $other) { if ($this->equals($other, $caseSensitive)) { return true; } } return false; } /** * A helper method used to find out whether a certain input token has to be case-sensitively matched. * * @param array<int, bool>|bool $caseSensitive global case sensitiveness or an array of booleans, whose keys should match * the ones used in $others. If any is missing, the default case-sensitive * comparison is used * @param int $key the key of the token that has to be looked up */ public static function isKeyCaseSensitive($caseSensitive, int $key): bool { if (\is_array($caseSensitive)) { return $caseSensitive[$key] ?? true; } return $caseSensitive; } /** * @return array|string token prototype */ public function getPrototype() { if (!$this->isArray) { return $this->content; } return [ $this->id, $this->content, ]; } /** * Get token's content. * * It shall be used only for getting the content of token, not for checking it against excepted value. */ public function getContent(): string { return $this->content; } /** * Get token's id. * * It shall be used only for getting the internal id of token, not for checking it against excepted value. */ public function getId(): ?int { return $this->id; } /** * Get token's name. * * It shall be used only for getting the name of token, not for checking it against excepted value. * * @return null|string token name */ public function getName(): ?string { if (null === $this->id) { return null; } return self::getNameForId($this->id); } /** * Get token's name. * * It shall be used only for getting the name of token, not for checking it against excepted value. 
* * @return null|string token name */ public static function getNameForId(int $id): ?string { if (CT::has($id)) { return CT::getName($id); } $name = token_name($id); return 'UNKNOWN' === $name ? null : $name; } /** * Generate array containing all keywords that exists in PHP version in use. * * @return array<int, int> */ public static function getKeywords(): array { static $keywords = null; if (null === $keywords) { $keywords = self::getTokenKindsForNames(['T_ABSTRACT', 'T_ARRAY', 'T_AS', 'T_BREAK', 'T_CALLABLE', 'T_CASE', 'T_CATCH', 'T_CLASS', 'T_CLONE', 'T_CONST', 'T_CONTINUE', 'T_DECLARE', 'T_DEFAULT', 'T_DO', 'T_ECHO', 'T_ELSE', 'T_ELSEIF', 'T_EMPTY', 'T_ENDDECLARE', 'T_ENDFOR', 'T_ENDFOREACH', 'T_ENDIF', 'T_ENDSWITCH', 'T_ENDWHILE', 'T_EVAL', 'T_EXIT', 'T_EXTENDS', 'T_FINAL', 'T_FINALLY', 'T_FN', 'T_FOR', 'T_FOREACH', 'T_FUNCTION', 'T_GLOBAL', 'T_GOTO', 'T_HALT_COMPILER', 'T_IF', 'T_IMPLEMENTS', 'T_INCLUDE', 'T_INCLUDE_ONCE', 'T_INSTANCEOF', 'T_INSTEADOF', 'T_INTERFACE', 'T_ISSET', 'T_LIST', 'T_LOGICAL_AND', 'T_LOGICAL_OR', 'T_LOGICAL_XOR', 'T_NAMESPACE', 'T_MATCH', 'T_NEW', 'T_PRINT', 'T_PRIVATE', 'T_PROTECTED', 'T_PUBLIC', 'T_REQUIRE', 'T_REQUIRE_ONCE', 'T_RETURN', 'T_STATIC', 'T_SWITCH', 'T_THROW', 'T_TRAIT', 'T_TRY', 'T_UNSET', 'T_USE', 'T_VAR', 'T_WHILE', 'T_YIELD', 'T_YIELD_FROM', 'T_READONLY', 'T_ENUM', ]) + [ CT::T_ARRAY_TYPEHINT => CT::T_ARRAY_TYPEHINT, CT::T_CLASS_CONSTANT => CT::T_CLASS_CONSTANT, CT::T_CONST_IMPORT => CT::T_CONST_IMPORT, CT::T_CONSTRUCTOR_PROPERTY_PROMOTION_PRIVATE => CT::T_CONSTRUCTOR_PROPERTY_PROMOTION_PRIVATE, CT::T_CONSTRUCTOR_PROPERTY_PROMOTION_PROTECTED => CT::T_CONSTRUCTOR_PROPERTY_PROMOTION_PROTECTED, CT::T_CONSTRUCTOR_PROPERTY_PROMOTION_PUBLIC => CT::T_CONSTRUCTOR_PROPERTY_PROMOTION_PUBLIC, CT::T_FUNCTION_IMPORT => CT::T_FUNCTION_IMPORT, CT::T_NAMESPACE_OPERATOR => CT::T_NAMESPACE_OPERATOR, CT::T_USE_LAMBDA => CT::T_USE_LAMBDA, CT::T_USE_TRAIT => CT::T_USE_TRAIT, ]; } return $keywords; } /** * Generate array containing all predefined constants that exists in PHP version in use. * * @see https://php.net/manual/en/language.constants.predefined.php * * @return array<int, int> */ public static function getMagicConstants(): array { static $magicConstants = null; if (null === $magicConstants) { $magicConstants = self::getTokenKindsForNames(['T_CLASS_C', 'T_DIR', 'T_FILE', 'T_FUNC_C', 'T_LINE', 'T_METHOD_C', 'T_NS_C', 'T_TRAIT_C']); } return $magicConstants; } /** * Check if token prototype is an array. * * @return bool is array */ public function isArray(): bool { return $this->isArray; } /** * Check if token is one of type cast tokens. */ public function isCast(): bool { return $this->isGivenKind(self::getCastTokenKinds()); } /** * Check if token is one of classy tokens: T_CLASS, T_INTERFACE or T_TRAIT. */ public function isClassy(): bool { return $this->isGivenKind(self::getClassyTokenKinds()); } /** * Check if token is one of comment tokens: T_COMMENT or T_DOC_COMMENT. */ public function isComment(): bool { static $commentTokens = [T_COMMENT, T_DOC_COMMENT]; return $this->isGivenKind($commentTokens); } /** * Check if token is one of object operator tokens: T_OBJECT_OPERATOR or T_NULLSAFE_OBJECT_OPERATOR. */ public function isObjectOperator(): bool { return $this->isGivenKind(self::getObjectOperatorKinds()); } /** * Check if token is one of given kind. * * @param int|int[] $possibleKind kind or array of kinds */ public function isGivenKind($possibleKind): bool { return $this->isArray && (\is_array($possibleKind) ? 
\in_array($this->id, $possibleKind, true) : $this->id === $possibleKind); } /** * Check if token is a keyword. */ public function isKeyword(): bool { $keywords = static::getKeywords(); return $this->isArray && isset($keywords[$this->id]); } /** * Check if token is a native PHP constant: true, false or null. */ public function isNativeConstant(): bool { static $nativeConstantStrings = ['true', 'false', 'null']; return $this->isArray && \in_array(strtolower($this->content), $nativeConstantStrings, true); } /** * Returns if the token is of a Magic constants type. * * @see https://php.net/manual/en/language.constants.predefined.php */ public function isMagicConstant(): bool { $magicConstants = static::getMagicConstants(); return $this->isArray && isset($magicConstants[$this->id]); } /** * Check if token is whitespace. * * @param null|string $whitespaces whitespace characters, default is " \t\n\r\0\x0B" */ public function isWhitespace(?string $whitespaces = " \t\n\r\0\x0B"): bool { if (null === $whitespaces) { $whitespaces = " \t\n\r\0\x0B"; } if ($this->isArray && !$this->isGivenKind(T_WHITESPACE)) { return false; } return '' === trim($this->content, $whitespaces); } public function toArray(): array { return [ 'id' => $this->id, 'name' => $this->getName(), 'content' => $this->content, 'isArray' => $this->isArray, 'changed' => $this->changed, ]; } public function toJson(): string { $jsonResult = json_encode($this->toArray(), JSON_PRETTY_PRINT | JSON_NUMERIC_CHECK); if (JSON_ERROR_NONE !== json_last_error()) { $jsonResult = json_encode( [ 'errorDescription' => 'Cannot encode Tokens to JSON.', 'rawErrorMessage' => json_last_error_msg(), ], JSON_PRETTY_PRINT | JSON_NUMERIC_CHECK ); } return $jsonResult; } /** * @param string[] $tokenNames * * @return array<int, int> */ private static function getTokenKindsForNames(array $tokenNames): array { $keywords = []; foreach ($tokenNames as $keywordName) { if (\defined($keywordName)) { $keyword = \constant($keywordName); $keywords[$keyword] = $keyword; } } return $keywords; } }
{ "content_hash": "2cbb3b26b7a7f253c3d11f7f6a0362ac", "timestamp": "", "source": "github", "line_count": 503, "max_line_length": 205, "avg_line_length": 30.435387673956264, "alnum_prop": 0.5338036449147561, "repo_name": "Slamdunk/PHP-CS-Fixer", "id": "a1b191217c8695f30b6827ea0c722e34e556974d", "size": "15311", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Tokenizer/Token.php", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "5280" }, { "name": "PHP", "bytes": "9087062" }, { "name": "Shell", "bytes": "5394" } ], "symlink_target": "" }
package org.switchyard.quickstarts.demos.orders; import org.switchyard.component.bean.Service; @Service(OrderService.class) public class OrderServiceBean implements OrderService { @Override public OrderAck submitOrder(Order order) { // Create an order ack OrderAck orderAck = new OrderAck().setOrderId(order.getOrderId()); orderAck.setAccepted(true).setStatus("Order Accepted"); return orderAck; } }
{ "content_hash": "4114b997c9109330705eba02b2b89ca3", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 74, "avg_line_length": 26.41176470588235, "alnum_prop": 0.7216035634743875, "repo_name": "cunningt/switchyard", "id": "ea90f16f0933bc9c823fbc738c2f5cb44c732398", "size": "1246", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "quickstarts/demos/webapp-deploy/src/main/java/org/switchyard/quickstarts/demos/orders/OrderServiceBean.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1387" }, { "name": "CSS", "bytes": "1428" }, { "name": "Clojure", "bytes": "239" }, { "name": "HTML", "bytes": "12878" }, { "name": "Java", "bytes": "9666412" }, { "name": "Ruby", "bytes": "1772" }, { "name": "XSLT", "bytes": "83579" } ], "symlink_target": "" }
FROM balenalib/raspberrypi3-alpine:edge-build # remove several traces of python RUN apk del python* # http://bugs.python.org/issue19846 # > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK. ENV LANG C.UTF-8 # key 63C7CC90: public key "Simon McVittie <[email protected]>" imported # key 3372DCFA: public key "Donald Stufft (dstufft) <[email protected]>" imported RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \ && gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \ && gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059 # point Python at a system-provided certificate database. Otherwise, we might hit CERTIFICATE_VERIFY_FAILED. # https://www.python.org/dev/peps/pep-0476/#trust-database ENV SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt ENV PYTHON_VERSION 3.8.12 # if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'" ENV PYTHON_PIP_VERSION 21.3.1 ENV SETUPTOOLS_VERSION 60.5.4 RUN set -x \ && curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" \ && echo "a9c035ae60c69723a518ec604de8e0cc39dde8f6f838946393e5999c9cdf3cba Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" | sha256sum -c - \ && tar -xzf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" --strip-components=1 \ && rm -rf "Python-$PYTHON_VERSION.linux-alpine-armv7hf-libffi3.3.tar.gz" \ && if [ ! -e /usr/local/bin/pip3 ]; then : \ && curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \ && echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \ && python3 get-pip.py \ && rm get-pip.py \ ; fi \ && pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \ && find /usr/local \ \( -type d -a -name test -o -name tests \) \ -o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \ -exec rm -rf '{}' + \ && cd / \ && rm -rf /usr/src/python ~/.cache # install "virtualenv", since the vast majority of users of this image will want it RUN pip3 install --no-cache-dir virtualenv ENV PYTHON_DBUS_VERSION 1.2.18 # install dbus-python dependencies RUN apk add --no-cache \ dbus-dev \ dbus-glib-dev # install dbus-python RUN set -x \ && mkdir -p /usr/src/dbus-python \ && curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \ && curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \ && gpg --verify dbus-python.tar.gz.asc \ && tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \ && rm dbus-python.tar.gz* \ && cd /usr/src/dbus-python \ && PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \ && make -j$(nproc) \ && make install -j$(nproc) \ && cd / \ && rm -rf /usr/src/dbus-python # make some useful symlinks that are expected to exist RUN cd /usr/local/bin \ && ln -sf pip3 pip \ && { [ -e easy_install ] || ln -s easy_install-* easy_install; } \ && ln -sf idle3 idle \ && ln -sf pydoc3 pydoc \ && ln -sf python3 python \ && ln -sf python3-config python-config CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. 
Here's the link: https://balena.io/docs"] RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \ && echo "Running test-stack@python" \ && chmod +x [email protected] \ && bash [email protected] \ && rm -rf [email protected] RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Alpine Linux edge \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.8.12, Pip v21.3.1, Setuptools v60.5.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info RUN echo $'#!/bin/bash\nbalena-info\nbusybox ln -sf /bin/busybox /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \ && chmod +x /bin/sh-shim \ && ln -f /bin/sh /bin/sh.real \ && ln -f /bin/sh-shim /bin/sh
{ "content_hash": "7af58e925b9c5f04fe51887af7b089e5", "timestamp": "", "source": "github", "line_count": 93, "max_line_length": 716, "avg_line_length": 51.795698924731184, "alnum_prop": 0.7083246834129125, "repo_name": "resin-io-library/base-images", "id": "92a3da9c7adc2570324dafa1b963674b7138a5f6", "size": "4838", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/python/raspberrypi3/alpine/edge/3.8.12/build/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "71234697" }, { "name": "JavaScript", "bytes": "13096" }, { "name": "Shell", "bytes": "12051936" }, { "name": "Smarty", "bytes": "59789" } ], "symlink_target": "" }
/*jslint nomen: true, unparam: true, regexp: true */ /*global define, window, document, URL, webkitURL, FileReader */ (function (factory) { 'use strict'; if (typeof define === 'function' && define.amd) { // Register as an anonymous AMD module: define([ 'jquery', 'tmpl', 'load-image', './jquery.fileupload-ip' ], factory); } else { // Browser globals: factory( window.jQuery, window.tmpl, window.loadImage ); } }(function ($, tmpl, loadImage) { 'use strict'; // The UI version extends the IP (image processing) version or the basic // file upload widget and adds complete user interface interaction: var parentWidget = ($.blueimpIP || $.blueimp).fileupload; $.widget('blueimpUI.fileupload', parentWidget, { options: { // By default, files added to the widget are uploaded as soon // as the user clicks on the start buttons. To enable automatic // uploads, set the following option to true: autoUpload: false, // The following option limits the number of files that are // allowed to be uploaded using this widget: maxNumberOfFiles: undefined, // The maximum allowed file size: maxFileSize: undefined, // The minimum allowed file size: minFileSize: undefined, // The regular expression for allowed file types, matches // against either file type or file name: acceptFileTypes: /.+$/i, // The regular expression to define for which files a preview // image is shown, matched against the file type: previewSourceFileTypes: /^image\/(gif|jpeg|png)$/, // The maximum file size of images that are to be displayed as preview: previewSourceMaxFileSize: 5000000, // 5MB // The maximum width of the preview images: previewMaxWidth: 80, // The maximum height of the preview images: previewMaxHeight: 80, // By default, preview images are displayed as canvas elements // if supported by the browser. Set the following option to false // to always display preview images as img elements: previewAsCanvas: true, // The ID of the upload template: uploadTemplateId: 'template-upload', // The ID of the download template: downloadTemplateId: 'template-download', // The expected data type of the upload response, sets the dataType // option of the $.ajax upload requests: dataType: 'json', // The add callback is invoked as soon as files are added to the fileupload // widget (via file input selection, drag & drop or add API call). // See the basic file upload widget for more information: add: function (e, data) { var that = $(this).data('fileupload'), options = that.options, files = data.files; $(this).fileupload('resize', data).done(data, function () { that._adjustMaxNumberOfFiles(-files.length); data.isAdjusted = true; data.files.valid = data.isValidated = that._validate(files); data.context = that._renderUpload(files) .appendTo(options.filesContainer) .data('data', data); that._renderPreviews(files, data.context); that._forceReflow(data.context); that._transition(data.context).done( function () { if ((that._trigger('added', e, data) !== false) && (options.autoUpload || data.autoUpload) && data.autoUpload !== false && data.isValidated) { data.submit(); } } ); }); }, // Callback for the start of each file upload request: send: function (e, data) { var that = $(this).data('fileupload'); if (!data.isValidated) { if (!data.isAdjusted) { that._adjustMaxNumberOfFiles(-data.files.length); } if (!that._validate(data.files)) { return false; } } if (data.context && data.dataType && data.dataType.substr(0, 6) === 'iframe') { // Iframe Transport does not support progress events. 
// In lack of an indeterminate progress bar, we set // the progress to 100%, showing the full animated bar: data.context .find('.progress').addClass( !$.support.transition && 'progress-animated' ) .find('.bar').css( 'width', parseInt(100, 10) + '%' ); } return that._trigger('sent', e, data); }, // Callback for successful uploads: done: function (e, data) { var that = $(this).data('fileupload'), template, preview; if (data.context) { data.context.each(function (index) { var file = ($.isArray(data.result) && data.result[index]) || {error: 'emptyResult'}; if (file.error) { that._adjustMaxNumberOfFiles(1); } that._transition($(this)).done( function () { var node = $(this); template = that._renderDownload([file]) .css('height', node.height()) .replaceAll(node); that._forceReflow(template); that._transition(template).done( function () { data.context = $(this); that._trigger('completed', e, data); } ); } ); }); } else { template = that._renderDownload(data.result) .appendTo(that.options.filesContainer); that._forceReflow(template); that._transition(template).done( function () { data.context = $(this); that._trigger('completed', e, data); } ); } }, // Callback for failed (abort or error) uploads: fail: function (e, data) { var that = $(this).data('fileupload'), template; that._adjustMaxNumberOfFiles(data.files.length); if (data.context) { data.context.each(function (index) { if (data.errorThrown !== 'abort') { var file = data.files[index]; file.error = file.error || data.errorThrown || true; that._transition($(this)).done( function () { var node = $(this); template = that._renderDownload([file]) .replaceAll(node); that._forceReflow(template); that._transition(template).done( function () { data.context = $(this); that._trigger('failed', e, data); } ); } ); } else { that._transition($(this)).done( function () { $(this).remove(); that._trigger('failed', e, data); } ); } }); } else if (data.errorThrown !== 'abort') { that._adjustMaxNumberOfFiles(-data.files.length); data.context = that._renderUpload(data.files) .appendTo(that.options.filesContainer) .data('data', data); that._forceReflow(data.context); that._transition(data.context).done( function () { data.context = $(this); that._trigger('failed', e, data); } ); } else { that._trigger('failed', e, data); } }, // Callback for upload progress events: progress: function (e, data) { if (data.context) { data.context.find('.progress .bar').css( 'width', parseInt(data.loaded / data.total * 100, 10) + '%' ); } }, // Callback for global upload progress events: progressall: function (e, data) { $(this).find('.fileupload-buttonbar .progress .bar').css( 'width', parseInt(data.loaded / data.total * 100, 10) + '%' ); }, // Callback for uploads start, equivalent to the global ajaxStart event: start: function (e) { var that = $(this).data('fileupload'); that._transition($(this).find('.fileupload-buttonbar .progress')).done( function () { that._trigger('started', e); } ); }, // Callback for uploads stop, equivalent to the global ajaxStop event: stop: function (e) { var that = $(this).data('fileupload'); that._transition($(this).find('.fileupload-buttonbar .progress')).done( function () { $(this).find('.bar').css('width', '0%'); that._trigger('stopped', e); } ); }, // Callback for file deletion: destroy: function (e, data) { var that = $(this).data('fileupload'); if (data.url) { $.ajax(data); } that._adjustMaxNumberOfFiles(1); that._transition(data.context).done( function () { $(this).remove(); that._trigger('destroyed', e, data); } ); } }, // Link handler, that 
allows to download files // by drag & drop of the links to the desktop: _enableDragToDesktop: function () { var link = $(this), url = link.prop('href'), name = link.prop('download'), type = 'application/octet-stream'; link.bind('dragstart', function (e) { try { e.originalEvent.dataTransfer.setData( 'DownloadURL', [type, name, url].join(':') ); } catch (err) {} }); }, _adjustMaxNumberOfFiles: function (operand) { if (typeof this.options.maxNumberOfFiles === 'number') { this.options.maxNumberOfFiles += operand; if (this.options.maxNumberOfFiles < 1) { this._disableFileInputButton(); } else { this._enableFileInputButton(); } } }, _formatFileSize: function (bytes) { if (typeof bytes !== 'number') { return ''; } if (bytes >= 1000000000) { return (bytes / 1000000000).toFixed(2) + ' GB'; } if (bytes >= 1000000) { return (bytes / 1000000).toFixed(2) + ' MB'; } return (bytes / 1000).toFixed(2) + ' KB'; }, _hasError: function (file) { if (file.error) { return file.error; } // The number of added files is subtracted from // maxNumberOfFiles before validation, so we check if // maxNumberOfFiles is below 0 (instead of below 1): if (this.options.maxNumberOfFiles < 0) { return 'maxNumberOfFiles'; } // Files are accepted if either the file type or the file name // matches against the acceptFileTypes regular expression, as // only browsers with support for the File API report the type: if (!(this.options.acceptFileTypes.test(file.type) || this.options.acceptFileTypes.test(file.name))) { return 'acceptFileTypes'; } if (this.options.maxFileSize && file.size > this.options.maxFileSize) { return 'maxFileSize'; } if (typeof file.size === 'number' && file.size < this.options.minFileSize) { return 'minFileSize'; } return null; }, _validate: function (files) { var that = this, valid = !!files.length; $.each(files, function (index, file) { file.error = that._hasError(file); if (file.error) { valid = false; } }); return valid; }, _renderTemplate: function (func, files) { if (!func) { return $(); } var result = func({ files: files, formatFileSize: this._formatFileSize, options: this.options }); if (result instanceof $) { return result; } return $(this.options.templatesContainer).html(result).children(); }, _renderPreview: function (file, node) { var that = this, options = this.options, deferred = $.Deferred(); return ((loadImage && loadImage( file, function (img) { node.append(img); that._forceReflow(node); that._transition(node).done(function () { deferred.resolveWith(node); }); if (!$.contains(document.body, node[0])) { // If the element is not part of the DOM, // transition events are not triggered, // so we have to resolve manually: deferred.resolveWith(node); } }, { maxWidth: options.previewMaxWidth, maxHeight: options.previewMaxHeight, canvas: options.previewAsCanvas } )) || deferred.resolveWith(node)) && deferred; }, _renderPreviews: function (files, nodes) { var that = this, options = this.options; nodes.find('.preview span').each(function (index, element) { var file = files[index]; if (options.previewSourceFileTypes.test(file.type) && ($.type(options.previewSourceMaxFileSize) !== 'number' || file.size < options.previewSourceMaxFileSize)) { that._processingQueue = that._processingQueue.pipe(function () { var deferred = $.Deferred(); that._renderPreview(file, $(element)).done( function () { deferred.resolveWith(that); } ); return deferred.promise(); }); } }); return this._processingQueue; }, _renderUpload: function (files) { return this._renderTemplate( this.options.uploadTemplate, files ); }, _renderDownload: 
function (files) { return this._renderTemplate( this.options.downloadTemplate, files ).find('a[download]').each(this._enableDragToDesktop).end(); }, _startHandler: function (e) { e.preventDefault(); var button = $(this), template = button.closest('.template-upload'), data = template.data('data'); if (data && data.submit && !data.jqXHR && data.submit()) { button.prop('disabled', true); } }, _cancelHandler: function (e) { e.preventDefault(); var template = $(this).closest('.template-upload'), data = template.data('data') || {}; if (!data.jqXHR) { data.errorThrown = 'abort'; e.data.fileupload._trigger('fail', e, data); } else { data.jqXHR.abort(); } }, _deleteHandler: function (e) { e.preventDefault(); var button = $(this); e.data.fileupload._trigger('destroy', e, { context: button.closest('.template-download'), url: button.attr('data-url'), type: button.attr('data-type') || 'DELETE', dataType: e.data.fileupload.options.dataType }); }, _forceReflow: function (node) { this._reflow = $.support.transition && node.length && node[0].offsetWidth; }, _transition: function (node) { var that = this, deferred = $.Deferred(); if ($.support.transition && node.hasClass('fade')) { node.bind( $.support.transition.end, function (e) { // Make sure we don't respond to other transitions events // in the container element, e.g. from button elements: if (e.target === node[0]) { node.unbind($.support.transition.end); deferred.resolveWith(node); } } ).toggleClass('in'); } else { node.toggleClass('in'); deferred.resolveWith(node); } return deferred; }, _initButtonBarEventHandlers: function () { var fileUploadButtonBar = this.element.find('.fileupload-buttonbar'), filesList = this.options.filesContainer, ns = this.options.namespace; fileUploadButtonBar.find('.start') .bind('click.' + ns, function (e) { e.preventDefault(); filesList.find('.start button').click(); }); fileUploadButtonBar.find('.cancel') .bind('click.' + ns, function (e) { e.preventDefault(); filesList.find('.cancel button').click(); }); fileUploadButtonBar.find('.delete') .bind('click.' + ns, function (e) { e.preventDefault(); filesList.find('.delete input:checked') .siblings('button').click(); fileUploadButtonBar.find('.toggle') .prop('checked', false); }); fileUploadButtonBar.find('.toggle') .bind('change.' + ns, function (e) { filesList.find('.delete input').prop( 'checked', $(this).is(':checked') ); }); }, _destroyButtonBarEventHandlers: function () { this.element.find('.fileupload-buttonbar button') .unbind('click.' + this.options.namespace); this.element.find('.fileupload-buttonbar .toggle') .unbind('change.' + this.options.namespace); }, _initEventHandlers: function () { parentWidget.prototype._initEventHandlers.call(this); var eventData = {fileupload: this}; this.options.filesContainer .delegate( '.start button', 'click.' + this.options.namespace, eventData, this._startHandler ) .delegate( '.cancel button', 'click.' + this.options.namespace, eventData, this._cancelHandler ) .delegate( '.delete button', 'click.' + this.options.namespace, eventData, this._deleteHandler ); this._initButtonBarEventHandlers(); }, _destroyEventHandlers: function () { var options = this.options; this._destroyButtonBarEventHandlers(); options.filesContainer .undelegate('.start button', 'click.' + options.namespace) .undelegate('.cancel button', 'click.' + options.namespace) .undelegate('.delete button', 'click.' 
+ options.namespace); parentWidget.prototype._destroyEventHandlers.call(this); }, _enableFileInputButton: function () { this.element.find('.fileinput-button input') .prop('disabled', false) .parent().removeClass('disabled'); }, _disableFileInputButton: function () { this.element.find('.fileinput-button input') .prop('disabled', true) .parent().addClass('disabled'); }, _initTemplates: function () { var options = this.options; options.templatesContainer = document.createElement( options.filesContainer.prop('nodeName') ); if (tmpl) { if (options.uploadTemplateId) { options.uploadTemplate = tmpl(options.uploadTemplateId); } if (options.downloadTemplateId) { options.downloadTemplate = tmpl(options.downloadTemplateId); } } }, _initFilesContainer: function () { var options = this.options; if (options.filesContainer === undefined) { options.filesContainer = this.element.find('.files'); } else if (!(options.filesContainer instanceof $)) { options.filesContainer = $(options.filesContainer); } }, _initSpecialOptions: function () { parentWidget.prototype._initSpecialOptions.call(this); this._initFilesContainer(); this._initTemplates(); }, _create: function () { parentWidget.prototype._create.call(this); this._refreshOptionsList.push( 'filesContainer', 'uploadTemplateId', 'downloadTemplateId' ); if (!$.blueimpIP) { this._processingQueue = $.Deferred().resolveWith(this).promise(); this.resize = function () { return this._processingQueue; }; } }, enable: function () { parentWidget.prototype.enable.call(this); this.element.find('input, button').prop('disabled', false); this._enableFileInputButton(); }, disable: function () { this.element.find('input, button').prop('disabled', true); this._disableFileInputButton(); parentWidget.prototype.disable.call(this); } }); }));
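The widget above is registered through the jQuery UI widget factory as `blueimpUI.fileupload`, so pages drive it purely through the option map documented in the comments at the top of the file. The following is a minimal initialization sketch, not part of the plugin itself: the `#fileupload` element id and the `/upload` endpoint are placeholder assumptions, and the option values simply echo the defaults described above.

```javascript
// Minimal initialization sketch. Assumes a container with id="fileupload"
// (with the .files list, button bar and templates the UI version expects)
// and a server endpoint at /upload; both are placeholders, not plugin defaults.
$(function () {
    $('#fileupload').fileupload({
        url: '/upload',                               // upload target (assumed, handled by the basic widget this extends)
        autoUpload: false,                            // wait for the start buttons
        maxNumberOfFiles: 10,
        maxFileSize: 5000000,                         // 5 MB
        acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i,  // images only, for illustration
        previewMaxWidth: 80,
        previewMaxHeight: 80
    });
});
```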
{ "content_hash": "57c965a69503f8688ca6649f3717bfe9", "timestamp": "", "source": "github", "line_count": 627, "max_line_length": 87, "avg_line_length": 40.71451355661882, "alnum_prop": 0.4391648386085866, "repo_name": "jeremyf/sufia", "id": "991dee545e2d7a67ce55afe2f529cd2bfbbcf4c4", "size": "25775", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "vendor/assets/javascripts/fileupload/jquery.fileupload-ui.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "43760" }, { "name": "JavaScript", "bytes": "21133" }, { "name": "Ruby", "bytes": "429562" } ], "symlink_target": "" }
![BCMagicTransition](https://github.com/boycechang/BCMagicTransition/blob/master/icon.png)

BCMagicTransition [![Build Status](https://travis-ci.org/boycechang/BCMagicTransition.svg?branch=master)](https://travis-ci.org/boycechang/BCMagicTransition) ![Version](https://cocoapod-badges.herokuapp.com/v/BCMagicTransition/badge.png) ![License](https://cocoapod-badges.herokuapp.com/l/BCMagicTransition/badge.(png|svg)) ![platform](https://cocoapod-badges.herokuapp.com/p/BCMagicTransition/badge.png)
=====================

![BCMagicTransition](https://github.com/boycechang/BCMagicTransition/blob/master/MagicTransition.gif)

**A MagicMove Style Custom UIViewController Transition**

**Version 1.0.5**

## Adding BCMagicTransition to your project

#### Requirements
* ARC only; iOS 7.0+

#### Get it as:

##### 1) source files
1. Download the BCMagicTransition repository as a zip file or clone it
2. Copy the BCMagicTransition files into your Xcode project

##### 2) via CocoaPods
BCMagicTransition is available on [CocoaPods](http://cocoapods.org). Just add the following to your project Podfile:

```ruby
pod 'BCMagicTransition'
```

If you want to read more about CocoaPods, have a look at [this short tutorial](http://www.raywenderlich.com/12139/introduction-to-cocoapods).

## Basic usage

```objective-c
#import "UIViewController+BCMagicTransition.h"

@interface MyViewController : UIViewController <BCMagicTransitionProtocol>
@end

@implementation MyViewController

- (void)push {
    // ... set up secondVC and collect the matching fromViews/toViews arrays ...
    [self pushViewController:secondVC fromViews:fromViews toViews:toViews duration:0.3];
}

@end
```

## Misc

Author: BoyceChang

If you like BCMagicTransition and use it, could you please:

* star this repo
* send me some feedback. Thanks!

#### License
This code is distributed under the terms and conditions of the MIT license.

#### Contribution guidelines
If you are fixing a bug you discovered, please also add a unit test so I know how exactly to reproduce the bug before merging.
{ "content_hash": "962a9704c92b6fd152a65e4429d3ab8c", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 405, "avg_line_length": 28.18840579710145, "alnum_prop": 0.7491002570694087, "repo_name": "boycechang/BCMagicTransition", "id": "ac701a72b557d99c87139c969ea4daf23754cff6", "size": "1945", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "23544" }, { "name": "Ruby", "bytes": "594" } ], "symlink_target": "" }
package com.example.mengmeng.activity; import android.content.Intent; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.TextView; public class Mine_SetActivity extends AppCompatActivity implements View.OnClickListener{ private TextView count_set; private TextView psd_set; private TextView address; private TextView photo_set; private TextView about; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_mine__set); count_set = ((TextView) findViewById(R.id.count_set)); count_set.setOnClickListener(this); psd_set = ((TextView) findViewById(R.id.psd_set)); psd_set.setOnClickListener(this); address = ((TextView) findViewById(R.id.address)); address.setOnClickListener(this); photo_set = ((TextView) findViewById(R.id.message_photo_set)); photo_set.setOnClickListener(this); about = ((TextView) findViewById(R.id.about)); about.setOnClickListener(this); } @Override public void onClick(View v) { switch (v.getId()){ case R.id.count_set: Intent intent = new Intent(getApplication(), Count_setActivity.class); intent.putExtra("userId",LoginInfo.userId+""); startActivity(intent); break; case R.id.psd_set: Intent intent1 = new Intent(getApplication(), Psd_setActivity.class); intent1.putExtra("userId",LoginInfo.userId+""); startActivity(intent1); break; case R.id.message_photo_set: Intent intent2=new Intent(this,SetQianmingActivity.class); intent2.putExtra("userId",LoginInfo.userId+""); startActivity(intent2); break; case R.id.address: Intent intent3=new Intent(this,SetAddressActivity.class); intent3.putExtra("userId",LoginInfo.userId+""); startActivity(intent3); break; case R.id.about: Intent intent4=new Intent(this,AboutActivity.class); startActivity(intent4); break; } } }
{ "content_hash": "826bfa96747b5b77730ea3b3cf2864ef", "timestamp": "", "source": "github", "line_count": 69, "max_line_length": 88, "avg_line_length": 34.34782608695652, "alnum_prop": 0.6122362869198312, "repo_name": "luxiansenv5/MengMeng", "id": "803c7d5582742bcee6aa21ca4afb27ad05041ea1", "size": "2370", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/example/mengmeng/activity/Mine_SetActivity.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "798002" } ], "symlink_target": "" }
var express = require('express'); var app = express(); // Load Express Configuration require('./express-config')(app, express); // Root route app.get('/', function(req, res){ res.sendfile('index.html', {root: app.settings.views}); }); // Load routes require('./routes/user')(app); //user routes require('./routes/session')(app); // session routes, mostly for authentication require('./routes/note')(app); // note routes require('./routes/category')(app); // category routes // Start the server //var server = app.listen(8000, function() { // console.log('Listening on port %d', server.address().port); //}); module.exports = app;
{ "content_hash": "95e801078e42a2582450c65590b1d373", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 78, "avg_line_length": 27.782608695652176, "alnum_prop": 0.672926447574335, "repo_name": "Danziger/Code-School-Staying-Sharp-with-Angular.js-Note-Wrangler", "id": "363032279e517315186da35b30d473004f21d28e", "size": "639", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "Level 2 - Directives with Scope/app.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "153871" }, { "name": "HTML", "bytes": "43437" }, { "name": "JavaScript", "bytes": "89418" } ], "symlink_target": "" }
package org.xins.common.service; import java.io.UnsupportedEncodingException; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.NoSuchElementException; import java.util.Set; import java.util.zip.CRC32; import org.xins.common.MandatoryArgumentChecker; import org.xins.common.Utils; import org.xins.common.text.HexConverter; import org.xins.common.text.TextUtils; /** * Descriptor for a single target service. A target descriptor defines a URL * that identifies the location of the service. Also, it may define 3 kinds of * time-outs: * * <dl> * <dt><em>total time-out</em> ({@link #getTotalTimeOut()})</dt> * <dd>the maximum duration of a call, including connection time, time used * to send the request, time used to receive the response, etc.</dd> * * <dt><em>connection time-out</em> ({@link #getConnectionTimeOut()})</dt> * <dd>the maximum time for attempting to establish a connection.</dd> * * <dt><em>socket time-out</em> ({@link #getSocketTimeOut()})</dt> * <dd>the maximum time for attempting to receive data on a socket.</dd> * </dl> * * @version $Revision: 1.62 $ $Date: 2007/05/21 08:34:42 $ * @author <a href="mailto:[email protected]">Ernst de Haan</a> * * @since XINS 1.0.0 */ public final class TargetDescriptor extends Descriptor { /** * The number of instances of this class. Initially 0. */ private static int INSTANCE_COUNT; /** * The default time-out when no time-out is specified. */ private static final int DEFAULT_TIMEOUT = 5000; /** * Collection of protocols that are known to require a hostname in URLs. */ private static final Set<String> PROTOCOLS_REQUIRING_HOST = new HashSet<String>(Arrays.asList(new String[] { "http", "https", "ftp", "sftp", "smtp", "smtps", "gopher" })); /** * Computes the CRC-32 checksum for the specified character string. * * @param s * the string for which to compute the checksum, not <code>null</code>. * * @return * the checksum for <code>s</code>. */ private static int computeCRC32(String s) { // Compute the CRC-32 checksum CRC32 checksum = new CRC32(); byte[] bytes; final String ENCODING = "US-ASCII"; try { bytes = s.getBytes(ENCODING); // Unsupported exception } catch (UnsupportedEncodingException exception) { throw Utils.logProgrammingError(exception); } checksum.update(bytes, 0, bytes.length); return (int) (checksum.getValue() & 0x00000000ffffffffL); } /** * Tests the specified URL string and throws an exception if it is * malformed. * * @param url * the string to test to see if it is a valid URL, * cannot be <code>null</code>. * * @throws IllegalArgumentException * if <code>url == null</code>. * * @throws MalformedURLException * if <code>url</code> is not a valid URL according to the official * format of a URL * (<a href="http://www.faqs.org/rfcs/rfc1738.html">RFC 1738</a>). */ private static void testURL(String url) throws IllegalArgumentException, MalformedURLException { // TODO: Add more detail to the exception message // Check preconditions MandatoryArgumentChecker.check("url", url); // The approach in this method is: the URL is considered valid until // proven otherwise boolean error = false; // Construct a URI object (which was introduced in Java 1.4). // An important difference with the java.net.URL class is that the // latter only supports protocols backed by a URLStreamHandler. URI uri; try { uri = new URI(url); // The URI/URL must be absolute error = ! uri.isAbsolute(); if (! 
error) { String protocol = uri.getScheme(); String host = uri.getHost(); // Some protocols are known to require a host name. Check this. if (PROTOCOLS_REQUIRING_HOST.contains(protocol) && TextUtils.isEmpty(host)) { error = true; } } // The URI constructor may throw a URISyntaxException, but we catch // other exceptions and errors just the same } catch (Throwable e) { error = true; } // If an error was detected, then perhaps the URL contains a // subprotocol. Test without the protocol part to see if that validates. // If it does, then the URL is still considered valid. if (error) { int i = url.indexOf(':'); if (i > 0) { try { testURL(url.substring(i + 1)); error = false; } catch (MalformedURLException e) { // empty } } } // If there was still an error at this point, then the URL is definitely // considered malformed. if (error) { throw new MalformedURLException(url); } } /** * The 1-based sequence number of this instance. Since this number is * 1-based, the first instance of this class will have instance number 1 * assigned to it. */ private final int _instanceNumber; /** * A textual representation of this object. Lazily initialized by * {@link #toString()} before returning it. */ private String _asString; /** * The URL for the service. Cannot be <code>null</code>. */ private final String _url; /** * The total time-out for the service. Is set to a 0 if no total time-out * should be applied. */ private final int _timeOut; /** * The connection time-out for the service. Always greater than 0 and * smaller than or equal to the total time-out. */ private final int _connectionTimeOut; /** * The socket time-out for the service. Always greater than 0 and smaller * than or equal to the total time-out. */ private final int _socketTimeOut; /** * The CRC-32 checksum for the URL. */ private final int _crc; /** * Constructs a new <code>TargetDescriptor</code> for the specified URL. * * <p>Note: Both the connection time-out and the socket time-out will be * set to the default time-out: 5 seconds. * * @param url * the URL of the service, cannot be <code>null</code>. * * @throws IllegalArgumentException * if <code>url == null</code>. * * @throws MalformedURLException * if the specified URL is malformed. */ public TargetDescriptor(String url) throws IllegalArgumentException, MalformedURLException { this(url, DEFAULT_TIMEOUT, DEFAULT_TIMEOUT, DEFAULT_TIMEOUT); } /** * Constructs a new <code>TargetDescriptor</code> for the specified URL, * with the specifed total time-out. * * <p>Note: Both the connection time-out and the socket time-out will be * set to equal the total time-out. * * @param url * the URL of the service, cannot be <code>null</code>. * * @param timeOut * the total time-out for the service, in milliseconds; or a * non-positive value for no total time-out. * * @throws IllegalArgumentException * if <code>url == null</code>. * * @throws MalformedURLException * if the specified URL is malformed. */ public TargetDescriptor(String url, int timeOut) throws IllegalArgumentException, MalformedURLException { this(url, timeOut, timeOut, timeOut); } /** * Constructs a new <code>TargetDescriptor</code> for the specified URL, * with the specifed total time-out and connection time-out. * * <p>Note: If the passed connection time-out is smaller than 1 ms, or * greater than the total time-out, then it will be adjusted to equal the * total time-out. * * <p>Note: The socket time-out will be set to equal the total time-out. * * @param url * the URL of the service, cannot be <code>null</code>. 
* * @param timeOut * the total time-out for the service, in milliseconds; or a * non-positive value for no total time-out. * * @param connectionTimeOut * the connection time-out for the service, in milliseconds; or a * non-positive value if the connection time-out should equal the total * time-out. * * @throws IllegalArgumentException * if <code>url == null</code>. * * @throws MalformedURLException * if the specified URL is malformed. */ public TargetDescriptor(String url, int timeOut, int connectionTimeOut) throws IllegalArgumentException, MalformedURLException { this(url, timeOut, connectionTimeOut, timeOut); } /** * Constructs a new <code>TargetDescriptor</code> for the specified URL, * with the specifed total time-out, connection time-out and socket * time-out. * * <p>Note: If the passed connection time-out is smaller than 1 ms, or * greater than the total time-out, then it will be adjusted to equal the * total time-out. * * <p>Note: If the passed socket time-out is smaller than 1 ms or greater * than the total time-out, then it will be adjusted to equal the total * time-out. * * @param url * the URL of the service, cannot be <code>null</code>. * * @param timeOut * the total time-out for the service, in milliseconds; or a * non-positive value for no total time-out. * * @param connectionTimeOut * the connection time-out for the service, in milliseconds; or a * non-positive value if the connection time-out should equal the total * time-out. * * @param socketTimeOut * the socket time-out for the service, in milliseconds; or a * non-positive value for no socket time-out. * * @throws IllegalArgumentException * if <code>url == null</code>. * * @throws MalformedURLException * if the specified URL is malformed. */ public TargetDescriptor(String url, int timeOut, int connectionTimeOut, int socketTimeOut) throws IllegalArgumentException, MalformedURLException { // Determine instance number first _instanceNumber = ++INSTANCE_COUNT; // Check preconditions MandatoryArgumentChecker.check("url", url); testURL(url); // Convert negative total time-out to 0 timeOut = (timeOut > 0) ? timeOut : 0; // If connection time-out or socket time-out is not set, then set it to // the total time-out connectionTimeOut = (connectionTimeOut > 0) ? connectionTimeOut : timeOut; socketTimeOut = (socketTimeOut > 0) ? socketTimeOut : timeOut; // If either connection or socket time-out is greater than total // time-out, then limit it to the total time-out connectionTimeOut = (connectionTimeOut < timeOut) ? connectionTimeOut : timeOut; socketTimeOut = (socketTimeOut < timeOut) ? socketTimeOut : timeOut; // Set fields _url = url; _timeOut = timeOut; _connectionTimeOut = connectionTimeOut; _socketTimeOut = socketTimeOut; _crc = computeCRC32(url); // NOTE: _asString is lazily initialized } /** * Checks if this descriptor denotes a group of descriptors. * * @return * <code>false</code>, since this descriptor does not denote a group. */ @Override public boolean isGroup() { return false; } /** * Returns the URL for the service. * * @return * the URL for the service, not <code>null</code>. */ public String getURL() { return _url; } /** * Returns the protocol in the URL for the service. * * @return * the protocol in the URL, not <code>null</code>. * * @since XINS 1.2.0 */ public String getProtocol() { int index = _url.indexOf("://"); return _url.substring(0, index); } /** * Returns the total time-out for a call to the service. The value 0 * is returned if there is no total time-out. 
* * @return * the total time-out for the service, as a positive number, in * milli-seconds, or 0 if there is no total time-out. */ public int getTotalTimeOut() { return _timeOut; } /** * Returns the connection time-out for a call to the service. * * @return * the connection time-out for the service; always greater than 0 and * smaller than or equal to the total time-out. */ public int getConnectionTimeOut() { return _connectionTimeOut; } /** * Returns the socket time-out for a call to the service. * * @return * the socket time-out for the service; always greater than 0 and * smaller than or equal to the total time-out. */ public int getSocketTimeOut() { return _socketTimeOut; } /** * Returns the CRC-32 checksum for the URL of this target descriptor. * * @return * the CRC-32 checksum. */ public int getCRC() { return _crc; } @Override public Collection<TargetDescriptor> targets() { return Collections.singleton(this); } /** * Counts the total number of target descriptors in/under this descriptor. * * @return * the total number of target descriptors, always 1. */ public int getTargetCount() { return 1; } /** * Returns the <code>TargetDescriptor</code> that matches the specified * CRC-32 checksum. * * @param crc * the CRC-32 checksum. * * @return * the {@link TargetDescriptor} that matches the specified checksum, or * <code>null</code>, if none could be found in this descriptor. */ public TargetDescriptor getTargetByCRC(int crc) { return (_crc == crc) ? this : null; } /** * Returns a hash code value for the object. * * @return * a hash code value for this object. * * @see Object#hashCode() * @see #equals(Object) */ public int hashCode() { return _crc; } /** * Indicates whether some other object is "equal to" this one. This method * considers <code>obj</code> equals if and only if it matches the * following conditions: * * <ul> * <li><code>obj instanceof TargetDescriptor</code> * <li>URL is equal * <li>total time-out is equal * <li>connection time-out is equal * <li>socket time-out is equal * </ul> * * @param obj * the reference object with which to compare. * * @return * <code>true</code> if this object is the same as the <code>obj</code> * argument; <code>false</code> otherwise. * * @see #hashCode() */ public boolean equals(Object obj) { boolean equal = false; if (obj instanceof TargetDescriptor) { TargetDescriptor that = (TargetDescriptor) obj; equal = (_url.equals(that._url)) && (_timeOut == that._timeOut) && (_connectionTimeOut == that._connectionTimeOut) && (_socketTimeOut == that._socketTimeOut); } return equal; } /** * Textual description of this object. The string includes the URL and all * time-out values. For example: * * <blockquote><code>TargetDescriptor(url="http://api.google.com/some_api/"; * total-time-out is 5300 ms; * connection time-out is 1000 ms; * socket time-out is disabled)</code></blockquote> * * @return * this <code>TargetDescriptor</code> as a {@link String}, never * <code>null</code>. 
*/ public String toString() { // Lazily initialize if (_asString == null) { StringBuffer buffer = new StringBuffer(233); buffer.append("TargetDescriptor #"); buffer.append(_instanceNumber); buffer.append(" [url=\""); buffer.append(_url); buffer.append("\"; crc=\""); buffer.append(HexConverter.toHexString(_crc)); buffer.append("\"; total time-out is "); if (_timeOut < 1) { buffer.append("disabled; connection time-out is "); } else { buffer.append(_timeOut); buffer.append(" ms; connection time-out is "); } if (_connectionTimeOut < 1) { buffer.append("disabled; socket time-out is "); } else { buffer.append(_connectionTimeOut); buffer.append(" ms; socket time-out is "); } if (_socketTimeOut < 1) { buffer.append("disabled]"); } else { buffer.append(_socketTimeOut); buffer.append(" ms]"); } _asString = buffer.toString(); } return _asString; } }
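As a quick illustration of the time-out semantics documented in the class above, here is a minimal, hypothetical usage sketch of `TargetDescriptor`. The class name, constructor signatures and getters are taken from the file itself; the URL and the concrete time-out values are made up for the example:

```java
import org.xins.common.service.TargetDescriptor;

public class TargetDescriptorExample {
   public static void main(String[] args) throws Exception {
      // Total time-out 5000 ms, connection time-out 1000 ms, socket time-out 2000 ms.
      TargetDescriptor target =
         new TargetDescriptor("http://api.example.com/", 5000, 1000, 2000);

      System.out.println(target.getURL());                // "http://api.example.com/"
      System.out.println(target.getTotalTimeOut());        // 5000
      System.out.println(target.getConnectionTimeOut());   // 1000
      System.out.println(target.getSocketTimeOut());       // 2000

      // A connection time-out larger than the total time-out is clamped
      // to the total time-out by the constructor.
      TargetDescriptor clamped =
         new TargetDescriptor("http://api.example.com/", 3000, 9000);
      System.out.println(clamped.getConnectionTimeOut());  // 3000
   }
}
```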
{ "content_hash": "d3bb4a08eb559734679646d6fab72af2", "timestamp": "", "source": "github", "line_count": 559, "max_line_length": 174, "avg_line_length": 30.63148479427549, "alnum_prop": 0.6107574607253402, "repo_name": "znerd/xins", "id": "d78714b6e007e9b9b340b5d5101ea60fdc02e001", "size": "17326", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/java/org/xins/common/service/TargetDescriptor.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "2555859" }, { "name": "JavaScript", "bytes": "4387" }, { "name": "Shell", "bytes": "3272" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>markov: Not compatible</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.13.0 / markov - 8.7.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> markov <small> 8.7.0 <span class="label label-info">Not compatible</span> </small> </h1> <p><em><script>document.write(moment("2021-10-30 10:24:50 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2021-10-30 10:24:50 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils conf-gmp 3 Virtual package relying on a GMP lib system installation coq 8.13.0 Formal proof management system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.12.0 The OCaml compiler (virtual package) ocaml-base-compiler 4.12.0 Official release 4.12.0 ocaml-config 2 OCaml Switch Configuration ocaml-options-vanilla 1 Ensure that OCaml is compiled with no special options enabled ocamlfind 1.9.1 A library manager for OCaml zarith 1.12 Implements arithmetic and logical operations over arbitrary-precision integers # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;https://github.com/coq-contribs/markov&quot; license: &quot;GNU Lesser Public License&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/Markov&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.7&quot; &amp; &lt; &quot;8.8~&quot;} ] tags: [ &quot;keyword: probability&quot; &quot;keyword: Markov&quot; &quot;keyword: Lebesgue integral&quot; &quot;keyword: sigma algebras&quot; &quot;keyword: measurability&quot; &quot;keyword: Borel&quot; &quot;category: Mathematics/Real Calculus and Topology&quot; &quot;date: 2008-01-5&quot; ] authors: [ &quot;Robert Kam &lt;[email protected]&gt; [none]&quot; ] bug-reports: &quot;https://github.com/coq-contribs/markov/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/markov.git&quot; synopsis: &quot;Markov&#39;s inequality&quot; description: &quot;A proof of Markov&#39;s inequality, restricted to probability spaces, based on the Wikipedia proof. 
Defines Lebesgue integral and associated concepts such as measurability, measure functions, and sigma algebras. Extended real numbers did not need to be defined because we are working in a probability space with measure 1. Nonconstructive; uses classic, Extensionality_Ensembles, axiomatized real numbers from Coq standard library.&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/markov/archive/v8.7.0.tar.gz&quot; checksum: &quot;md5=562f69118aadd4091485ae6698f2e582&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Dry install</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-markov.8.7.0 coq.8.13.0</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.13.0). The following dependencies couldn&#39;t be met: - coq-markov -&gt; coq &lt; 8.8~ -&gt; ocaml &lt; 4.10 base of this switch (use `--unlock-base&#39; to force) No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-markov.8.7.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> <small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small> </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "4e2b08a7cbb5e4142636df370201fadc", "timestamp": "", "source": "github", "line_count": 173, "max_line_length": 445, "avg_line_length": 43.27745664739884, "alnum_prop": 0.5583010551622812, "repo_name": "coq-bench/coq-bench.github.io", "id": "070bc48e3d8d5cdd173fba09a922900308b22e88", "size": "7489", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.12.0-2.0.8/released/8.13.0/markov/8.7.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
'use strict'; // JSLint /*global ADTECH, $1CRI, $include */ $include('./src/scripts/common/header.js'); $include('./src/scripts/common/utils.js'); $include('./src/scripts/common/settings.js'); $include('./src/scripts/common/imageContainer.js'); $include('./src/scripts/common/closeButton.js'); $include('./src/scripts/smartPlayer/core.js'); /* Core */ $1CRI.core = function() { this.settings = new $1CRI.Settings(); this.screenDimensions = {}; this.addCoreEventListeners(); this.requestViewportDimensions(); }; $1CRI.core.prototype = { init: function() { var self = this; self.container = document.createElement('div'); self.container.style.position = 'relative'; self.container.id = 'container'; document.body.appendChild(self.container); var dims = self.getScreenSize(); self.backgroundImage = new $1CRI.imageContainer(dims, self.settings.Image); self.backgroundImage.getImage().addEventListener('click', function() { self.clickHandler(); }); self.container.appendChild(self.backgroundImage.getImage()); if (self.settings.VideoPlayer.Enabled === true || self.settings.VideoPlayer.Enabled === 'true') { self.smartPlayer = new $1CRI.smartVideo.core(self.settings.VideoPlayer, self.container); } self.closeButton = new $1CRI.closeButton(self.settings.CloseButton); self.container.appendChild(self.closeButton.getButton()); self.closeButton.updatePosition(self.backgroundImage.getImage().width, 3); var containerInfo = self.container.getBoundingClientRect(); var positionInfo = self.backgroundImage.getImage().getBoundingClientRect(); //alert(containerInfo.width + ' : ' + positionInfo.width); self.container.style.left = ((containerInfo.width - positionInfo.width) / 2) + 'px'; self.container.style.top = 0; }, requestViewportDimensions: function() { ADTECH.event('viewport', {type:'request'}); }, addCoreEventListeners: function() { var self = this; ADTECH.addEventListener('viewport', function(event) { if (event.meta.type === 'response') { self.screenDimensions = event.meta.dims; self.init(); } else if (event.meta.type === 'update'){ self.screenDimensions = event.meta.dims; self.backgroundImage.updateSize(self.screenDimensions); self.smartPlayer.updateSize(); self.closeButton.updatePosition(self.backgroundImage.getImage().width, 3); var positionInfo = self.backgroundImage.getImage().getBoundingClientRect(); self.container.style.left = ((self.screenDimensions.w - positionInfo.width) / 2) + 'px'; } }); }, getScreenSize: function() { return this.screenDimensions; }, clickHandler: function() { ADTECH.dynamicClick('Clickthrough', this.settings.Clickthrough); } }; $include('./src/scripts/common/ready.js');
{ "content_hash": "3fa98a067197cf5f20df482bd3f20d77", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 101, "avg_line_length": 37.473684210526315, "alnum_prop": 0.6899578651685393, "repo_name": "nodexchange/gulp-project-starter", "id": "3977093139467b8d210778f7c61b01a9a23f7695", "size": "2848", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/scripts/core.js", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "76106" }, { "name": "Batchfile", "bytes": "95" }, { "name": "CSS", "bytes": "430" }, { "name": "HTML", "bytes": "45407" }, { "name": "JavaScript", "bytes": "874343" }, { "name": "Shell", "bytes": "68" } ], "symlink_target": "" }
var app = angular.module('helloApp.controllers', []); app.controller('helloCtrl', ['$scope', '$http', function ($scope, $http) { $scope.$watch('name', function () { if ($scope.name) { $http.get('/hello/' + $scope.name) .success(function (response) { $scope.helloResult = response.Result; }); } }); $scope.testFunction = function() { return true; } } ]);
{ "content_hash": "93979e1a592cc9d716b6e0d1e3c5f01b", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 65, "avg_line_length": 31.333333333333332, "alnum_prop": 0.40425531914893614, "repo_name": "Layoric/DiscourseSsoExample", "id": "e7d2a2179f2a9eb4e6f033cb3f149ed5c2f87ed1", "size": "566", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/DiscourseSsoExample1/DiscourseSsoExample1/js/hello/controllers.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "103" }, { "name": "C#", "bytes": "24117" }, { "name": "CSS", "bytes": "12" }, { "name": "HTML", "bytes": "323" }, { "name": "JavaScript", "bytes": "11542" }, { "name": "Shell", "bytes": "203" } ], "symlink_target": "" }
<!DOCTYPE html> <!-- Copyright 2016 The Chromium Authors. All rights reserved. Use of this source code is governed by a BSD-style license that can be found in the LICENSE file. --> <link rel="import" href="/tracing/core/test_utils.html"> <link rel="import" href="/tracing/extras/cpu/cpu_usage_auditor.html"> <link rel="import" href="/tracing/model/model.html"> <link rel="import" href="/tracing/model/thread_slice.html"> <link rel='import' href='/tracing/ui/base/constants.html'> <link rel='import' href='/tracing/ui/timeline_viewport.html'> <link rel="import" href="/tracing/ui/tracks/cpu_usage_track.html"> <link rel='import' href='/tracing/ui/tracks/drawing_container.html'> <script> 'use strict'; tr.b.unittest.testSuite(function() { var Model = tr.Model; var ThreadSlice = tr.model.ThreadSlice; var DIFF_EPSILON = 0.0001; // Input : slices is an array-of-array-of slices. Each top level array // represents a process. So, each slice in one of the top level array // will be placed in the same process. function buildModel(slices) { var model = tr.c.TestUtils.newModel(function(model) { var process = model.getOrCreateProcess(1); for (var i = 0; i < slices.length; i++) { var thread = process.getOrCreateThread(i); slices[i].forEach(s => thread.sliceGroup.pushSlice(s)); } }); var auditor = new tr.e.audits.CpuUsageAuditor(model); auditor.runAnnotate(); return model; } // Compare float arrays based on an epsilon since floating point arithmetic // is not always 100% accurate. function assertArrayValuesCloseTo(actualValue, expectedValue) { assert.lengthOf(actualValue, expectedValue.length); for (var i = 0; i < expectedValue.length; i++) { assert.closeTo(actualValue[i], expectedValue[i], DIFF_EPSILON); } } function createCpuUsageTrack(model, interval) { var div = document.createElement('div'); var viewport = new tr.ui.TimelineViewport(div); var drawingContainer = new tr.ui.tracks.DrawingContainer(viewport); div.appendChild(drawingContainer); var track = new tr.ui.tracks.CpuUsageTrack(drawingContainer.viewport); if (model !== undefined) { setDisplayTransformFromBounds(viewport, model.bounds); } track.initialize(model, interval); drawingContainer.appendChild(track); this.addHTMLOutput(drawingContainer); return track; } /** * Sets the mapping between the input range of timestamps and the output range * of horizontal pixels. */ function setDisplayTransformFromBounds(viewport, bounds) { var dt = new tr.ui.TimelineDisplayTransform(); var pixelRatio = window.devicePixelRatio || 1; var chartPixelWidth = (window.innerWidth - tr.ui.b.constants.HEADING_WIDTH) * pixelRatio; dt.xSetWorldBounds(bounds.min, bounds.max, chartPixelWidth); viewport.setDisplayTransformImmediately(dt); } test('computeCpuUsage_simple', function() { // Set the boundaries, from 0-15 ms. This slice will not // contain any CPU usage data, it's just to make the boundaries // of the bins go as 0-1, 1-2, 2-3, etc. This also tests whether // this function works properly in the presence of slices that // don't include CPU usage data. var bigSlice = new tr.model.ThreadSlice('', title, 0, 0, {}, 15); // First thread. // 0 5 10 15 // [ sliceA ] // [ sliceB ] [C ] var sliceA = new tr.model.ThreadSlice('', title, 0, 0.5, {}, 5); sliceA.cpuDuration = 5; var sliceB = new tr.model.ThreadSlice('', title, 0, 2.5, {}, 8); sliceB.cpuDuration = 6; // The slice completely fits into an interval and is the last. var sliceC = new tr.model.ThreadSlice('', title, 0, 12.5, {}, 2); sliceC.cpuDuration = 1; // Second thread. 
// 0 5 10 15 // [ sliceD ][ sliceE ] var sliceD = new tr.model.ThreadSlice('', title, 0, 3.5, {}, 3); sliceD.cpuDuration = 3; var sliceE = new tr.model.ThreadSlice('', title, 0, 6.5, {}, 6); sliceE.cpuDuration = 3; var model = buildModel([ [bigSlice, sliceA, sliceB, sliceC], [sliceD, sliceE] ]); // Compute average CPU usage over A (but not over B and C). var avgCpuUsageA = sliceA.cpuSelfTime / sliceA.selfTime; // Compute average CPU usage over B, C, D, E. They don't have subslices. var avgCpuUsageB = sliceB.cpuDuration / sliceB.duration; var avgCpuUsageC = sliceC.cpuDuration / sliceC.duration; var avgCpuUsageD = sliceD.cpuDuration / sliceD.duration; var avgCpuUsageE = sliceE.cpuDuration / sliceE.duration; var expectedValue = [ 0, avgCpuUsageA, avgCpuUsageA, avgCpuUsageA + avgCpuUsageB, avgCpuUsageA + avgCpuUsageB + avgCpuUsageD, avgCpuUsageA + avgCpuUsageB + avgCpuUsageD, avgCpuUsageB + avgCpuUsageD, avgCpuUsageB + avgCpuUsageE, avgCpuUsageB + avgCpuUsageE, avgCpuUsageB + avgCpuUsageE, avgCpuUsageB + avgCpuUsageE, avgCpuUsageE, avgCpuUsageE, avgCpuUsageC, avgCpuUsageC, 0 ]; var track = createCpuUsageTrack.call(this, model); var actualValue = track.series[0].points.map(point => point.y); assertArrayValuesCloseTo(actualValue, expectedValue); }); test('computeCpuUsage_longDurationThreadSlice', function() { // Create a slice covering 24 hours. var sliceA = new tr.model.ThreadSlice( '', title, 0, 0, {}, 24 * 60 * 60 * 1000); sliceA.cpuDuration = sliceA.duration * 0.25; var model = buildModel([[sliceA]]); var track = createCpuUsageTrack.call(this, model); var cpuSamples = track.series[0].points.map(point => point.y); // All except the last sample is 0.25, since sliceA.cpuDuration was set to // 0.25 of the total. for (var cpuSample of cpuSamples.slice(0, cpuSamples.length - 1)) { assert.closeTo(cpuSample, 0.25, DIFF_EPSILON); } // The last sample is 0. assert.closeTo(cpuSamples[cpuSamples.length - 1], 0, DIFF_EPSILON); }); test('instantiate', function() { var sliceA = new tr.model.ThreadSlice('', title, 0, 5.5111, {}, 47.1023); sliceA.cpuDuration = 25; var sliceB = new tr.model.ThreadSlice('', title, 0, 11.2384, {}, 1.8769); sliceB.cpuDuration = 1.5; var sliceC = new tr.model.ThreadSlice('', title, 0, 11.239, {}, 5.8769); sliceC.cpuDuration = 5; var sliceD = new tr.model.ThreadSlice('', title, 0, 48.012, {}, 5.01); sliceD.cpuDuration = 4; var model = buildModel([[sliceA, sliceB, sliceC, sliceD]]); createCpuUsageTrack.call(this, model); }); test('hasVisibleContent_trueWithThreadSlicePresent', function() { var sliceA = new tr.model.ThreadSlice('', title, 0, 48.012, {}, 5.01); sliceA.cpuDuration = 4; var model = buildModel([[sliceA]]); var track = createCpuUsageTrack.call(this, model); assert.isTrue(track.hasVisibleContent); }); test('hasVisibleContent_falseWithUndefinedProcessModel', function() { var track = createCpuUsageTrack.call(this, undefined); assert.isFalse(track.hasVisibleContent); }); test('hasVisibleContent_falseWithNoThreadSlice', function() { // model with a CPU and a thread but no ThreadSlice. 
var model = buildModel([]); var track = createCpuUsageTrack.call(this, model); assert.isFalse(track.hasVisibleContent); }); test('hasVisibleContent_trueWithSubSlices', function() { var sliceA = new tr.model.ThreadSlice('', title, 0, 5.5111, {}, 47.1023); sliceA.cpuDuration = 25; var sliceB = new tr.model.ThreadSlice('', title, 0, 11.2384, {}, 1.8769); sliceB.cpuDuration = 1.5; var model = buildModel([[sliceA, sliceB]]); var process = model.getProcess(1); // B will become lowest level slices of A. process.getThread(0).sliceGroup.createSubSlices(); assert.strictEqual( sliceA.cpuSelfTime, (sliceA.cpuDuration - sliceB.cpuDuration)); var track = createCpuUsageTrack.call(this, model); assert.isTrue(track.hasVisibleContent); }); }); </script>
{ "content_hash": "8a8c438cdabd2ace23b978fdbd582585", "timestamp": "", "source": "github", "line_count": 215, "max_line_length": 80, "avg_line_length": 37.55813953488372, "alnum_prop": 0.666625386996904, "repo_name": "sahiljain/catapult", "id": "1e972ab77751df9c27e475ce8936868479933af4", "size": "8075", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tracing/tracing/ui/tracks/cpu_usage_track_test.html", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "3598" }, { "name": "C++", "bytes": "6390" }, { "name": "CSS", "bytes": "24751" }, { "name": "HTML", "bytes": "14570791" }, { "name": "JavaScript", "bytes": "511007" }, { "name": "Python", "bytes": "5842419" }, { "name": "Shell", "bytes": "2834" } ], "symlink_target": "" }
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "4ec2934695c4fe51523c268b162e639d", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "a8835c9e51e631fdcd0fc15752b0fd274206a92a", "size": "182", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Magnoliales/Myristicaceae/Iryanthera/Iryanthera megistophylla/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<!-- Main Header --> <header class="main-header"> <!-- Logo --> <a href="{{ url('/home') }}" class="logo"> <!-- mini logo for sidebar mini 50x50 pixels --> <span class="logo-mini">{!! $layout->logoSmall !!}</span> <!-- logo for regular state and mobile devices --> <span class="logo-lg">{!! $layout->logoLarge !!}</span> </a> <!-- Header Navbar --> <nav class="navbar navbar-static-top" role="navigation"> @if($layout->showSidebar) <!-- Sidebar toggle button--> <a href="#" class="sidebar-toggle" data-toggle="offcanvas" role="button"> <span class="sr-only">Toggle navigation</span> </a> @endif <!-- Navbar Right Menu --> <div class="navbar-custom-menu"> <ul class="nav navbar-nav"> @if ($layout->headerMenus['show']) @foreach($layout->headerMenus['items'] as $menu) @include($menu) @endforeach @endif @if (Auth::guest()) <li><a href="{{ url('/login') }}">Login</a></li> <li><a href="{{ url('/register') }}">Register</a></li> @else <!-- User Account Menu --> <li class="dropdown user user-menu"> <!-- Menu Toggle Button --> <a href="#" class="dropdown-toggle" data-toggle="dropdown"> <!-- The user image in the navbar--> <img src="{{ $layout->profileImg }}" class="user-image avatar" alt="User Image"/> <!-- hidden-xs hides the username on small devices so only the image appears. --> <span class="hidden-xs">{{ Auth::user()->name }}</span> </a> <ul class="dropdown-menu"> <!-- The user image in the menu --> <li class="user-header"> <img src="{{ $layout->profileImg }}" class="img-circle avatar" alt="User Image" /> <p> {{ Auth::user()->name }} <small>Member since Nov. 2012</small> </p> </li> <!-- Menu Body --> <li class="user-body"> <div class="col-xs-4 text-center"> <a href="#">Followers</a> </div> <div class="col-xs-4 text-center"> <a href="#">Sales</a> </div> <div class="col-xs-4 text-center"> <a href="#">Friends</a> </div> </li> <!-- Menu Footer--> <li class="user-footer"> <div class="pull-left"> <a href="#" data-target="#user-profile" data-toggle="modal" class="btn btn-default btn-flat">Profile</a> </div> <div class="pull-right"> <a href="{{ url('/logout') }}" class="btn btn-default btn-flat">Sign out</a> </div> </li> </ul> </li> @endif @if($layout->showControlSidebar) <!-- Control Sidebar Toggle Button --> <li> <a href="#" data-toggle="control-sidebar"><i class="fa fa-gears"></i></a> </li> @endif </ul> </div> </nav> </header> @if (!Auth::guest()) <!-- profile edit form --> @include('adminlte::partials.profile') @endif
{ "content_hash": "b8412343f5c2252cb149877226b06e82", "timestamp": "", "source": "github", "line_count": 91, "max_line_length": 140, "avg_line_length": 46.032967032967036, "alnum_prop": 0.36762950584865123, "repo_name": "sakhunzai/adminlte-laravel", "id": "e45c45dc57b8fa70aaf37271457510a19cf73537", "size": "4189", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "resources/views/layouts/partials/mainheader.blade.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "105370" }, { "name": "JavaScript", "bytes": "26800" }, { "name": "PHP", "bytes": "100325" }, { "name": "Shell", "bytes": "477" } ], "symlink_target": "" }
using NSubstitute; using StructureMap.Pipeline; using Xunit; namespace StructureMap.Testing.Pipeline { public class TransientLifecycleTester { private readonly ILifecycleContext theContext; private readonly TransientLifecycle theLifecycle; private readonly ITransientTracking theCache; public TransientLifecycleTester() { theContext = Substitute.For<ILifecycleContext>(); theLifecycle = new TransientLifecycle(); theCache = Substitute.For<ITransientTracking>(); theContext.Transients.Returns(theCache); } [Fact] public void the_cache_is_from_the_transient_of_the_context() { theLifecycle.FindCache(theContext).ShouldBeTheSameAs(theCache); } [Fact] public void eject_all_delegates() { theLifecycle.EjectAll(theContext); theCache.Received().DisposeAndClear(); } } }
{ "content_hash": "369724b9a69f2befbb7fd18bdd5c34fd", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 75, "avg_line_length": 28.13888888888889, "alnum_prop": 0.6189536031589339, "repo_name": "DixonD-git/structuremap", "id": "16c13d805698490938e6164b46df5ec31ed88000", "size": "1015", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/StructureMap.Testing/Pipeline/TransientLifecycleTester.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "256" }, { "name": "C#", "bytes": "1639923" }, { "name": "CSS", "bytes": "3509" }, { "name": "HTML", "bytes": "1226515" }, { "name": "JavaScript", "bytes": "1432" }, { "name": "Ruby", "bytes": "3688" } ], "symlink_target": "" }
<?php /** * Immediate flush block. To be used only as root * * @author Magento Core Team <[email protected]> */ class Mage_Core_Block_Flush extends Mage_Core_Block_Abstract { protected function _toHtml() { if (!$this->_beforeToHtml()) { return ''; } ob_implicit_flush(); foreach ($this->getSortedChildren() as $name) { $block = $this->getLayout()->getBlock($name); if (!$block) { Mage::exception(Mage::helper('core')->__('Invalid block: %s', $name)); } echo $block->toHtml(); } } }
{ "content_hash": "9ed6447ca9cf82264173afc583e38803", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 86, "avg_line_length": 21.1, "alnum_prop": 0.5150078988941548, "repo_name": "garasiya/magento1910", "id": "6358432eb28b9ab72b9ae2980515b629b6d20afe", "size": "1569", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "app/code/core/Mage/Core/Block/Flush.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ActionScript", "bytes": "19946" }, { "name": "ApacheConf", "bytes": "6705" }, { "name": "Batchfile", "bytes": "1036" }, { "name": "CSS", "bytes": "1753629" }, { "name": "HTML", "bytes": "5223902" }, { "name": "JavaScript", "bytes": "1103124" }, { "name": "PHP", "bytes": "44372061" }, { "name": "PowerShell", "bytes": "1028" }, { "name": "Ruby", "bytes": "288" }, { "name": "Shell", "bytes": "2036" }, { "name": "XSLT", "bytes": "2135" } ], "symlink_target": "" }
require 'test_helper' module RailsApiDoc class ParameterTest < ActiveSupport::TestCase # test "the truth" do # assert true # end end end
{ "content_hash": "cdcd83925809303f537f2af1ec557cdd", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 47, "avg_line_length": 17.333333333333332, "alnum_prop": 0.6794871794871795, "repo_name": "Harkamal/rails_api_doc", "id": "7144726afc456bea13ad747125e003a5a68b401d", "size": "156", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/unit/rails_api_doc/parameter_test.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "202087" }, { "name": "JavaScript", "bytes": "2216" }, { "name": "Ruby", "bytes": "34859" } ], "symlink_target": "" }
@implementation UIView(Tag) @dynamic tagString; @dynamic tagClasses; - (NSString *)tagString { NSObject * obj = objc_getAssociatedObject( self, KEY_TAGSTRING ); if ( obj && [obj isKindOfClass:[NSString class]] ) return (NSString *)obj; return nil; } - (void)setTagString:(NSString *)value { objc_setAssociatedObject( self, KEY_TAGSTRING, value, OBJC_ASSOCIATION_RETAIN_NONATOMIC ); } - (NSMutableArray *)tagClasses { NSObject * obj = objc_getAssociatedObject( self, KEY_TAGCLASSES ); if ( obj && [obj isKindOfClass:[NSMutableArray class]] ) return (NSMutableArray *)obj; return nil; } - (void)setTagClasses:(NSMutableArray *)value { objc_setAssociatedObject( self, KEY_TAGCLASSES, value, OBJC_ASSOCIATION_RETAIN_NONATOMIC ); } - (UIView *)viewWithTagString:(NSString *)value { if ( nil == value ) return nil; for ( UIView * subview in self.subviews ) { NSString * tag = subview.tagString; if ( [tag isEqualToString:value] ) { return subview; } } return nil; } - (UIView *)viewWithTagPath:(NSString *)path { NSArray * array = [path componentsSeparatedByString:@"."]; if ( 0 == [array count] ) { return nil; } UIView * result = self; for ( NSString * subPath in array ) { if ( 0 == subPath.length ) continue; result = [result viewWithTagString:subPath]; if ( nil == result ) return nil; if ( [array lastObject] == subPath ) { return result; } else if ( NO == [result isKindOfClass:[UIView class]] ) { return nil; } } return result; } - (NSArray *)viewWithTagClass:(NSString *)value { NSMutableArray * result = [NSMutableArray nonRetainingArray]; for ( UIView * subview in self.subviews ) { NSMutableArray * classes = subview.tagClasses; for ( NSString * tagClass in classes ) { if ( NSOrderedSame == [tagClass compare:value options:NSCaseInsensitiveSearch] ) { [result addObject:subview]; break; } } } return result; } - (NSArray *)viewWithTagClasses:(NSArray *)array { NSMutableArray * result = [NSMutableArray nonRetainingArray]; for ( NSString * tagClass in array ) { NSArray * subResult = [self viewWithTagClass:tagClass]; if ( subResult && subResult.count ) { [result addObjectsFromArray:subResult]; } } return result; } - (NSArray *)viewWithTagMatchRegex:(NSString *)regex { if ( nil == regex ) return nil; NSMutableArray * array = [NSMutableArray nonRetainingArray]; for ( UIView * subview in self.subviews ) { NSString * tag = subview.tagString; if ( [tag match:regex] ) { [array addObject:subview]; } } return array; } @end #endif // #if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)
{ "content_hash": "899c9c50107413143c6fa3d112d9195d", "timestamp": "", "source": "github", "line_count": 140, "max_line_length": 92, "avg_line_length": 19.09285714285714, "alnum_prop": 0.6704077815188926, "repo_name": "haolloyin/iPhoneChartsDemo", "id": "98a8b3d7473172914df066809ac93b78015321aa", "size": "4252", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Demo/framework/application/mvc/view/dom-element/extension/UIView+Tag.m", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "124848" }, { "name": "Objective-C", "bytes": "1803064" }, { "name": "Python", "bytes": "32288" } ], "symlink_target": "" }
This project is a wrapper around Hadoop's s3a file system. By pulling a smaller dependency tree and
shading all dependencies away, this keeps the appearance of Flink being Hadoop-free,
from a dependency perspective.

We also relocate the shaded Hadoop version to allow running in a different setup. For this to work, however,
we needed to adapt Hadoop's `Configuration` class to load a (shaded) `core-default-shaded.xml` configuration
with the relocated class names of classes loaded via reflection
(in the future, we may need to extend this to `mapred-default.xml` and `hdfs-defaults.xml` and their
respective configuration classes).

# Changing the Hadoop Version

If you want to change the Hadoop version this project depends on, the following steps are required to keep the shading correct:

1. from the respective Hadoop jar (currently 2.8.1 as of the `s3hadoop.hadoop.version` property in our `pom.xml`),
   - copy `org/apache/hadoop/conf/Configuration.java` to `src/main/java/org/apache/hadoop/conf/` and
     - replace `core-default.xml` with `core-default-shaded.xml`.
   - copy `org/apache/hadoop/util/NativeCodeLoader.java` to `src/main/java/org/apache/hadoop/util/` and
     - replace the static initializer with
   ```
   static {
     LOG.info("Skipping native-hadoop library for flink-s3-fs-hadoop's relocated Hadoop... " +
              "using builtin-java classes where applicable");
   }
   ```
   - copy `core-default.xml` to `src/main/resources/core-default-shaded.xml` and
     - change every occurrence of `org.apache.hadoop` into `org.apache.flink.fs.s3hadoop.shaded.org.apache.hadoop`
   - copy `core-site.xml` to `src/test/resources/core-site.xml` (as is)

2. verify the shaded jar:
   - does not contain any unshaded classes except for `org.apache.flink.fs.s3hadoop.S3FileSystemFactory`
   - all other classes should be under `org.apache.flink.fs.s3hadoop.shaded`
   - there should be a `META-INF/services/org.apache.flink.core.fs.FileSystemFactory` file pointing to two classes: `org.apache.flink.fs.s3hadoop.S3FileSystemFactory` and `org.apache.flink.fs.s3hadoop.S3AFileSystemFactory`
   - other service files under `META-INF/services` should have their names and contents in the relocated `org.apache.flink.fs.s3hadoop.shaded` package
   - contains a `core-default-shaded.xml` file
   - does not contain a `core-default.xml` or `core-site.xml` file
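For the `Configuration.java` change in step 1, the edit essentially boils down to swapping the name of the default resource that Hadoop registers at class-load time. The following is only a hedged sketch of what that edit looks like, not the verbatim Hadoop code: `addDefaultResource` is a real static method of Hadoop's `Configuration`, but the exact surrounding context of the static initializer may differ between Hadoop versions.

```java
// Sketch of the change inside the copied org.apache.hadoop.conf.Configuration
// (assumption: the rest of the static initializer is kept as in the original).
static {
  // ... class-loader and deprecation setup left untouched ...

  // Upstream Hadoop registers "core-default.xml" here. The shaded copy
  // registers the relocated resource instead, so that class names loaded
  // via reflection resolve against org.apache.flink.fs.s3hadoop.shaded.*:
  addDefaultResource("core-default-shaded.xml");
  addDefaultResource("core-site.xml");
}
```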
{ "content_hash": "d7da2567cdb0d85f07a4e8ad6f92e670", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 221, "avg_line_length": 65.41666666666667, "alnum_prop": 0.7626326963906582, "repo_name": "zhangminglei/flink", "id": "f65ee61d189e8e27568b4d4fa1563530c026101b", "size": "2355", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "flink-filesystems/flink-s3-fs-hadoop/README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "5666" }, { "name": "CSS", "bytes": "18100" }, { "name": "Clojure", "bytes": "63105" }, { "name": "CoffeeScript", "bytes": "91220" }, { "name": "Dockerfile", "bytes": "3528" }, { "name": "HTML", "bytes": "86821" }, { "name": "Java", "bytes": "39364056" }, { "name": "JavaScript", "bytes": "8267" }, { "name": "Python", "bytes": "249644" }, { "name": "Scala", "bytes": "7201411" }, { "name": "Shell", "bytes": "282124" } ], "symlink_target": "" }
import json from dojo.models import Finding class SemgrepParser(object): def get_scan_types(self): return ["Semgrep JSON Report"] def get_label_for_scan_types(self, scan_type): return scan_type # no custom label for now def get_description_for_scan_types(self, scan_type): return "Import Semgrep output (--json)" def get_findings(self, filename, test): data = json.load(filename) dupes = dict() for item in data["results"]: finding = Finding( test=test, title=item["check_id"], severity=self.convert_severity(item["extra"]["severity"]), description=self.get_description(item), file_path=item['path'], line=item["start"]["line"], static_finding=True, dynamic_finding=False, vuln_id_from_tool=item["check_id"], nb_occurences=1, ) # manage CWE if 'cwe' in item["extra"]["metadata"]: finding.cwe = int(item["extra"]["metadata"].get("cwe").partition(':')[0].partition('-')[2]) # manage references from metadata if 'references' in item["extra"]["metadata"]: finding.references = "\n".join(item["extra"]["metadata"]["references"]) # manage mitigation from metadata if 'fix' in item["extra"]: finding.mitigation = item["extra"]["fix"] elif 'fix_regex' in item["extra"]: finding.mitigation = "\n".join([ "**You can automaticaly apply this regex:**", "\n```\n", json.dumps(item["extra"]["fix_regex"]), "\n```\n", ]) dupe_key = finding.title + finding.file_path + str(finding.line) if dupe_key in dupes: find = dupes[dupe_key] find.nb_occurences += 1 else: dupes[dupe_key] = finding return list(dupes.values()) def convert_severity(self, val): if "WARNING" == val.upper(): return "Low" elif "ERROR" == val.upper(): return "High" elif "INFO" == val.upper(): return "Info" else: raise ValueError(f"Unknown value for severity: {val}") def get_description(self, item): description = '' message = item["extra"]["message"] description += '**Result message:** {}\n'.format(message) snippet = item["extra"].get("lines") if snippet is not None: description += '**Snippet:**\n```{}```\n'.format(snippet) return description
{ "content_hash": "b83c70f58ac964667b35ce8d662044f3", "timestamp": "", "source": "github", "line_count": 85, "max_line_length": 107, "avg_line_length": 32.411764705882355, "alnum_prop": 0.5078039927404718, "repo_name": "rackerlabs/django-DefectDojo", "id": "684000da70f2c4d83e245f32294aaf89f5b5ebe7", "size": "2755", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dojo/tools/semgrep/parser.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "18132" }, { "name": "Groff", "bytes": "91" }, { "name": "HTML", "bytes": "666571" }, { "name": "JavaScript", "bytes": "6393" }, { "name": "Python", "bytes": "524728" }, { "name": "Shell", "bytes": "20558" }, { "name": "XSLT", "bytes": "6624" } ], "symlink_target": "" }
#ifndef HALIDE_INTERNAL_REGION_COSTS_H #define HALIDE_INTERNAL_REGION_COSTS_H /** \file * * Defines RegionCosts - used by the auto scheduler to query the cost of * computing some function regions. */ #include <set> #include <limits> #include "AutoScheduleUtils.h" #include "Interval.h" #include "Scope.h" namespace Halide { namespace Internal { struct Cost { // Estimate of cycles spent doing arithmetic. Expr arith; // Estimate of bytes loaded. Expr memory; Cost(int64_t arith, int64_t memory) : arith(arith), memory(memory) {} Cost(Expr arith, Expr memory) : arith(std::move(arith)), memory(std::move(memory)) {} Cost() {} inline bool defined() const { return arith.defined() && memory.defined(); } void simplify(); friend std::ostream& operator<<(std::ostream &stream, const Cost &c) { stream << "[arith: " << c.arith << ", memory: " << c.memory << "]"; return stream; } }; /** Auto scheduling component which is used to assign costs for computing a * region of a function or one of its stages. */ struct RegionCosts { /** An environment map which contains all functions in the pipeline. */ std::map<std::string, Function> env; /** A map containing the cost of computing a value in each stage of a * function. The number of entries in the vector is equal to the number of * stages in the function. */ std::map<std::string, std::vector<Cost>> func_cost; /** A map containing the types of all image inputs in the pipeline. */ std::map<std::string, Type> inputs; /** A scope containing the estimated min/extent values of ImageParams * in the pipeline. */ Scope<Interval> input_estimates; /** Return the cost of producing a region (specified by 'bounds') of a * function stage (specified by 'func' and 'stage'). 'inlines' specifies * names of all the inlined functions. */ Cost stage_region_cost(std::string func, int stage, const DimBounds &bounds, const std::set<std::string> &inlines = std::set<std::string>()); /** Return the cost of producing a region of a function stage (specified * by 'func' and 'stage'). 'inlines' specifies names of all the inlined * functions. */ Cost stage_region_cost(std::string func, int stage, const Box &region, const std::set<std::string> &inlines = std::set<std::string>()); /** Return the cost of producing a region of function 'func'. This adds up the * costs of all stages of 'func' required to produce the region. 'inlines' * specifies names of all the inlined functions. */ Cost region_cost(std::string func, const Box &region, const std::set<std::string> &inlines = std::set<std::string>()); /** Same as region_cost above but this computes the total cost of many * function regions. */ Cost region_cost(const std::map<std::string, Box> &regions, const std::set<std::string> &inlines = std::set<std::string>()); /** Compute the cost of producing a single value by one stage of 'f'. * 'inlines' specifies names of all the inlined functions. */ Cost get_func_stage_cost(const Function &f, int stage, const std::set<std::string> &inlines = std::set<std::string>()); /** Compute the cost of producing a single value by all stages of 'f'. * 'inlines' specifies names of all the inlined functions. This returns a * vector of costs. Each entry in the vector corresponds to a stage in 'f'. */ std::vector<Cost> get_func_cost(const Function &f, const std::set<std::string> &inlines = std::set<std::string>()); /** Computes the memory costs of computing a region (specified by 'bounds') * of a function stage (specified by 'func' and 'stage'). 
This returns a map * containing the costs incurred to access each of the functions required * to produce 'func'. */ std::map<std::string, Expr> stage_detailed_load_costs(std::string func, int stage, DimBounds &bounds, const std::set<std::string> &inlines = std::set<std::string>()); /** Return a map containing the costs incurred to access each of the functions * required to produce a single value of a function stage. */ std::map<std::string, Expr> stage_detailed_load_costs(std::string func, int stage, const std::set<std::string> &inlines = std::set<std::string>()); /** Same as stage_detailed_load_costs above but this computes the cost of a region * of 'func'. */ std::map<std::string, Expr> detailed_load_costs(std::string func, const Box &region, const std::set<std::string> &inlines = std::set<std::string>()); /** Same as detailed_load_costs above but this computes the cost of many function * regions and aggregates them. */ std::map<std::string, Expr> detailed_load_costs(const std::map<std::string, Box> &regions, const std::set<std::string> &inlines = std::set<std::string>()); /** Return the size of the region of 'func' in bytes. */ Expr region_size(std::string func, const Box &region); /** Return the size of the peak amount of memory allocated in bytes. This takes * the realization order of the function regions and the early free mechanism * into account while computing the peak footprint. */ Expr region_footprint(const std::map<std::string, Box> &regions, const std::set<std::string> &inlined = std::set<std::string>()); /** Return the size of the input region in bytes. */ Expr input_region_size(std::string input, const Box &region); /** Return the total size of the many input regions in bytes. */ Expr input_region_size(const std::map<std::string, Box> &input_regions); /** Display the cost of each function in the pipeline. */ void disp_func_costs(); /** Construct a region cost object for the pipeline. 'env' is a map of all * functions in the pipeline.*/ RegionCosts(const std::map<std::string, Function> &env); }; /** Return true if the cost of inlining a function is equivalent to the * cost of calling the function directly. */ bool is_func_trivial_to_inline(const Function &func); } } #endif
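For orientation, here is a minimal usage sketch of the interface declared above. It is not taken from the Halide sources: the helper name report_region_cost, the variables pipeline_env and output_region, and the assumption that <iostream> plus the usual Halide internal headers are available are all placeholders, and the code is meant to live inside the Halide::Internal namespace.

// Hypothetical helper showing how an auto-scheduler pass might query RegionCosts.
void report_region_cost(const std::map<std::string, Function> &pipeline_env,
                        const std::string &func_name,
                        const Box &output_region) {
    // Build the cost model once for the whole pipeline.
    RegionCosts costs(pipeline_env);

    // Arithmetic and memory cost of producing 'output_region' of 'func_name',
    // with no functions treated as inlined.
    Cost c = costs.region_cost(func_name, output_region);
    c.simplify();

    // Footprint of the same region in bytes.
    Expr bytes = costs.region_size(func_name, output_region);

    std::cout << func_name << " -> " << c << ", bytes: " << bytes << "\n";
}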
{ "content_hash": "f2d80b953233372087c5308e21f6be1e", "timestamp": "", "source": "github", "line_count": 144, "max_line_length": 100, "avg_line_length": 44.5, "alnum_prop": 0.6413857677902621, "repo_name": "Trass3r/Halide", "id": "db1897e04fd70ae536a58599bb159046d0e1ef92", "size": "6408", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/RegionCosts.h", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "7244" }, { "name": "C", "bytes": "366606" }, { "name": "C++", "bytes": "4728663" }, { "name": "CMake", "bytes": "81655" }, { "name": "HTML", "bytes": "9668" }, { "name": "Java", "bytes": "109499" }, { "name": "LLVM", "bytes": "92208" }, { "name": "Makefile", "bytes": "105829" }, { "name": "Matlab", "bytes": "543" }, { "name": "Objective-C", "bytes": "26335" }, { "name": "Objective-C++", "bytes": "23224" }, { "name": "PowerShell", "bytes": "11621" }, { "name": "Python", "bytes": "215248" }, { "name": "Shell", "bytes": "42577" } ], "symlink_target": "" }
package fs import ( "bytes" "fmt" "log" "os" "strings" "time" "github.com/hanwen/go-fuse/fuse" "github.com/hanwen/go-fuse/fuse/nodefs" "github.com/hanwen/go-mtpfs/mtp" ) const blockSize = 512 type DeviceFsOptions struct { // Assume removable volumes are VFAT and munge filenames // accordingly. RemovableVFat bool // Backing directory. Dir string // Use android extensions if available. Android bool } // DeviceFS implements a fuse.NodeFileSystem that mounts multiple // storages. type deviceFS struct { backingDir string delBackingDir bool root *rootNode dev *mtp.Device devInfo mtp.DeviceInfo storages []uint32 mungeVfat map[uint32]bool options *DeviceFsOptions } // DeviceFs is a simple filesystem interface to an MTP device. It // should be wrapped in a Locking(Raw)FileSystem to make sure it is // threadsafe. The file system assumes the device does not touch the // storage. Arguments are the opened mtp device and a directory for the // backing store. func NewDeviceFSRoot(d *mtp.Device, storages []uint32, options DeviceFsOptions) (nodefs.Node, error) { root := rootNode{Node: nodefs.NewDefaultNode()} fs := &deviceFS{ root: &root, dev: d, options: &options, } root.fs = fs fs.storages = storages if err := d.GetDeviceInfo(&fs.devInfo); err != nil { return nil, err } if !strings.Contains(fs.devInfo.MTPExtension, "android.com") { fs.options.Android = false } if !options.Android { if err := fs.setupClassic(); err != nil { return nil, err } } fs.mungeVfat = make(map[uint32]bool) for _, sid := range fs.storages { var info mtp.StorageInfo if err := fs.dev.GetStorageInfo(sid, &info); err != nil { return nil, err } fs.mungeVfat[sid] = info.IsRemovable() && fs.options.RemovableVFat } return fs.Root(), nil } func (fs *deviceFS) Root() nodefs.Node { return fs.root } func (fs *deviceFS) String() string { return fmt.Sprintf("deviceFS(%s)", fs.devInfo.Model) } func (fs *deviceFS) onMount() { for _, sid := range fs.storages { var info mtp.StorageInfo if err := fs.dev.GetStorageInfo(sid, &info); err != nil { log.Printf("GetStorageInfo %x: %v", sid, err) continue } obj := mtp.ObjectInfo{ ParentObject: NOPARENT_ID, StorageID: sid, Filename: info.StorageDescription, } folder := fs.newFolder(obj, NOPARENT_ID) fs.root.Inode().NewChild(info.StorageDescription, true, folder) } } // TODO - this should be per storage and return just the free space in // the storage. 
func (fs *deviceFS) newFile(obj mtp.ObjectInfo, size int64, id uint32) (node nodefs.Node) { if obj.CompressedSize != 0xFFFFFFFF { size = int64(obj.CompressedSize) } mNode := mtpNodeImpl{ Node: nodefs.NewDefaultNode(), obj: &obj, handle: id, fs: fs, Size: size, } if fs.options.Android { node = &androidNode{ mtpNodeImpl: mNode, } } else { node = &classicNode{ mtpNodeImpl: mNode, } } return node } type rootNode struct { nodefs.Node fs *deviceFS } const NOPARENT_ID = 0xFFFFFFFF func (n *rootNode) OnMount(conn *nodefs.FileSystemConnector) { n.fs.onMount() } func (n *rootNode) OnUnmount() { if n.fs.delBackingDir { os.RemoveAll(n.fs.options.Dir) n.fs.delBackingDir = false } } func (n *rootNode) StatFs() *fuse.StatfsOut { total := uint64(0) free := uint64(0) for _, ch := range n.Inode().Children() { if s := ch.Node().StatFs(); s != nil { total += s.Blocks free += s.Bfree } } return &fuse.StatfsOut{ Bsize: blockSize, Blocks: total, Bavail: free, Bfree: free, } } const forbidden = ":*?\"<>|" func SanitizeDosName(name string) string { if strings.IndexAny(name, forbidden) == -1 { return name } dest := make([]byte, len(name)) for i := 0; i < len(name); i++ { if strings.Contains(forbidden, string(name[i])) { dest[i] = '_' } else { dest[i] = name[i] } } return string(dest) } //////////////// // mtpNode type mtpNode interface { nodefs.Node Handle() uint32 StorageID() uint32 SetName(string) } type mtpNodeImpl struct { nodefs.Node // MTP handle. handle uint32 obj *mtp.ObjectInfo fs *deviceFS // This is needed because obj.CompressedSize only goes to // 0xFFFFFFFF Size int64 } func (n *mtpNodeImpl) StatFs() *fuse.StatfsOut { total := uint64(0) free := uint64(0) var info mtp.StorageInfo if err := n.fs.dev.GetStorageInfo(n.StorageID(), &info); err != nil { log.Printf("GetStorageInfo %x: %v", n.StorageID(), err) return nil } total += uint64(info.MaxCapability) free += uint64(info.FreeSpaceInBytes) return &fuse.StatfsOut{ Bsize: blockSize, Blocks: total / blockSize, Bavail: free / blockSize, Bfree: free / blockSize, } } func (n *mtpNodeImpl) GetAttr(out *fuse.Attr, file nodefs.File, context *fuse.Context) (code fuse.Status) { out.Mode = fuse.S_IFREG | 0644 f := n.obj if f != nil { out.Size = uint64(n.Size) t := f.ModificationDate out.SetTimes(&t, &t, &t) out.Blocks = (out.Size + blockSize - 1) / blockSize } return fuse.OK } func (n *mtpNodeImpl) Chown(file nodefs.File, uid uint32, gid uint32, context *fuse.Context) (code fuse.Status) { // Get rid of pesky messages from cp -a. return fuse.OK } func (n *mtpNodeImpl) Chmod(file nodefs.File, perms uint32, context *fuse.Context) (code fuse.Status) { // Get rid of pesky messages from cp -a. return fuse.OK } func (n *mtpNodeImpl) Utimens(file nodefs.File, aTime *time.Time, mTime *time.Time, context *fuse.Context) (code fuse.Status) { // Unfortunately, we can't set the modtime; it's READONLY in // the Android MTP implementation. We just change the time in // the mount, but this is not persisted. 
if mTime != nil { n.obj.ModificationDate = *mTime } return fuse.OK } func (n *mtpNodeImpl) Handle() uint32 { return n.handle } func (n *mtpNodeImpl) SetName(nm string) { n.obj.Filename = nm } func (n *mtpNodeImpl) StorageID() uint32 { return n.obj.StorageID } var _ = mtpNode((*folderNode)(nil)) //////////////// // files ////////////////// // folders type folderNode struct { mtpNodeImpl fetched bool } func (fs *deviceFS) newFolder(obj mtp.ObjectInfo, h uint32) *folderNode { obj.AssociationType = mtp.OFC_Association return &folderNode{ mtpNodeImpl: mtpNodeImpl{ Node: nodefs.NewDefaultNode(), handle: h, obj: &obj, fs: fs, }, } } // Keep the root nodes for all device storages alive. func (n *folderNode) Deletable() bool { return n.Handle() != NOPARENT_ID } // Fetches data from device returns false on failure. func (n *folderNode) fetch() bool { if n.fetched { return true } handles := mtp.Uint32Array{} if err := n.fs.dev.GetObjectHandles(n.StorageID(), 0x0, n.Handle(), &handles); err != nil { log.Printf("GetObjectHandles failed: %v", err) return false } infos := map[uint32]*mtp.ObjectInfo{} sizes := map[uint32]int64{} for _, handle := range handles.Values { obj := mtp.ObjectInfo{} if err := n.fs.dev.GetObjectInfo(handle, &obj); err != nil { log.Printf("GetObjectInfo for handle %d failed: %v", handle, err) continue } if obj.Filename == "" { log.Printf("ignoring handle 0x%x with empty name in dir 0x%x", handle, n.Handle()) continue } if obj.CompressedSize == 0xFFFFFFFF { var val mtp.Uint64Value if err := n.fs.dev.GetObjectPropValue(handle, mtp.OPC_ObjectSize, &val); err != nil { log.Printf("GetObjectPropValue handle %d failed: %v", handle, err) return false } sizes[handle] = int64(val.Value) } infos[handle] = &obj } for handle, info := range infos { var node nodefs.Node info.ParentObject = n.Handle() isdir := info.ObjectFormat == mtp.OFC_Association if isdir { fNode := n.fs.newFolder(*info, handle) node = fNode } else { sz := sizes[handle] node = n.fs.newFile(*info, sz, handle) } n.Inode().NewChild(info.Filename, isdir, node) } n.fetched = true return true } func (n *folderNode) OpenDir(context *fuse.Context) (stream []fuse.DirEntry, status fuse.Status) { if !n.fetch() { return nil, fuse.EIO } return n.Node.OpenDir(context) } func (n *folderNode) GetAttr(out *fuse.Attr, file nodefs.File, context *fuse.Context) (code fuse.Status) { out.Mode = fuse.S_IFDIR | 0755 return fuse.OK } func (n *folderNode) basenameRename(oldName string, newName string) error { ch := n.Inode().GetChild(oldName) mFile := ch.Node().(mtpNode) if mFile.Handle() != 0 { // Only rename on device if it was sent already. v := mtp.StringValue{Value: newName} if err := n.fs.dev.SetObjectPropValue(mFile.Handle(), mtp.OPC_ObjectFileName, &v); err != nil { return err } } n.Inode().RmChild(oldName) n.Inode().AddChild(newName, ch) return nil } func (n *folderNode) Rename(oldName string, newParent nodefs.Node, newName string, context *fuse.Context) (code fuse.Status) { fn, ok := newParent.(*folderNode) if !ok { return fuse.ENOSYS } fn.fetch() n.fetch() if f := n.Inode().GetChild(newName); f != nil { if fn != n { // TODO - delete destination? log.Printf("old folder already has child %q", newName) return fuse.ENOSYS } // does mtp overwrite the destination? 
} if fn != n { return fuse.ENOSYS } if newName != oldName { if err := n.basenameRename(oldName, newName); err != nil { log.Printf("basenameRename failed: %v", err) return fuse.EIO } } return fuse.OK } func (n *folderNode) Lookup(out *fuse.Attr, name string, context *fuse.Context) (node *nodefs.Inode, code fuse.Status) { if !n.fetch() { return nil, fuse.EIO } ch := n.Inode().GetChild(name) if ch == nil { return nil, fuse.ENOENT } return ch, ch.Node().GetAttr(out, nil, context) } func (n *folderNode) Mkdir(name string, mode uint32, context *fuse.Context) (*nodefs.Inode, fuse.Status) { if !n.fetch() { return nil, fuse.EIO } obj := mtp.ObjectInfo{ Filename: name, ObjectFormat: mtp.OFC_Association, ModificationDate: time.Now(), ParentObject: n.Handle(), StorageID: n.StorageID(), } if n.fs.mungeVfat[n.StorageID()] { obj.Filename = SanitizeDosName(obj.Filename) } _, _, newId, err := n.fs.dev.SendObjectInfo(n.StorageID(), n.Handle(), &obj) if err != nil { log.Printf("CreateFolder failed: %v", err) return nil, fuse.EIO } f := n.fs.newFolder(obj, newId) return n.Inode().NewChild(name, true, f), fuse.OK } func (n *folderNode) Unlink(name string, c *fuse.Context) fuse.Status { if !n.fetch() { return fuse.EIO } ch := n.Inode().GetChild(name) if ch == nil { return fuse.ENOENT } f := ch.Node().(mtpNode) if f.Handle() != 0 { if err := n.fs.dev.DeleteObject(f.Handle()); err != nil { log.Printf("DeleteObject failed: %v", err) return fuse.EIO } } else { f.SetName("") } n.Inode().RmChild(name) return fuse.OK } func (n *folderNode) Rmdir(name string, c *fuse.Context) fuse.Status { return n.Unlink(name, c) } func (n *folderNode) Create(name string, flags uint32, mode uint32, context *fuse.Context) (nodefs.File, *nodefs.Inode, fuse.Status) { if !n.fetch() { return nil, nil, fuse.EIO } obj := mtp.ObjectInfo{ StorageID: n.StorageID(), Filename: name, ObjectFormat: mtp.OFC_Undefined, ModificationDate: time.Now(), ParentObject: n.Handle(), CompressedSize: 0, } var file nodefs.File var fsNode nodefs.Node if n.fs.options.Android { _, _, handle, err := n.fs.dev.SendObjectInfo(n.StorageID(), n.Handle(), &obj) if err != nil { log.Println("SendObjectInfo failed", err) return nil, nil, fuse.EIO } if err = n.fs.dev.SendObject(&bytes.Buffer{}, 0); err != nil { log.Println("SendObject failed:", err) return nil, nil, fuse.EIO } aNode := &androidNode{ mtpNodeImpl: mtpNodeImpl{ Node: nodefs.NewDefaultNode(), obj: &obj, fs: n.fs, handle: handle, }, } if !aNode.startEdit() { return nil, nil, fuse.EIO } file = &androidFile{ File: nodefs.NewDefaultFile(), node: aNode, } fsNode = aNode } else { var err error file, fsNode, err = n.fs.createClassicFile(obj) if err != nil { return nil, nil, fuse.ToStatus(err) } } return file, n.Inode().NewChild(name, false, fsNode), fuse.OK }
{ "content_hash": "14cd17740b0ad10d3857033538f3b016", "timestamp": "", "source": "github", "line_count": 545, "max_line_length": 134, "avg_line_length": 22.39816513761468, "alnum_prop": 0.6576554435979356, "repo_name": "wisavalite/go-mtpfs", "id": "56d33d3665e79fe77e0b7d50c6dcf020f71efe39", "size": "12363", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "fs/fs.go", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Go", "bytes": "154688" }, { "name": "Python", "bytes": "1978" }, { "name": "Shell", "bytes": "245" } ], "symlink_target": "" }
package com.github.dozermapper.spring.functional_tests.support; import com.github.dozermapper.core.DozerConverter; import com.github.dozermapper.spring.vo.Destination; import com.github.dozermapper.spring.vo.Source; public class InjectedCustomConverter extends DozerConverter<Source, Destination> { private String injectedName; public InjectedCustomConverter() { super(Source.class, Destination.class); } @Override public Destination convertTo(Source source, Destination destination) { Destination result = new Destination(); result.setValue(injectedName); return result; } @Override public Source convertFrom(Destination source, Source destination) { return null; } public String getInjectedName() { return injectedName; } public void setInjectedName(String injectedName) { this.injectedName = injectedName; } }
{ "content_hash": "c089793a86fe49db73b7ed2cd2aa38f4", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 82, "avg_line_length": 26.571428571428573, "alnum_prop": 0.7204301075268817, "repo_name": "garethahealy/dozer", "id": "95a2c2060755eaeb3164ee1dc1cf30c3ede25b15", "size": "1532", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "dozer-integrations/dozer-spring-support/dozer-spring4/src/test/java/com/github/dozermapper/spring/functional_tests/support/InjectedCustomConverter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "2318876" }, { "name": "Shell", "bytes": "1703" } ], "symlink_target": "" }
#ifndef QUICKSTEP_QUERY_OPTIMIZER_TESTS_TEST_DATABASE_LOADER_HPP_ #define QUICKSTEP_QUERY_OPTIMIZER_TESTS_TEST_DATABASE_LOADER_HPP_ #include <string> #include "catalog/CatalogDatabase.hpp" #include "query_execution/QueryExecutionTypedefs.hpp" #ifdef QUICKSTEP_DISTRIBUTED #include "storage/StorageBlockInfo.hpp" #endif // QUICKSTEP_DISTRIBUTED #include "storage/StorageManager.hpp" #include "threading/ThreadIDBasedMap.hpp" #include "utility/Macros.hpp" #include "tmb/id_typedefs.h" #ifdef QUICKSTEP_DISTRIBUTED namespace tmb { class MessageBus; } #endif // QUICKSTEP_DISTRIBUTED namespace quickstep { class CatalogRelation; namespace optimizer { /** \addtogroup QueryOptimizer * @{ */ /** * @brief Creates and populate testing schema and data. */ class TestDatabaseLoader { public: /** * @brief Constructor. * * @param storage_path A filesystem directory where the blocks may be * evicted to during the execution of a test query. * Can be empty if the test query is not executed * in the query engine. */ explicit TestDatabaseLoader(const std::string &storage_path = "") : thread_id_map_(ClientIDMap::Instance()), catalog_database_(nullptr /* parent */, "TestDatabase" /* name */, 0 /* id */), storage_manager_(storage_path), test_relation_(nullptr) { init(); } #ifdef QUICKSTEP_DISTRIBUTED /** * @brief Constructor for the distributed version. * * @param storage_path A filesystem directory where the blocks may be * evicted to during the execution of a test query. * Can be empty if the test query is not executed * in the query engine. * @param block_domain The block_domain for StorageManager. * @param locator_client_id The client id of BlockLocator for StorageManager. * @param bus_global The Bus for StorageManager. */ TestDatabaseLoader(const std::string &storage_path, const block_id_domain block_domain, const tmb::client_id locator_client_id, tmb::MessageBus *bus_global) : thread_id_map_(ClientIDMap::Instance()), catalog_database_(nullptr /* parent */, "TestDatabase" /* name */, 0 /* id */), storage_manager_(storage_path, block_domain, locator_client_id, bus_global), test_relation_(nullptr) { init(); } #endif // QUICKSTEP_DISTRIBUTED ~TestDatabaseLoader() { clear(); thread_id_map_->removeValue(); } /** * @brief Gets the test database. * * @return The test database. */ CatalogDatabase* catalog_database() { return &catalog_database_; } /** * @brief Gets the storage manager. * * @return The storage manager. */ StorageManager* storage_manager() { return &storage_manager_; } /** * @brief Gets the test relation. * * @return The test relation. */ CatalogRelation* test_relation() { return test_relation_; } /** * @brief Creates a CatalogRelation with six columns (one column per type,): * int_col (nullable), long_col, float_col, double_col (nullable), * char_col, vchar_col (nullable). * If \p allow_vchar is false, vchar_col is not added. * @warning This can only be called once. * * @param allow_vchar Whether the VCHAR column should be added. * @return The test database. */ CatalogRelation* createTestRelation(bool allow_vchar); /** * @brief Creates four relations a(w INT, x INT, y INT, z INT), b(w INT, x INT), * c(x INT, y INT) and d(y INT, z INT) for testing JOINs. The created * relations are stored inside \p catalog_database_. * @warning This can only be called once. */ void createJoinRelations(); /** * @brief Loads data into the test relation. The test relation has 25 tuples. * Each tuple is * ((-1)^x*x, x^2, sqrt(x), (-1)^x*x*sqrt(x), * concat(string(int_col), string(float_col)). * where 0 <= x <= 24. 
   * Nullable attributes (int_col and double_col) have a NULL value for
   * every 10 tuples.
   * @warning This can only be called once and the test relation cannot have
   *          vchar_col, since the default layout does not support it.
   */
  void loadTestRelation();

  /**
   * @brief Removes all data and drops all relations from the database.
   */
  void clear();

 private:
  void init() {
    bus_.Initialize();

    const tmb::client_id worker_thread_client_id = bus_.Connect();
    bus_.RegisterClientAsSender(worker_thread_client_id, kCatalogRelationNewBlockMessage);

    // Refer to InsertDestination::sendBlockFilledMessage for the rationale
    // behind using ClientIDMap.
    thread_id_map_->addValue(worker_thread_client_id);

    scheduler_client_id_ = bus_.Connect();
    bus_.RegisterClientAsReceiver(scheduler_client_id_, kCatalogRelationNewBlockMessage);
  }

  /**
   * @brief Simulate Foreman to add all new blocks to the relation.
   */
  void processCatalogRelationNewBlockMessages();

  ClientIDMap *thread_id_map_;

  MessageBusImpl bus_;
  tmb::client_id scheduler_client_id_;

  CatalogDatabase catalog_database_;
  StorageManager storage_manager_;

  // Owned by catalog_database_.
  CatalogRelation* test_relation_;

  DISALLOW_COPY_AND_ASSIGN(TestDatabaseLoader);
};

/** @} */

}  // namespace optimizer
}  // namespace quickstep

#endif /* QUICKSTEP_QUERY_OPTIMIZER_TESTS_TEST_DATABASE_LOADER_HPP_ */
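As a rough sketch of the intended call sequence (illustrative only; the helper name SetUpTestDatabase is made up and error handling is omitted), a query optimizer test could drive the loader like this:

// Hypothetical test helper; assumes TestDatabaseLoader.hpp is included.
void SetUpTestDatabase(quickstep::optimizer::TestDatabaseLoader *loader) {
  // Create the six-column test relation without the VCHAR column ...
  loader->createTestRelation(false /* allow_vchar */);

  // ... and populate it with the 25 tuples described in loadTestRelation().
  loader->loadTestRelation();

  // Queries under test can now resolve the relation through the catalog.
  quickstep::CatalogDatabase *database = loader->catalog_database();
  quickstep::CatalogRelation *relation = loader->test_relation();
  (void)database;
  (void)relation;
}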
{ "content_hash": "b8aedd1781d25067d94d1a10c3ec0e33", "timestamp": "", "source": "github", "line_count": 187, "max_line_length": 90, "avg_line_length": 30.122994652406415, "alnum_prop": 0.6396236463696077, "repo_name": "cramja/incubator-quickstep", "id": "87c19c6f1227b7c90a49b014faab8999af4116ee", "size": "6442", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "query_optimizer/tests/TestDatabaseLoader.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "8868819" }, { "name": "CMake", "bytes": "635006" }, { "name": "Protocol Buffer", "bytes": "51411" }, { "name": "Python", "bytes": "33257" }, { "name": "Ruby", "bytes": "5352" }, { "name": "Shell", "bytes": "9617" } ], "symlink_target": "" }
/*-------------------------------------------------------------------------
  Copyright 2008 Sandia Corporation.
  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
  the U.S. Government retains certain rights in this software.
-------------------------------------------------------------------------*/
// .NAME vtkBoostBrandesCentrality - Compute Brandes betweenness centrality
// on a vtkGraph
// .SECTION Description
// This vtk class uses the Boost brandes_betweenness_centrality
// generic algorithm to compute betweenness centrality on
// the input graph (a vtkGraph).
// .SECTION See Also
// vtkGraph vtkBoostGraphAdapter

#ifndef vtkBoostBrandesCentrality_h
#define vtkBoostBrandesCentrality_h

#include "vtkInfovisBoostGraphAlgorithmsModule.h" // For export macro
#include "vtkVariant.h" // For variant type

#include "vtkGraphAlgorithm.h"

class VTKINFOVISBOOSTGRAPHALGORITHMS_EXPORT vtkBoostBrandesCentrality : public vtkGraphAlgorithm
{
public:
  static vtkBoostBrandesCentrality *New();
  vtkTypeMacro(vtkBoostBrandesCentrality, vtkGraphAlgorithm);
  void PrintSelf(ostream& os, vtkIndent indent);

  // Description:
  // Get/Set the flag that sets the rule whether or not to use the
  // edge weight array as set using \c SetEdgeWeightArrayName.
  vtkSetMacro(UseEdgeWeightArray, bool);
  vtkBooleanMacro(UseEdgeWeightArray, bool);

  vtkSetMacro(InvertEdgeWeightArray, bool);
  vtkBooleanMacro(InvertEdgeWeightArray, bool);

  // Description:
  // Get/Set the name of the array that needs to be used as the edge weight.
  // The array should be a vtkDataArray.
  vtkGetStringMacro(EdgeWeightArrayName);
  vtkSetStringMacro(EdgeWeightArrayName);

protected:
  vtkBoostBrandesCentrality();
  ~vtkBoostBrandesCentrality();

  int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);

private:
  bool UseEdgeWeightArray;
  bool InvertEdgeWeightArray;
  char* EdgeWeightArrayName;

  vtkBoostBrandesCentrality(const vtkBoostBrandesCentrality&) VTK_DELETE_FUNCTION;
  void operator=(const vtkBoostBrandesCentrality&) VTK_DELETE_FUNCTION;
};

#endif
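To make the intended use concrete, here is a short pipeline sketch. It is not part of VTK: graphSource stands for any upstream algorithm producing a vtkGraph, and the edge-weight array name "weights" is a placeholder.

// Hypothetical pipeline snippet; assumes an upstream vtkGraph producer.
#include "vtkBoostBrandesCentrality.h"
#include "vtkNew.h"

void ComputeCentrality(vtkAlgorithm *graphSource)
{
  vtkNew<vtkBoostBrandesCentrality> centrality;
  centrality->SetInputConnection(graphSource->GetOutputPort());

  // Optionally weight the edges by a vtkDataArray attached to the input graph.
  centrality->SetUseEdgeWeightArray(true);
  centrality->SetEdgeWeightArrayName("weights");

  centrality->Update();
  // The output graph now carries a per-vertex centrality array.
}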
{ "content_hash": "fc4a6de59cc7c3d1692971d1524ed989", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 96, "avg_line_length": 32.08955223880597, "alnum_prop": 0.7223255813953489, "repo_name": "keithroe/vtkoptix", "id": "2f71e2e0e5d4791aaaa9ce4ec5285a29ad95e72e", "size": "2745", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Infovis/BoostGraphAlgorithms/vtkBoostBrandesCentrality.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "37444" }, { "name": "Batchfile", "bytes": "106" }, { "name": "C", "bytes": "46217717" }, { "name": "C++", "bytes": "73779038" }, { "name": "CMake", "bytes": "1786055" }, { "name": "CSS", "bytes": "7532" }, { "name": "Cuda", "bytes": "37418" }, { "name": "D", "bytes": "2081" }, { "name": "GAP", "bytes": "14120" }, { "name": "GLSL", "bytes": "222494" }, { "name": "Groff", "bytes": "65394" }, { "name": "HTML", "bytes": "193016" }, { "name": "Java", "bytes": "148789" }, { "name": "JavaScript", "bytes": "54139" }, { "name": "Lex", "bytes": "50109" }, { "name": "M4", "bytes": "159710" }, { "name": "Makefile", "bytes": "275672" }, { "name": "Objective-C", "bytes": "22779" }, { "name": "Objective-C++", "bytes": "191216" }, { "name": "Perl", "bytes": "173168" }, { "name": "Prolog", "bytes": "4406" }, { "name": "Python", "bytes": "15765617" }, { "name": "Shell", "bytes": "88087" }, { "name": "Slash", "bytes": "1476" }, { "name": "Smarty", "bytes": "393" }, { "name": "Tcl", "bytes": "1404085" }, { "name": "Yacc", "bytes": "191144" } ], "symlink_target": "" }
// © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html #ifndef __RELDATEFMT_H #define __RELDATEFMT_H #include "unicode/utypes.h" #if U_SHOW_CPLUSPLUS_API #include "unicode/uobject.h" #include "unicode/udisplaycontext.h" #include "unicode/ureldatefmt.h" #include "unicode/locid.h" #include "unicode/formattedvalue.h" /** * \file * \brief C++ API: Formats relative dates such as "1 day ago" or "tomorrow" */ #if !UCONFIG_NO_FORMATTING /** * Represents the unit for formatting a relative date. e.g "in 5 days" * or "in 3 months" * @stable ICU 53 */ typedef enum UDateRelativeUnit { /** * Seconds * @stable ICU 53 */ UDAT_RELATIVE_SECONDS, /** * Minutes * @stable ICU 53 */ UDAT_RELATIVE_MINUTES, /** * Hours * @stable ICU 53 */ UDAT_RELATIVE_HOURS, /** * Days * @stable ICU 53 */ UDAT_RELATIVE_DAYS, /** * Weeks * @stable ICU 53 */ UDAT_RELATIVE_WEEKS, /** * Months * @stable ICU 53 */ UDAT_RELATIVE_MONTHS, /** * Years * @stable ICU 53 */ UDAT_RELATIVE_YEARS, #ifndef U_HIDE_DEPRECATED_API /** * One more than the highest normal UDateRelativeUnit value. * @deprecated ICU 58 The numeric value may change over time, see ICU ticket #12420. */ UDAT_RELATIVE_UNIT_COUNT #endif // U_HIDE_DEPRECATED_API } UDateRelativeUnit; /** * Represents an absolute unit. * @stable ICU 53 */ typedef enum UDateAbsoluteUnit { // Days of week have to remain together and in order from Sunday to // Saturday. /** * Sunday * @stable ICU 53 */ UDAT_ABSOLUTE_SUNDAY, /** * Monday * @stable ICU 53 */ UDAT_ABSOLUTE_MONDAY, /** * Tuesday * @stable ICU 53 */ UDAT_ABSOLUTE_TUESDAY, /** * Wednesday * @stable ICU 53 */ UDAT_ABSOLUTE_WEDNESDAY, /** * Thursday * @stable ICU 53 */ UDAT_ABSOLUTE_THURSDAY, /** * Friday * @stable ICU 53 */ UDAT_ABSOLUTE_FRIDAY, /** * Saturday * @stable ICU 53 */ UDAT_ABSOLUTE_SATURDAY, /** * Day * @stable ICU 53 */ UDAT_ABSOLUTE_DAY, /** * Week * @stable ICU 53 */ UDAT_ABSOLUTE_WEEK, /** * Month * @stable ICU 53 */ UDAT_ABSOLUTE_MONTH, /** * Year * @stable ICU 53 */ UDAT_ABSOLUTE_YEAR, /** * Now * @stable ICU 53 */ UDAT_ABSOLUTE_NOW, /** * Quarter * @stable ICU 63 */ UDAT_ABSOLUTE_QUARTER, /** * Hour * @stable ICU 65 */ UDAT_ABSOLUTE_HOUR, /** * Minute * @stable ICU 65 */ UDAT_ABSOLUTE_MINUTE, #ifndef U_HIDE_DEPRECATED_API /** * One more than the highest normal UDateAbsoluteUnit value. * @deprecated ICU 58 The numeric value may change over time, see ICU ticket #12420. */ UDAT_ABSOLUTE_UNIT_COUNT = UDAT_ABSOLUTE_NOW + 4 #endif // U_HIDE_DEPRECATED_API } UDateAbsoluteUnit; /** * Represents a direction for an absolute unit e.g "Next Tuesday" * or "Last Tuesday" * @stable ICU 53 */ typedef enum UDateDirection { /** * Two before. Not fully supported in every locale. * @stable ICU 53 */ UDAT_DIRECTION_LAST_2, /** * Last * @stable ICU 53 */ UDAT_DIRECTION_LAST, /** * This * @stable ICU 53 */ UDAT_DIRECTION_THIS, /** * Next * @stable ICU 53 */ UDAT_DIRECTION_NEXT, /** * Two after. Not fully supported in every locale. * @stable ICU 53 */ UDAT_DIRECTION_NEXT_2, /** * Plain, which means the absence of a qualifier. * @stable ICU 53 */ UDAT_DIRECTION_PLAIN, #ifndef U_HIDE_DEPRECATED_API /** * One more than the highest normal UDateDirection value. * @deprecated ICU 58 The numeric value may change over time, see ICU ticket #12420. 
*/ UDAT_DIRECTION_COUNT #endif // U_HIDE_DEPRECATED_API } UDateDirection; #if !UCONFIG_NO_BREAK_ITERATION U_NAMESPACE_BEGIN class BreakIterator; class RelativeDateTimeCacheData; class SharedNumberFormat; class SharedPluralRules; class SharedBreakIterator; class NumberFormat; class UnicodeString; class FormattedRelativeDateTime; class FormattedRelativeDateTimeData; /** * An immutable class containing the result of a relative datetime formatting operation. * * Instances of this class are immutable and thread-safe. * * Not intended for public subclassing. * * @stable ICU 64 */ class U_I18N_API FormattedRelativeDateTime : public UMemory, public FormattedValue { public: /** * Default constructor; makes an empty FormattedRelativeDateTime. * @stable ICU 64 */ FormattedRelativeDateTime() : fData(nullptr), fErrorCode(U_INVALID_STATE_ERROR) {} /** * Move constructor: Leaves the source FormattedRelativeDateTime in an undefined state. * @stable ICU 64 */ FormattedRelativeDateTime(FormattedRelativeDateTime&& src) U_NOEXCEPT; /** * Destruct an instance of FormattedRelativeDateTime. * @stable ICU 64 */ virtual ~FormattedRelativeDateTime() U_OVERRIDE; /** Copying not supported; use move constructor instead. */ FormattedRelativeDateTime(const FormattedRelativeDateTime&) = delete; /** Copying not supported; use move assignment instead. */ FormattedRelativeDateTime& operator=(const FormattedRelativeDateTime&) = delete; /** * Move assignment: Leaves the source FormattedRelativeDateTime in an undefined state. * @stable ICU 64 */ FormattedRelativeDateTime& operator=(FormattedRelativeDateTime&& src) U_NOEXCEPT; /** @copydoc FormattedValue::toString() */ UnicodeString toString(UErrorCode& status) const U_OVERRIDE; /** @copydoc FormattedValue::toTempString() */ UnicodeString toTempString(UErrorCode& status) const U_OVERRIDE; /** @copydoc FormattedValue::appendTo() */ Appendable &appendTo(Appendable& appendable, UErrorCode& status) const U_OVERRIDE; /** @copydoc FormattedValue::nextPosition() */ UBool nextPosition(ConstrainedFieldPosition& cfpos, UErrorCode& status) const U_OVERRIDE; private: FormattedRelativeDateTimeData *fData; UErrorCode fErrorCode; explicit FormattedRelativeDateTime(FormattedRelativeDateTimeData *results) : fData(results), fErrorCode(U_ZERO_ERROR) {} explicit FormattedRelativeDateTime(UErrorCode errorCode) : fData(nullptr), fErrorCode(errorCode) {} friend class RelativeDateTimeFormatter; }; /** * Formats simple relative dates. There are two types of relative dates that * it handles: * <ul> * <li>relative dates with a quantity e.g "in 5 days"</li> * <li>relative dates without a quantity e.g "next Tuesday"</li> * </ul> * <p> * This API is very basic and is intended to be a building block for more * fancy APIs. The caller tells it exactly what to display in a locale * independent way. While this class automatically provides the correct plural * forms, the grammatical form is otherwise as neutral as possible. It is the * caller's responsibility to handle cut-off logic such as deciding between * displaying "in 7 days" or "in 1 week." This API supports relative dates * involving one single unit. This API does not support relative dates * involving compound units, * e.g "in 5 days and 4 hours" nor does it support parsing. * <p> * This class is mostly thread safe and immutable with the following caveats: * 1. The assignment operator violates Immutability. It must not be used * concurrently with other operations. * 2. Caller must not hold onto adopted pointers. 
* <p> * This class is not intended for public subclassing. * <p> * Here are some examples of use: * <blockquote> * <pre> * UErrorCode status = U_ZERO_ERROR; * UnicodeString appendTo; * RelativeDateTimeFormatter fmt(status); * // Appends "in 1 day" * fmt.format( * 1, UDAT_DIRECTION_NEXT, UDAT_RELATIVE_DAYS, appendTo, status); * // Appends "in 3 days" * fmt.format( * 3, UDAT_DIRECTION_NEXT, UDAT_RELATIVE_DAYS, appendTo, status); * // Appends "3.2 years ago" * fmt.format( * 3.2, UDAT_DIRECTION_LAST, UDAT_RELATIVE_YEARS, appendTo, status); * // Appends "last Sunday" * fmt.format(UDAT_DIRECTION_LAST, UDAT_ABSOLUTE_SUNDAY, appendTo, status); * // Appends "this Sunday" * fmt.format(UDAT_DIRECTION_THIS, UDAT_ABSOLUTE_SUNDAY, appendTo, status); * // Appends "next Sunday" * fmt.format(UDAT_DIRECTION_NEXT, UDAT_ABSOLUTE_SUNDAY, appendTo, status); * // Appends "Sunday" * fmt.format(UDAT_DIRECTION_PLAIN, UDAT_ABSOLUTE_SUNDAY, appendTo, status); * * // Appends "yesterday" * fmt.format(UDAT_DIRECTION_LAST, UDAT_ABSOLUTE_DAY, appendTo, status); * // Appends "today" * fmt.format(UDAT_DIRECTION_THIS, UDAT_ABSOLUTE_DAY, appendTo, status); * // Appends "tomorrow" * fmt.format(UDAT_DIRECTION_NEXT, UDAT_ABSOLUTE_DAY, appendTo, status); * // Appends "now" * fmt.format(UDAT_DIRECTION_PLAIN, UDAT_ABSOLUTE_NOW, appendTo, status); * * </pre> * </blockquote> * <p> * In the future, we may add more forms, such as abbreviated/short forms * (3 secs ago), and relative day periods ("yesterday afternoon"), etc. * * The RelativeDateTimeFormatter class is not intended for public subclassing. * * @stable ICU 53 */ class U_I18N_API RelativeDateTimeFormatter : public UObject { public: /** * Create RelativeDateTimeFormatter with default locale. * @stable ICU 53 */ RelativeDateTimeFormatter(UErrorCode& status); /** * Create RelativeDateTimeFormatter with given locale. * @stable ICU 53 */ RelativeDateTimeFormatter(const Locale& locale, UErrorCode& status); /** * Create RelativeDateTimeFormatter with given locale and NumberFormat. * * @param locale the locale * @param nfToAdopt Constructed object takes ownership of this pointer. * It is an error for caller to delete this pointer or change its * contents after calling this constructor. * @param status Any error is returned here. * @stable ICU 53 */ RelativeDateTimeFormatter( const Locale& locale, NumberFormat *nfToAdopt, UErrorCode& status); /** * Create RelativeDateTimeFormatter with given locale, NumberFormat, * and capitalization context. * * @param locale the locale * @param nfToAdopt Constructed object takes ownership of this pointer. * It is an error for caller to delete this pointer or change its * contents after calling this constructor. Caller may pass NULL for * this argument if they want default number format behavior. * @param style the format style. The UDAT_RELATIVE bit field has no effect. * @param capitalizationContext A value from UDisplayContext that pertains to * capitalization. * @param status Any error is returned here. * @stable ICU 54 */ RelativeDateTimeFormatter( const Locale& locale, NumberFormat *nfToAdopt, UDateRelativeDateTimeFormatterStyle style, UDisplayContext capitalizationContext, UErrorCode& status); /** * Copy constructor. * @stable ICU 53 */ RelativeDateTimeFormatter(const RelativeDateTimeFormatter& other); /** * Assignment operator. * @stable ICU 53 */ RelativeDateTimeFormatter& operator=( const RelativeDateTimeFormatter& other); /** * Destructor. 
* @stable ICU 53 */ virtual ~RelativeDateTimeFormatter(); /** * Formats a relative date with a quantity such as "in 5 days" or * "3 months ago" * * This method returns a String. To get more information about the * formatting result, use formatToValue(). * * @param quantity The numerical amount e.g 5. This value is formatted * according to this object's NumberFormat object. * @param direction NEXT means a future relative date; LAST means a past * relative date. If direction is anything else, this method sets * status to U_ILLEGAL_ARGUMENT_ERROR. * @param unit the unit e.g day? month? year? * @param appendTo The string to which the formatted result will be * appended * @param status ICU error code returned here. * @return appendTo * @stable ICU 53 */ UnicodeString& format( double quantity, UDateDirection direction, UDateRelativeUnit unit, UnicodeString& appendTo, UErrorCode& status) const; /** * Formats a relative date with a quantity such as "in 5 days" or * "3 months ago" * * This method returns a FormattedRelativeDateTime, which exposes more * information than the String returned by format(). * * @param quantity The numerical amount e.g 5. This value is formatted * according to this object's NumberFormat object. * @param direction NEXT means a future relative date; LAST means a past * relative date. If direction is anything else, this method sets * status to U_ILLEGAL_ARGUMENT_ERROR. * @param unit the unit e.g day? month? year? * @param status ICU error code returned here. * @return The formatted relative datetime * @stable ICU 64 */ FormattedRelativeDateTime formatToValue( double quantity, UDateDirection direction, UDateRelativeUnit unit, UErrorCode& status) const; /** * Formats a relative date without a quantity. * * This method returns a String. To get more information about the * formatting result, use formatToValue(). * * @param direction NEXT, LAST, THIS, etc. * @param unit e.g SATURDAY, DAY, MONTH * @param appendTo The string to which the formatted result will be * appended. If the value of direction is documented as not being fully * supported in all locales then this method leaves appendTo unchanged if * no format string is available. * @param status ICU error code returned here. * @return appendTo * @stable ICU 53 */ UnicodeString& format( UDateDirection direction, UDateAbsoluteUnit unit, UnicodeString& appendTo, UErrorCode& status) const; /** * Formats a relative date without a quantity. * * This method returns a FormattedRelativeDateTime, which exposes more * information than the String returned by format(). * * If the string is not available in the requested locale, the return * value will be empty (calling toString will give an empty string). * * @param direction NEXT, LAST, THIS, etc. * @param unit e.g SATURDAY, DAY, MONTH * @param status ICU error code returned here. * @return The formatted relative datetime * @stable ICU 64 */ FormattedRelativeDateTime formatToValue( UDateDirection direction, UDateAbsoluteUnit unit, UErrorCode& status) const; /** * Format a combination of URelativeDateTimeUnit and numeric offset * using a numeric style, e.g. "1 week ago", "in 1 week", * "5 weeks ago", "in 5 weeks". * * This method returns a String. To get more information about the * formatting result, use formatNumericToValue(). * * @param offset The signed offset for the specified unit. This * will be formatted according to this object's * NumberFormat object. * @param unit The unit to use when formatting the relative * date, e.g. UDAT_REL_UNIT_WEEK, * UDAT_REL_UNIT_FRIDAY. 
* @param appendTo The string to which the formatted result will be * appended. * @param status ICU error code returned here. * @return appendTo * @stable ICU 57 */ UnicodeString& formatNumeric( double offset, URelativeDateTimeUnit unit, UnicodeString& appendTo, UErrorCode& status) const; /** * Format a combination of URelativeDateTimeUnit and numeric offset * using a numeric style, e.g. "1 week ago", "in 1 week", * "5 weeks ago", "in 5 weeks". * * This method returns a FormattedRelativeDateTime, which exposes more * information than the String returned by formatNumeric(). * * @param offset The signed offset for the specified unit. This * will be formatted according to this object's * NumberFormat object. * @param unit The unit to use when formatting the relative * date, e.g. UDAT_REL_UNIT_WEEK, * UDAT_REL_UNIT_FRIDAY. * @param status ICU error code returned here. * @return The formatted relative datetime * @stable ICU 64 */ FormattedRelativeDateTime formatNumericToValue( double offset, URelativeDateTimeUnit unit, UErrorCode& status) const; /** * Format a combination of URelativeDateTimeUnit and numeric offset * using a text style if possible, e.g. "last week", "this week", * "next week", "yesterday", "tomorrow". Falls back to numeric * style if no appropriate text term is available for the specified * offset in the object's locale. * * This method returns a String. To get more information about the * formatting result, use formatToValue(). * * @param offset The signed offset for the specified unit. * @param unit The unit to use when formatting the relative * date, e.g. UDAT_REL_UNIT_WEEK, * UDAT_REL_UNIT_FRIDAY. * @param appendTo The string to which the formatted result will be * appended. * @param status ICU error code returned here. * @return appendTo * @stable ICU 57 */ UnicodeString& format( double offset, URelativeDateTimeUnit unit, UnicodeString& appendTo, UErrorCode& status) const; /** * Format a combination of URelativeDateTimeUnit and numeric offset * using a text style if possible, e.g. "last week", "this week", * "next week", "yesterday", "tomorrow". Falls back to numeric * style if no appropriate text term is available for the specified * offset in the object's locale. * * This method returns a FormattedRelativeDateTime, which exposes more * information than the String returned by format(). * * @param offset The signed offset for the specified unit. * @param unit The unit to use when formatting the relative * date, e.g. UDAT_REL_UNIT_WEEK, * UDAT_REL_UNIT_FRIDAY. * @param status ICU error code returned here. * @return The formatted relative datetime * @stable ICU 64 */ FormattedRelativeDateTime formatToValue( double offset, URelativeDateTimeUnit unit, UErrorCode& status) const; /** * Combines a relative date string and a time string in this object's * locale. This is done with the same date-time separator used for the * default calendar in this locale. * * @param relativeDateString the relative date, e.g 'yesterday' * @param timeString the time e.g '3:45' * @param appendTo concatenated date and time appended here * @param status ICU error code returned here. * @return appendTo * @stable ICU 53 */ UnicodeString& combineDateAndTime( const UnicodeString& relativeDateString, const UnicodeString& timeString, UnicodeString& appendTo, UErrorCode& status) const; /** * Returns the NumberFormat this object is using. * * @stable ICU 53 */ const NumberFormat& getNumberFormat() const; /** * Returns the capitalization context. 
* * @stable ICU 54 */ UDisplayContext getCapitalizationContext() const; /** * Returns the format style. * * @stable ICU 54 */ UDateRelativeDateTimeFormatterStyle getFormatStyle() const; private: const RelativeDateTimeCacheData* fCache; const SharedNumberFormat *fNumberFormat; const SharedPluralRules *fPluralRules; UDateRelativeDateTimeFormatterStyle fStyle; UDisplayContext fContext; const SharedBreakIterator *fOptBreakIterator; Locale fLocale; void init( NumberFormat *nfToAdopt, BreakIterator *brkIter, UErrorCode &status); UnicodeString& adjustForContext(UnicodeString &) const; UBool checkNoAdjustForContext(UErrorCode& status) const; template<typename F, typename... Args> UnicodeString& doFormat( F callback, UnicodeString& appendTo, UErrorCode& status, Args... args) const; template<typename F, typename... Args> FormattedRelativeDateTime doFormatToValue( F callback, UErrorCode& status, Args... args) const; void formatImpl( double quantity, UDateDirection direction, UDateRelativeUnit unit, FormattedRelativeDateTimeData& output, UErrorCode& status) const; void formatAbsoluteImpl( UDateDirection direction, UDateAbsoluteUnit unit, FormattedRelativeDateTimeData& output, UErrorCode& status) const; void formatNumericImpl( double offset, URelativeDateTimeUnit unit, FormattedRelativeDateTimeData& output, UErrorCode& status) const; void formatRelativeImpl( double offset, URelativeDateTimeUnit unit, FormattedRelativeDateTimeData& output, UErrorCode& status) const; }; U_NAMESPACE_END #endif /* !UCONFIG_NO_BREAK_ITERATION */ #endif /* !UCONFIG_NO_FORMATTING */ #endif /* U_SHOW_CPLUSPLUS_API */ #endif /* __RELDATEFMT_H */
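As a small illustration of the value-returning API declared above (a sketch, not taken from ICU documentation; the helper name RelativeWeeks and the choice of the English locale are arbitrary):

// Hypothetical helper: format "in N weeks" / "N weeks ago" and return the text.
#include <unicode/reldatefmt.h>
#include <unicode/unistr.h>

icu::UnicodeString RelativeWeeks(double offset, UErrorCode &status) {
    icu::RelativeDateTimeFormatter fmt(icu::Locale::getEnglish(), status);
    if (U_FAILURE(status)) {
        return icu::UnicodeString();
    }
    // Always numeric, e.g. "in 3 weeks" or "3 weeks ago".
    icu::FormattedRelativeDateTime result =
            fmt.formatNumericToValue(offset, UDAT_REL_UNIT_WEEK, status);
    return result.toString(status);
}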
{ "content_hash": "cf0dd3af7904985222bc64a534021e1e", "timestamp": "", "source": "github", "line_count": 742, "max_line_length": 93, "avg_line_length": 30.163072776280323, "alnum_prop": 0.6410794870649211, "repo_name": "youtube/cobalt_sandbox", "id": "2a3742fd63c392f4eb81b5a9b42e15e8d40952ef", "size": "22751", "binary": false, "copies": "17", "ref": "refs/heads/main", "path": "third_party/icu/source/i18n/unicode/reldatefmt.h", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
package nl.xillio.xill.components.operators; import nl.xillio.xill.TestUtils; import nl.xillio.xill.api.Debugger; import nl.xillio.xill.api.NullDebugger; import nl.xillio.xill.api.components.MetaExpression; import nl.xillio.xill.components.instructions.VariableDeclaration; import org.testng.annotations.Test; import java.util.*; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.spy; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; public class AssignTest extends TestUtils { private final Debugger debugger = new NullDebugger(); @Test public void testAssignToAtomic() { VariableDeclaration variableDeclaration = new VariableDeclaration(fromValue("Hello"), "testVar"); variableDeclaration.process(debugger); Assign assign = new Assign(variableDeclaration, Collections.emptyList(), fromValue("World")); assertEquals(variableDeclaration.getVariable().getStringValue(), "Hello"); assign.process(debugger); assertEquals(variableDeclaration.getVariable().getStringValue(), "World"); } @Test public void testAssignToList() { VariableDeclaration variableDeclaration = new VariableDeclaration( list(fromValue("Hello")), "testVar" ); variableDeclaration.process(debugger); Assign assign = new Assign(variableDeclaration, Collections.singletonList(fromValue(1)), fromValue("World")); assertEquals(variableDeclaration.getVariable().getStringValue(), "[\"Hello\"]"); assign.process(debugger); assertEquals(variableDeclaration.getVariable().getStringValue(), "[\"Hello\",\"World\"]"); } @Test public void testComplexAssign() { MetaExpression value = list( map( "test", list( map( "other", fromValue(4) ) ) ) ); VariableDeclaration variableDeclaration = new VariableDeclaration(value, "test"); variableDeclaration.process(debugger); Assign assign = new Assign( variableDeclaration, Arrays.asList( fromValue("hello"), fromValue(0), fromValue("test"), fromValue(0) ), fromValue("New Value") ); assertEquals(variableDeclaration.getVariable().getStringValue(), "[{\"test\":[{\"other\":4}]}]"); assign.process(debugger); assertEquals(variableDeclaration.getVariable().getStringValue(), "[{\"test\":[{\"other\":4,\"hello\":\"New Value\"}]}]"); } @Test public void testAssignWithDebuggerStop() { Debugger debugger = spy(new NullDebugger()); doReturn(true).when(debugger).shouldStop(); VariableDeclaration variableDeclaration = new VariableDeclaration(fromValue("Hello"), "testVar"); variableDeclaration.process(debugger); assertTrue(variableDeclaration.getVariable().isNull()); } private MetaExpression map(String key, MetaExpression value) { LinkedHashMap<String, MetaExpression> result = new LinkedHashMap<>(); result.put(key, value); return fromValue(result); } private MetaExpression list(MetaExpression item) { List<MetaExpression> result = new ArrayList<>(); result.add(item); return fromValue(result); } }
{ "content_hash": "717e3afc08efaaaef0932bec0a7f25a0", "timestamp": "", "source": "github", "line_count": 106, "max_line_length": 129, "avg_line_length": 35.490566037735846, "alnum_prop": 0.5964912280701754, "repo_name": "XillioQA/xill-platform-3.4", "id": "bf048e09c541fef7cb72274133515e36fc24e261", "size": "4395", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xill-processor/src/test/java/nl/xillio/xill/components/operators/AssignTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "93045" }, { "name": "HTML", "bytes": "22194" }, { "name": "Java", "bytes": "3642844" }, { "name": "JavaScript", "bytes": "11981754" }, { "name": "PowerShell", "bytes": "1383" }, { "name": "Shell", "bytes": "959" } ], "symlink_target": "" }
package dwarf import ( "fmt" "strconv" ) // Parse the type units stored in a DWARF4 .debug_types section. Each // type unit defines a single primary type and an 8-byte signature. // Other sections may then use formRefSig8 to refer to the type. // The typeUnit format is a single type with a signature. It holds // the same data as a compilation unit. type typeUnit struct { unit toff Offset // Offset to signature type within data. name string // Name of .debug_type section. cache Type // Cache the type, nil to start. } // Parse a .debug_types section. func (d *Data) parseTypes(name string, types []byte) error { b := makeBuf(d, unknownFormat{}, name, 0, types) for len(b.data) > 0 { base := b.off n, dwarf64 := b.unitLength() if n != Offset(uint32(n)) { b.error("type unit length overflow") return b.err } hdroff := b.off vers := int(b.uint16()) if vers != 4 { b.error("unsupported DWARF version " + strconv.Itoa(vers)) return b.err } var ao uint64 if !dwarf64 { ao = uint64(b.uint32()) } else { ao = b.uint64() } atable, err := d.parseAbbrev(ao, vers) if err != nil { return err } asize := b.uint8() sig := b.uint64() var toff uint32 if !dwarf64 { toff = b.uint32() } else { to64 := b.uint64() if to64 != uint64(uint32(to64)) { b.error("type unit type offset overflow") return b.err } toff = uint32(to64) } boff := b.off d.typeSigs[sig] = &typeUnit{ unit: unit{ base: base, off: boff, data: b.bytes(int(n - (b.off - hdroff))), atable: atable, asize: int(asize), vers: vers, is64: dwarf64, }, toff: Offset(toff), name: name, } if b.err != nil { return b.err } } return nil } // Return the type for a type signature. func (d *Data) sigToType(sig uint64) (Type, error) { tu := d.typeSigs[sig] if tu == nil { return nil, fmt.Errorf("no type unit with signature %v", sig) } if tu.cache != nil { return tu.cache, nil } b := makeBuf(d, tu, tu.name, tu.off, tu.data) r := &typeUnitReader{d: d, tu: tu, b: b} t, err := d.readType(tu.name, r, tu.toff, make(map[Offset]Type), nil) if err != nil { return nil, err } tu.cache = t return t, nil } // typeUnitReader is a typeReader for a tagTypeUnit. type typeUnitReader struct { d *Data tu *typeUnit b buf err error } // Seek to a new position in the type unit. func (tur *typeUnitReader) Seek(off Offset) { tur.err = nil doff := off - tur.tu.off if doff < 0 || doff >= Offset(len(tur.tu.data)) { tur.err = fmt.Errorf("%s: offset %d out of range; max %d", tur.tu.name, doff, len(tur.tu.data)) return } tur.b = makeBuf(tur.d, tur.tu, tur.tu.name, off, tur.tu.data[doff:]) } // AddressSize returns the size in bytes of addresses in the current type unit. func (tur *typeUnitReader) AddressSize() int { return tur.tu.unit.asize } // Next reads the next Entry from the type unit. func (tur *typeUnitReader) Next() (*Entry, error) { if tur.err != nil { return nil, tur.err } if len(tur.tu.data) == 0 { return nil, nil } e := tur.b.entry(tur.tu.atable, tur.tu.base) if tur.b.err != nil { tur.err = tur.b.err return nil, tur.err } return e, nil } // clone returns a new reader for the type unit. func (tur *typeUnitReader) clone() typeReader { return &typeUnitReader{ d: tur.d, tu: tur.tu, b: makeBuf(tur.d, tur.tu, tur.tu.name, tur.tu.off, tur.tu.data), } } // offset returns the current offset. func (tur *typeUnitReader) offset() Offset { return tur.b.off }
{ "content_hash": "fc0566dd79456cfae4d22ccb91b0ed1e", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 97, "avg_line_length": 22.685897435897434, "alnum_prop": 0.6318168974286522, "repo_name": "christopher-henderson/Go", "id": "76b357ce28b3646ebd9bb5b7c7d8e6c70631f058", "size": "3699", "binary": false, "copies": "25", "ref": "refs/heads/master", "path": "src/debug/dwarf/typeunit.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "11721" } ], "symlink_target": "" }
// Copyright (c) 2018, Smart Projects Holdings Ltd // All rights reserved. // See LICENSE file for license details. /** * Implementation of ugcs::vsm::Vehicle_request. */ #include <ugcs/vsm/vehicle_request.h> #include <vector> using namespace ugcs::vsm; Vehicle_request::Handle::Handle() { } Vehicle_request::Handle::Handle(Vehicle_request::Ptr vehicle_request): vehicle_request(vehicle_request) { } Vehicle_request::Handle::operator bool() const { return vehicle_request != nullptr && !vehicle_request->request->Is_completed(); } void Vehicle_request::Handle::Assign_result(Result result, const std::string& status_text) { vehicle_request->Set_completion_result(result, status_text); vehicle_request->Complete(); } Vehicle_request::Vehicle_request( Completion_handler completion_handler, Request_completion_context::Ptr completion_ctx): completion_handler(completion_handler) { request = Request::Create(); request->Set_completion_handler(completion_ctx, completion_handler); /* By default, completed with error. Desirable completion result should be * set explicitly by the SDK user via the handle. */ completion_handler.Set_args(Result::NOK); } Vehicle_request::~Vehicle_request() { } void Vehicle_request::Handle::Fail(const char *format, ...) { va_list args; va_start(args, format); Fail_v(format, args); va_end(args); } void Vehicle_request::Handle::Fail_v(const char *format, va_list fmt_args) { if (vehicle_request == nullptr || vehicle_request->request->Is_completed()) { return; } if (format) { va_list fmt_copy; va_copy(fmt_copy, fmt_args); int size = vsnprintf(nullptr, 0, format, fmt_args); auto buf = std::unique_ptr<std::vector<char>>(new std::vector<char>(size + 1)); vsnprintf(&buf->front(), size + 1, format, fmt_copy); va_end(fmt_copy); Assign_result(Result::NOK, &buf->front()); } else { Assign_result(Result::NOK); } } void Vehicle_request::Handle::Fail(const std::string& reason) { if (vehicle_request && !vehicle_request->request->Is_completed()) { Assign_result(Result::NOK, reason); } } void Vehicle_request::Handle::Succeed() { if (vehicle_request && !vehicle_request->request->Is_completed()) { Assign_result(Result::OK); } } void Vehicle_request::Set_completion_result(Result result, const std::string& text) { completion_handler.Set_args(result, text); } void Vehicle_request::Complete() { request->Complete(); } void Vehicle_request::Abort() { request->Abort(); } Vehicle_request::Result Vehicle_request::Get_completion_result() { return completion_handler.template Get_arg<0>(); } bool Vehicle_request::Is_completed() const { return request->Is_completed(); } void Vehicle_request::Add_ref() { atomic_fetch_add(&ref_count, 1); } void Vehicle_request::Release_ref() { int res = atomic_fetch_sub(&ref_count, 1); if (res <= 0) { VSM_EXCEPTION(Internal_error_exception, "Reference counter underflow"); } else if (res == 1 && !Is_completed()) { /* User didn't complete the request explicitly. Might be user * error or intentional action. Make sure request is always * completed. */ ASSERT(Get_completion_result() == Result::NOK); Complete(); } }
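For context, a brief sketch of the calling pattern that vehicle-specific code is expected to follow (illustrative only; the function name, the flag and the failure message are made up):

// Hypothetical completion routine in a vehicle plugin.
#include <ugcs/vsm/vehicle_request.h>

void Complete_example(ugcs::vsm::Vehicle_request::Handle handle, bool accepted)
{
    if (!handle) {
        // Request already completed or handle is empty; nothing to do.
        return;
    }
    if (accepted) {
        handle.Succeed();
    } else {
        handle.Fail("Rejected by autopilot (code %d)", 42);
    }
    // If neither Succeed() nor Fail() is called, the request is still completed
    // with the default NOK result when its last reference is released.
}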
{ "content_hash": "89a540bb4b43dd2875f101ba21a07636", "timestamp": "", "source": "github", "line_count": 146, "max_line_length": 87, "avg_line_length": 23.404109589041095, "alnum_prop": 0.6540825285338016, "repo_name": "UgCS/vsm-cpp-sdk", "id": "0ccf726e9053c1bbeb71d5c8afb7998ef2de0fcb", "size": "3417", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/vehicle_request.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "27998" }, { "name": "C++", "bytes": "1247822" }, { "name": "CMake", "bytes": "34658" }, { "name": "Protocol Buffer", "bytes": "14492" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>T846078255358730240</title> <link rel="stylesheet" href="//fonts.googleapis.com/css?family=Source+Sans+Pro:300,300i,600"> <link rel="stylesheet" href="/style.css"> <link rel="stylesheet" href="/custom.css"> <link rel="shortcut icon" href="https://micro.blog/curt/favicon.png" type="image/x-icon" /> <link rel="alternate" type="application/rss+xml" title="Curt Clifton" href="http://microblog.curtclifton.net/feed.xml" /> <link rel="alternate" type="application/json" title="Curt Clifton" href="http://microblog.curtclifton.net/feed.json" /> <link rel="EditURI" type="application/rsd+xml" href="/rsd.xml" /> <link rel="me" href="https://micro.blog/curt" /> <link rel="me" href="https://twitter.com/curtclifton" /> <link rel="authorization_endpoint" href="https://micro.blog/indieauth/auth" /> <link rel="token_endpoint" href="https://micro.blog/indieauth/token" /> <link rel="micropub" href="https://micro.blog/micropub" /> <link rel="webmention" href="https://micro.blog/webmention" /> <link rel="subscribe" href="https://micro.blog/users/follow" /> </head> <body> <div class="container"> <header class="masthead"> <h1 class="masthead-title--small"> <a href="/">Curt Clifton</a> </h1> </header> <div class="content post h-entry"> <div class="post-date"> <time class="dt-published" datetime="2017-03-26 12:15:50 -0700">26 Mar 2017</time> </div> <div class="e-content"> <p>Ah, spring. <a href="https://t.co/oIEkA4OHCl">https://t.co/oIEkA4OHCl</a></p> </div> </div> </div> </body> </html>
{ "content_hash": "da5bd18435d81cec96bfbe0542f9d868", "timestamp": "", "source": "github", "line_count": 50, "max_line_length": 123, "avg_line_length": 34.26, "alnum_prop": 0.663164039696439, "repo_name": "curtclifton/curtclifton.github.io", "id": "7a9e294bd9cf2d707e89740a5c563b5746d7248d", "size": "1713", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_site/2017/03/26/t846078255358730240.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "9029" }, { "name": "HTML", "bytes": "3523910" } ], "symlink_target": "" }
/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */ /** * @copyright 2013 Couchbase, Inc. * * @author Filipe Manana <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. **/ #include "mapreduce_tests.h" #include <string.h> #define DOC_BODY "{" \ " \"values\": [10, -7, 20, 1]," \ " \"bin\": \"aGVsbG8gd29ybGQh\"," \ " \"date\":\"+033658-09-27T01:46:40.000Z\"" \ "}" #if __STDC_VERSION__ >=199901L #define ASSIGN(x) x = #else #define ASSIGN(x) #endif static const mapreduce_json_t doc = { ASSIGN(.json) DOC_BODY, ASSIGN(.length) sizeof(DOC_BODY) - 1 }; static const mapreduce_json_t meta = { ASSIGN(.json) "{\"id\":\"doc1\"}", ASSIGN(.length) sizeof("{\"id\":\"doc1\"}") - 1 }; static void test_sum_function(void) { void *context = NULL; char *error_msg = NULL; mapreduce_error_t ret; const char *functions[] = { "function(doc, meta) { emit(meta.id, sum(doc.values)); }" }; mapreduce_map_result_list_t *result = NULL; ret = mapreduce_start_map_context(functions, 1, &context, &error_msg); assert(ret == MAPREDUCE_SUCCESS); assert(error_msg == NULL); assert(context != NULL); ret = mapreduce_map(context, &doc, &meta, &result); assert(ret == MAPREDUCE_SUCCESS); assert(result != NULL); assert(result->length == 1); assert(result->list != NULL); assert(result->list[0].error == MAPREDUCE_SUCCESS); assert(result->list[0].result.kvs.length == 1); assert(result->list[0].result.kvs.kvs[0].key.length == (sizeof("\"doc1\"") - 1)); assert(memcmp(result->list[0].result.kvs.kvs[0].key.json, "\"doc1\"", (sizeof("\"doc1\"") - 1)) == 0); assert(result->list[0].result.kvs.kvs[0].value.length == (sizeof("24") - 1)); assert(memcmp(result->list[0].result.kvs.kvs[0].value.json, "24", (sizeof("24") - 1)) == 0); mapreduce_free_map_result_list(result); mapreduce_free_context(context); } static void test_b64decode_function(void) { void *context = NULL; char *error_msg = NULL; mapreduce_error_t ret; const char *functions[] = { "function(doc, meta) {" " emit(meta.id, String.fromCharCode.apply(this, decodeBase64(doc.bin)));" "}" }; mapreduce_map_result_list_t *result = NULL; ret = mapreduce_start_map_context(functions, 1, &context, &error_msg); assert(ret == MAPREDUCE_SUCCESS); assert(error_msg == NULL); assert(context != NULL); ret = mapreduce_map(context, &doc, &meta, &result); assert(ret == MAPREDUCE_SUCCESS); assert(result != NULL); assert(result->length == 1); assert(result->list != NULL); assert(result->list[0].error == MAPREDUCE_SUCCESS); assert(result->list[0].result.kvs.length == 1); assert(result->list[0].result.kvs.kvs[0].key.length == (sizeof("\"doc1\"") - 1)); assert(memcmp(result->list[0].result.kvs.kvs[0].key.json, "\"doc1\"", (sizeof("\"doc1\"") - 1)) == 0); assert(result->list[0].result.kvs.kvs[0].value.length == (sizeof("\"hello world!\"") - 1)); assert(memcmp(result->list[0].result.kvs.kvs[0].value.json, "\"hello world!\"", (sizeof("\"hello world!\"") - 1)) == 0); mapreduce_free_map_result_list(result); mapreduce_free_context(context); } static void test_date_to_array_function(void) { void 
*context = NULL; char *error_msg = NULL; mapreduce_error_t ret; const char *functions[] = { "function(doc, meta) { emit(meta.id, dateToArray(doc.date)); }" }; mapreduce_map_result_list_t *result = NULL; ret = mapreduce_start_map_context(functions, 1, &context, &error_msg); assert(ret == MAPREDUCE_SUCCESS); assert(error_msg == NULL); assert(context != NULL); ret = mapreduce_map(context, &doc, &meta, &result); assert(ret == MAPREDUCE_SUCCESS); assert(result != NULL); assert(result->length == 1); assert(result->list != NULL); assert(result->list[0].error == MAPREDUCE_SUCCESS); assert(result->list[0].result.kvs.length == 1); assert(result->list[0].result.kvs.kvs[0].key.length == (sizeof("\"doc1\"") - 1)); assert(memcmp(result->list[0].result.kvs.kvs[0].key.json, "\"doc1\"", (sizeof("\"doc1\"") - 1)) == 0); assert(result->list[0].result.kvs.kvs[0].value.length == (sizeof("[33658,9,27,1,46,40]") - 1)); assert(memcmp(result->list[0].result.kvs.kvs[0].value.json, "[33658,9,27,1,46,40]", (sizeof("[33658,9,27,1,46,40]") - 1)) == 0); mapreduce_free_map_result_list(result); mapreduce_free_context(context); } void builtin_tests(void) { int i; fprintf(stderr, "Running mapreduce builtin tests\n"); for (i = 0; i < 100; ++i) { test_sum_function(); test_b64decode_function(); test_date_to_array_function(); } }
{ "content_hash": "ad96f01dcf1e46e0f219d33553802da5", "timestamp": "", "source": "github", "line_count": 166, "max_line_length": 99, "avg_line_length": 33.22289156626506, "alnum_prop": 0.5956482320942883, "repo_name": "jimwwalker/couchstore", "id": "625f6bfc87e3e81f1bf01cb4bd3cffd4dd496efd", "size": "5515", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/mapreduce/builtin.c", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "664032" }, { "name": "C++", "bytes": "262027" }, { "name": "Lua", "bytes": "12598" }, { "name": "Python", "bytes": "33056" } ], "symlink_target": "" }
package com.ning.http.util; import com.ning.http.client.ProxyServer; import com.ning.http.client.Request; import com.ning.http.client.RequestBuilder; import org.testng.Assert; import org.testng.annotations.Test; public class ProxyUtilsTest { @Test(groups = "fast") public void testBasics() { ProxyServer proxyServer; Request req; // should avoid, there is no proxy (is null) req = new RequestBuilder("GET").setUrl("http://somewhere.com/foo").build(); Assert.assertTrue(ProxyUtils.avoidProxy(null, req)); // should avoid, it's in non-proxy hosts req = new RequestBuilder("GET").setUrl("http://somewhere.com/foo").build(); proxyServer = new ProxyServer("foo", 1234); proxyServer.addNonProxyHost("somewhere.com"); Assert.assertTrue(ProxyUtils.avoidProxy(proxyServer, req)); // should avoid, it's in non-proxy hosts (with "*") req = new RequestBuilder("GET").setUrl("http://sub.somewhere.com/foo").build(); proxyServer = new ProxyServer("foo", 1234); proxyServer.addNonProxyHost("*.somewhere.com"); Assert.assertTrue(ProxyUtils.avoidProxy(proxyServer, req)); // should use it req = new RequestBuilder("GET").setUrl("http://sub.somewhere.com/foo").build(); proxyServer = new ProxyServer("foo", 1234); proxyServer.addNonProxyHost("*.somewhere.org"); Assert.assertFalse(ProxyUtils.avoidProxy(proxyServer, req)); } }
{ "content_hash": "13c101abcb8d4cf72b6848a259e56e97", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 87, "avg_line_length": 39.1578947368421, "alnum_prop": 0.665994623655914, "repo_name": "ggeorg/chillverse", "id": "1ccd206a802f3f53937c2fb089678430c460882f", "size": "2183", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "async-http-client/src/test/java/com/ning/http/util/ProxyUtilsTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "88039" }, { "name": "Java", "bytes": "4107200" }, { "name": "JavaScript", "bytes": "679964" } ], "symlink_target": "" }
using System; using System.IO; namespace Sir.Mnist { public static class BinaryHelper { public static int ReadInt32WithCorrectEndianness(this BinaryReader br) { var bytes = br.ReadBytes(sizeof(int)); if (BitConverter.IsLittleEndian) Array.Reverse(bytes); return BitConverter.ToInt32(bytes, 0); } } }
{ "content_hash": "b4349dad5f0ebdec44c82a72b3987f36", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 78, "avg_line_length": 24.375, "alnum_prop": 0.6076923076923076, "repo_name": "kreeben/resin", "id": "ba0cc9f140e04096efdba991f9f2194b4f5bd9b1", "size": "392", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Sir.Mnist/BinaryHelper.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1207" }, { "name": "C#", "bytes": "347145" }, { "name": "CSS", "bytes": "1122" }, { "name": "HTML", "bytes": "12128" } ], "symlink_target": "" }
name "supermarket-cookbooks" license :project_license dependency "berkshelf" source path: "cookbooks/omnibus-supermarket" build do cookbooks_path = "#{install_dir}/embedded/cookbooks" env = with_standard_compiler_flags(with_embedded_path) command "berks vendor #{cookbooks_path}", env: env block do open("#{cookbooks_path}/dna.json", "w") do |file| file.write FFI_Yajl::Encoder.encode(run_list: ['recipe[omnibus-supermarket::default]']) end open("#{cookbooks_path}/show-config.json", "w") do |file| file.write FFI_Yajl::Encoder.encode( run_list: ['recipe[omnibus-supermarket::show_config]'] ) end open("#{cookbooks_path}/solo.rb", "w") do |file| file.write <<-EOH.gsub(/^ {8}/, '') cookbook_path "#{cookbooks_path}" cache_path "/var/opt/supermarket/cache" verbose_logging true ssl_verify_mode :verify_peer EOH end end end
{ "content_hash": "72fc99522476ef2fb9a2868995560f90", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 93, "avg_line_length": 27.5, "alnum_prop": 0.6459893048128342, "repo_name": "tas50/supermarket", "id": "139ff335f5edf4dbc564fb46761d3466f6dee5ee", "size": "1523", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "omnibus/config/software/supermarket-cookbooks.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "60065" }, { "name": "HTML", "bytes": "157545" }, { "name": "JavaScript", "bytes": "13524" }, { "name": "Ruby", "bytes": "981938" }, { "name": "Shell", "bytes": "11046" } ], "symlink_target": "" }
The TensorFlow.js Layers API is modeled after Keras, and we strive to make the [Layers API](https://js.tensorflow.org/api/latest/) reasonably similar to Keras, given the differences between JavaScript and Python. This makes it easier for users with experience developing Keras models in Python to migrate to TensorFlow.js Layers in JavaScript. For example, the following Keras code translates to JavaScript:

```python
# Python:
import keras
import numpy as np

# Build and compile model.
model = keras.Sequential()
model.add(keras.layers.Dense(units=1, input_shape=[1]))
model.compile(optimizer='sgd', loss='mean_squared_error')

# Generate some synthetic data for training.
xs = np.array([[1], [2], [3], [4]])
ys = np.array([[1], [3], [5], [7]])

# Train model with fit().
model.fit(xs, ys, epochs=1000)

# Run inference with predict().
print(model.predict(np.array([[5]])))
```

```js
// JavaScript:
import * as tf from '@tensorflow/tfjs';

// Build and compile model.
const model = tf.sequential();
model.add(tf.layers.dense({units: 1, inputShape: [1]}));
model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});

// Generate some synthetic data for training.
const xs = tf.tensor2d([[1], [2], [3], [4]], [4, 1]);
const ys = tf.tensor2d([[1], [3], [5], [7]], [4, 1]);

// Train model with fit().
await model.fit(xs, ys, {epochs: 1000});

// Run inference with predict().
model.predict(tf.tensor2d([[5]], [1, 1])).print();
```

However, there are a few differences we would like to call out and explain in this document. Once you understand these differences and the rationale behind them, your Python-to-JavaScript migration (or migration in the reverse direction) should be a relatively smooth experience.

## Constructors take JavaScript objects as configuration

Compare the following Python and JavaScript lines from the example above: they both create a [Dense](https://keras.io/layers/core/#dense) layer.

```python
# Python:
keras.layers.Dense(units=1, input_shape=[1])
```

```js
// JavaScript:
tf.layers.dense({units: 1, inputShape: [1]});
```

JavaScript functions do not have an equivalent of the keyword arguments of Python functions. We want to avoid implementing constructor options as positional arguments in JavaScript, which would be especially cumbersome to remember and use for constructors with a large number of arguments (for example, [LSTM](https://keras.io/layers/recurrent/#lstm)). This is why we use JavaScript configuration objects. Such objects provide the same level of positional invariance and flexibility as Python keyword arguments.

Some methods of the `Model` class, for example [`Model.compile()`](https://keras.io/models/model/#model-class-api), also take a JavaScript configuration object as input. However, keep in mind that `Model.fit()`, `Model.evaluate()` and `Model.predict()` are slightly different. Because these methods take mandatory `x` (features) and `y` (labels or targets) data as inputs, `x` and `y` are positional arguments separate from the configuration object that plays the role of the keyword arguments. For example:

```js
// JavaScript:
await model.fit(xs, ys, {epochs: 1000});
```

## Model.fit() is asynchronous

`Model.fit()` is the primary method with which users perform model training in TensorFlow.js. This method can often be long-running, lasting seconds or minutes. 
Therefore, we make use of the `async` feature of the JavaScript language, so that this function can be used in a way that does not block the main UI thread when it runs in the browser. This is similar to other potentially long-running functions in JavaScript, such as the `async` [fetch](https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API).

Note that `async` is a construct that does not exist in Python. While the [`fit()`](https://keras.io/models/model/#model-class-api) method in Keras returns a History object, the counterpart of the `fit()` method in JavaScript returns a [Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) of History, which can be handled with [await](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/await) (as in the example above) or with the `.then()` method.

## No NumPy for TensorFlow.js

Python Keras users often use [NumPy](http://www.numpy.org/) to perform basic numeric and array operations, such as generating 2D tensors in the example below.

```python
# Python:
xs = np.array([[1], [2], [3], [4]])
```

In TensorFlow.js, this kind of basic numeric operation is done with the package itself. For example:

```js
// JavaScript:
const xs = tf.tensor2d([[1], [2], [3], [4]], [4, 1]);
```

The `tf.*` namespace also provides a number of other functions for array and linear-algebra operations, such as matrix multiplication. See the [core TensorFlow.js documentation](https://js.tensorflow.org/api/latest/) for more information.

## Use factory methods, not constructors

This line in Python (from the example above) is a constructor call:

```python
# Python:
model = keras.Sequential()
```

If translated strictly into JavaScript, the equivalent constructor call would look like the following:

```js
// JavaScript:
const model = new tf.Sequential();  // !!! DO NOT DO THIS !!!
```

However, we decided not to use "new" constructors because 1) the "new" keyword would make the code more bloated and 2) the "new" constructor is regarded as a "bad part" of JavaScript: a potential pitfall, as argued in [*JavaScript: the Good Parts*](http://archive.oreilly.com/pub/a/javascript/excerpts/javascript-good-parts/bad-parts.html). To create models and layers in TensorFlow.js, you call factory methods, which have lowerCamelCase names, for example:

```js
// JavaScript:
const model = tf.sequential();

const layer = tf.layers.batchNormalization({axis: 1});
```

## Option string values are lowerCamelCase, not snake_case

In JavaScript, it is more common to use camel case for symbol names (e.g., see the [Google JavaScript Style Guide](https://google.github.io/styleguide/jsguide.html#naming-camel-case-defined)), compared to Python, where snake case is common (e.g., in Keras). As such, we decided to use lowerCamelCase for string values for options, including the following:

* DataFormat, e.g., **`channelsFirst`** instead of `channels_first`
* Initializers, e.g., **`glorotNormal`** instead of `glorot_normal`
* Loss functions and metrics, e.g., **`meanSquaredError`** instead of `mean_squared_error`, and **`categoricalCrossentropy`** instead of `categorical_crossentropy`.
For example, as in the example below:

```js
// JavaScript:
model.compile({optimizer: 'sgd', loss: 'meanSquaredError'});
```

As far as model serialization and deserialization are concerned, rest assured: the internal mechanism of TensorFlow.js ensures that snake case in JSON objects is handled correctly, e.g., when loading pretrained models from Python Keras.

## Run Layer objects with apply(), not by calling them as functions

In Keras, a Layer object has the `__call__` method defined. Therefore the user can invoke the layer's logic by calling the object as a function, for example:

```python
# Python:
my_input = keras.Input(shape=[2, 4])
flatten = keras.layers.Flatten()

print(flatten(my_input).shape)
```

This Python syntactic sugar is implemented as the apply() method in TensorFlow.js:

```js
// JavaScript:
const myInput = tf.input({shape: [2, 4]});
const flatten = tf.layers.flatten();

console.log(flatten.apply(myInput).shape);
```

## Layer.apply() supports imperative evaluation on concrete tensors

Currently, in Keras, the `__call__` method can only operate on (Python) TensorFlow `tf.Tensor` objects (assuming the TensorFlow backend), which are symbolic and do not hold actual numeric values. This is what is shown in the previous section. However, in TensorFlow.js, the apply() method of layers can operate in both symbolic and imperative modes. If `apply()` is invoked with a SymbolicTensor (a close analogy of tf.Tensor), the return value will be a SymbolicTensor. This typically happens during model building. But if `apply()` is invoked with an actual concrete tensor, it will return a concrete tensor. For example:

```js
// JavaScript:
const flatten = tf.layers.flatten();

flatten.apply(tf.ones([2, 3, 4])).print();
```

This feature is reminiscent of (Python) TensorFlow's [Eager Execution](https://www.tensorflow.org/guide/eager). It provides greater interactivity and debuggability during model development, in addition to opening doors to composing dynamic neural networks.

## Optimizers are under train.*, not optimizers.*

In Keras, the constructors for Optimizer objects are under the `keras.optimizers.*` namespace. In TensorFlow.js, the factory methods for Optimizers are under the `tf.train.*` namespace. For example:

```python
# Python:
my_sgd = keras.optimizers.sgd(lr=0.2)
```

```js
// JavaScript:
const mySGD = tf.train.sgd({lr: 0.2});
```

## loadLayersModel() loads from a URL, not an HDF5 file

In Keras, models are usually [saved](https://keras.io/getting-started/faq/#how-can-i-save-a-keras-model) as an HDF5 (.h5) file, which can later be loaded using the `keras.models.load_model()` method. The method takes a path to the `.h5` file. The counterpart of `load_model()` in TensorFlow.js is [`tf.loadLayersModel()`](https://js.tensorflow.org/api/latest/#loadLayersModel). Since HDF5 is not a browser-friendly file format, `tf.loadLayersModel()` takes a TensorFlow.js-specific file format instead: it takes a model.json file as its input argument. The model.json can be converted from a Keras HDF5 file using the tensorflowjs pip package.

```js
// JavaScript:
const model = await tf.loadLayersModel('https://foo.bar/model.json');
```

Also note that `tf.loadLayersModel()` returns a [`Promise`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise) of [`tf.Model`](https://js.tensorflow.org/api/latest/#class:Model). 
In general, saving and loading `tf.Model`s in TensorFlow.js is done using the `tf.Model.save` and `tf.loadLayersModel` methods, respectively. We designed these APIs to be similar to the [save/load_model](https://keras.io/getting-started/faq/#how-can-i-save-a-keras-model) API of Keras. But the browser environment is quite different from the backend environment on which mainstream deep learning frameworks such as Keras run, particularly in the array of routes available for persisting and transmitting data. Hence there are some interesting differences between the save/load APIs in TensorFlow.js and in Keras. See our [Save and load models](./save_load.md) tutorial for more details.

## Use `fitDataset()` to train models using `tf.data.Dataset` objects

In tf.keras of Python TensorFlow, a model can be trained using a [Dataset](https://www.tensorflow.org/guide/datasets) object; the model's `fit()` method accepts such an object directly. A TensorFlow.js model can be trained with the JavaScript equivalent of Dataset objects (see [the documentation of the tf.data API in TensorFlow.js](https://js.tensorflow.org/api/latest/#Data)). However, unlike in Python, Dataset-based training is done through a dedicated method, namely [fitDataset](https://js.tensorflow.org/api/0.15.1/#tf.Model.fitDataset). The [fit()](https://js.tensorflow.org/api/latest/#tf.Model.fitDataset) method is only for tensor-based training.

## Memory management of Layer and Model objects

TensorFlow.js runs on WebGL in the browser, where the weights of Layer and Model objects are backed by WebGL textures. However, WebGL has no built-in garbage-collection support. Layer and Model objects internally manage tensor memory for the user during their inference and training calls, but they also allow the user to dispose of them in order to free the WebGL memory they occupy. This is useful in cases where many model instances are created and released within a single page load. To dispose of a Layer or Model object, use the `dispose()` method.
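
The following is a minimal sketch of that last point; the model below is hypothetical and only serves to illustrate the call:

```js
// JavaScript: a minimal sketch of explicit disposal. The model is hypothetical.
const model = tf.sequential();
model.add(tf.layers.dense({units: 1, inputShape: [1]}));

// ... use the model for training or inference ...

// Free the WebGL memory backing the model's weights once it is no longer needed.
model.dispose();
```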
{ "content_hash": "ebf1f4287d36fcd0bda3015b17e20f6a", "timestamp": "", "source": "github", "line_count": 247, "max_line_length": 692, "avg_line_length": 50.37651821862348, "alnum_prop": 0.7548018966487181, "repo_name": "tensorflow/docs-l10n", "id": "436b06aede146fc4d684de673cfbe19d2ecd13bf", "size": "12725", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "site/pt-br/js/guide/layers_for_keras_users.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jupyter Notebook", "bytes": "256924604" }, { "name": "Shell", "bytes": "17783" } ], "symlink_target": "" }
<nav class="navbar navbar-default"> <div class="container"> <!-- Brand and toggle get grouped for better mobile display --> <div class="navbar-header"> <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1" aria-expanded="false"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a class="navbar-brand" href="<?php echo base_url(); ?>"> <i class="icon icon-home"></i> Home </a> </div> <!-- Collect the nav links, forms, and other content for toggling --> <div class="collapse navbar-collapse" id="bs-example-navbar-collapse-1"> <ul class="nav navbar-nav"> <li><a href="<?php echo base_url(); ?>">Forex Rebates </a></li> <li><a href="<?php echo base_url(); ?>">Brokers</a></li> <li><a href="<?php echo base_url(); ?>">Analytics</a></li> <li><a href="<?php echo base_url(); ?>">Education</a></li> <li><a href="<?php echo base_url(); ?>">FAQ</a></li> <li><a href="<?php echo base_url(); ?>">About us</a></li> <li><a href="<?php echo base_url(); ?>">Contact us</a></li> <!-- <li class="dropdown"> <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">Dropdown <span class="caret"></span></a> <ul class="dropdown-menu"> <li><a href="#">Action</a></li> <li><a href="#">Another action</a></li> <li><a href="#">Something else here</a></li> <li role="separator" class="divider"></li> <li><a href="#">Separated link</a></li> <li role="separator" class="divider"></li> <li><a href="#">One more separated link</a></li> </ul> </li> --> </ul> <!-- <ul class="nav navbar-nav navbar-right"> <li><a href="#">Link</a></li> <li class="dropdown"> <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">Dropdown <span class="caret"></span></a> <ul class="dropdown-menu"> <li><a href="#">Action</a></li> <li><a href="#">Another action</a></li> <li><a href="#">Something else here</a></li> <li role="separator" class="divider"></li> <li><a href="#">Separated link</a></li> </ul> </li> </ul> --> </div><!-- /.navbar-collapse --> </div><!-- /.container-fluid --> </nav> </header> <!-- Login Modal --> <div class="modal fade" id="loginModal" tabindex="-1" role="dialog" aria-labelledby="myLoginLabel"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"> <span aria-hidden="true">&times;</span> </button> <h3 class="modal-title" id="login-title">Sign In</h3> </div> <div class="modal-body"> <div class="row"> <div class="col-md-11"> <div class="gorm-group"> <div class="row login-content"> <div class="col-md-5"> <label for="Email">Email Address <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="text" name="user_email_address" class="form-control"> </div> <div class="col-md-5"> <label for="Email">Password <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="password" name="user_password" class="form-control"> </div> <div class="col-md-5"></div> <div class="col-md-7"> <button type="submit" class="btn btn-super" name="login-button">Sign In <span class="glyphicon glyphicon-arrow-right pull-right"></span></button> <div class="forget-password"> <a href="" class="pull-left" data-toggle="modal" data-target="#RegisterModal">Sign Up</a> <a href="" class="pull-right">Forget 
Password</a> </div> </div> </div> </div> </div> </div> </div> <!-- <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> <button type="button" class="btn btn-primary">Save changes</button> </div> --> </div> </div> </div> <!-- /Modal --> <!-- Registration Modal --> <div class="modal fade" id="RegisterModal" tabindex="-1" role="dialog" aria-labelledby="RegisterModalLabel"> <div class="modal-dialog modal-register" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button> <h3 class="modal-title" id="login-title">Sign Up</h3> </div> <div class="modal-body"> <div class="row"> <div class="col-md-11"> <div class="gorm-group"> <div class="row login-content"> <div class="col-md-5"> <label for="Email">First Name <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="text" name="user_first_name" class="form-control"> </div> <div class="col-md-5"> <label for="Email">Last Name <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="text" name="user_last_name" class="form-control"> </div> <div class="col-md-5"> <label for="Email">Email Address <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="text" name="user_email_address" class="form-control"> </div> <div class="col-md-5"> <label for="Email">Password <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="password" name="user_password" class="form-control"> </div> <div class="col-md-5"> <label for="Email">Gender <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="radio" name="gender"> Male <input type="radio" name="gender"> Female <br/> </div> <div class="clearfix"></div> <div class="col-md-5"> <label for="Email">Country Name <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="text" name="user_country" class="form-control"> </div> <div class="col-md-5"> <label for="Email">Phone No <span class="pull-right text-right">:</span> </label> </div> <div class="col-md-7"> <input type="text" name="user_phone_no" class="form-control"> </div> <div class="col-md-5"></div> <div class="col-md-7"> <button type="submit" class="btn btn-super" name="login-button">Sign Up <span class="glyphicon glyphicon-arrow-right pull-right"></span></button> <div class="forget-password"> <a href="" class="pull-left" data-toggle="modal" data-target="#loginModal">Sign In</a> <a href="" class="pull-right">Forget Password</a> </div> </div> </div> </div> </div> </div> </div> <!-- <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">Close</button> <button type="button" class="btn btn-primary">Save changes</button> </div> --> </div> </div> </div> <!-- /Modal --> <div style="width: 100%; height: 155px;"></div>
{ "content_hash": "44966796fd482dbbfd3ca9076ebb3cec", "timestamp": "", "source": "github", "line_count": 206, "max_line_length": 189, "avg_line_length": 58.96601941747573, "alnum_prop": 0.35761916522598175, "repo_name": "suman0359/get4xrebate", "id": "f98dcb5c3c666bd0d0073d8bc40f7c3543fda028", "size": "12147", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "application/views/common/navigation.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "479" }, { "name": "CSS", "bytes": "94214" }, { "name": "HTML", "bytes": "5401028" }, { "name": "JavaScript", "bytes": "51943" }, { "name": "PHP", "bytes": "1788822" } ], "symlink_target": "" }
function slice(arr, offset){ return Array.prototype.slice.call(arr, offset || 0); } /** * Delays the call of a function within a given context. */ function timeout(fn, millis, context){ var args = slice(arguments, 3); return setTimeout(function() { fn.apply(context, args); }, millis); } module.exports = timeout;
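
// Usage sketch (hypothetical values): run `fn` after 250 ms with `ctx` bound as
// `this`, forwarding any extra arguments. The native timer id is returned, so the
// delayed call can still be cancelled with clearTimeout.
//
//   var ctx = { greeting: 'Hi ' };
//   var id = timeout(function (name) { console.log(this.greeting + name); }, 250, ctx, 'Ann');
//   clearTimeout(id);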
{ "content_hash": "7c99e46a9bedaebcb07422b67ffb6c22", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 60, "avg_line_length": 19.105263157894736, "alnum_prop": 0.6170798898071626, "repo_name": "thomjoy/sydney-buses", "id": "6f1770e9c873006a25a5ae75e41c08df47c1fa5d", "size": "363", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "node_modules/bower/node_modules/mout/function/timeout.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1030" }, { "name": "JavaScript", "bytes": "13537" }, { "name": "Python", "bytes": "902" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <!--This file is part of GV Callback. GV Callback is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. GV Callback is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GV Callback. If not, see <http://www.gnu.org/licenses/>. --> <resources> <string name="about_title">GV Callback</string> <string name="about_message">© 2010 Brandon Chalk\nVersion: %s\n\nThis program is open source and available at http://code.google.com/p/gvcallback . It is protected under the GNU GPL v3. It is also based on Even Charlton's program, GV, which can be found at http://evancharlton.com/projects/gv .</string> <string name="app_name">GV Callback</string> <string name="bad_callback_title">Bad Callback Number</string> <string name="bad_callback_message">Callback number was not saved. Callback number should be a 10 digit phone number.</string> <string name="cancel">Cancel</string> <string name="finish">Finish</string> <string name="login_failed_title">Login failed!</string> <string name="login_failed_message">There was an error logging in to Google Voice.\nPlease check your provided information and try again.</string> <string name="password_description">Your Google Voice password</string> <string name="settings_phone_number">Your callback number (required)</string> <string name="settings_phone_number_hint">Almost always your cell number</string> <string name="setup_login_information">Type in your Google Voice login information</string> <string name="setup_testing_login">Testing login information...</string> <string name="setup_working">Working...</string> <string name="success_title">Success!</string> <string name="success_message">Settings saved.</string> <string name="use_gv_callback">Use GV Callback</string> <string name="username_description">Your Google Voice username</string> <string name="version">0.2</string> </resources>
{ "content_hash": "bff7198bf1e84d1b900faa02d6e92b84", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 308, "avg_line_length": 63.23684210526316, "alnum_prop": 0.7361631294215564, "repo_name": "kpkammer/gvcallback", "id": "1543342b02fac1bdd572877457a5f014eb145408", "size": "2404", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "res/values/strings.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "48920" } ], "symlink_target": "" }
doc pattern library ==============
{ "content_hash": "b3404b337f2f775dea9396768453d4fc", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 19, "avg_line_length": 17.5, "alnum_prop": 0.4857142857142857, "repo_name": "m-e-h/doc-styleguide", "id": "08ac00c698d372c623749d783cf7008350f532dc", "size": "35", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "699594" }, { "name": "JavaScript", "bytes": "914124" }, { "name": "PHP", "bytes": "395050" }, { "name": "Ruby", "bytes": "3415" } ], "symlink_target": "" }
using namespace boost; using namespace std; DECLARE_bool(abort_on_config_error); DEFINE_int32(num_cores, 0, "(Advanced) If > 0, it sets the number of cores available to" " Impala. Setting it to 0 means Impala will use all available cores on the machine" " according to /proc/cpuinfo."); namespace impala { bool CpuInfo::initialized_ = false; int64_t CpuInfo::hardware_flags_ = 0; int64_t CpuInfo::original_hardware_flags_; long CpuInfo::cache_sizes_[L3_CACHE + 1]; int64_t CpuInfo::cycles_per_ms_; int CpuInfo::num_cores_ = 1; string CpuInfo::model_name_ = "unknown"; static struct { string name; int64_t flag; } flag_mappings[] = { { "ssse3", CpuInfo::SSSE3 }, { "sse4_1", CpuInfo::SSE4_1 }, { "sse4_2", CpuInfo::SSE4_2 }, { "popcnt", CpuInfo::POPCNT }, }; static const long num_flags = sizeof(flag_mappings) / sizeof(flag_mappings[0]); // Helper function to parse for hardware flags. // values contains a list of space-seperated flags. check to see if the flags we // care about are present. // Returns a bitmap of flags. int64_t ParseCPUFlags(const string& values) { int64_t flags = 0; for (int i = 0; i < num_flags; ++i) { if (contains(values, flag_mappings[i].name)) { flags |= flag_mappings[i].flag; } } return flags; } void CpuInfo::Init() { string line; string name; string value; float max_mhz = 0; int num_cores = 0; memset(&cache_sizes_, 0, sizeof(cache_sizes_)); // Read from /proc/cpuinfo ifstream cpuinfo("/proc/cpuinfo", ios::in); while (cpuinfo) { getline(cpuinfo, line); size_t colon = line.find(':'); if (colon != string::npos) { name = line.substr(0, colon - 1); value = line.substr(colon + 1, string::npos); trim(name); trim(value); if (name.compare("flags") == 0) { hardware_flags_ |= ParseCPUFlags(value); } else if (name.compare("cpu MHz") == 0) { // Every core will report a different speed. We'll take the max, assuming // that when impala is running, the core will not be in a lower power state. // TODO: is there a more robust way to do this, such as // Window's QueryPerformanceFrequency() float mhz = atof(value.c_str()); max_mhz = max(mhz, max_mhz); } else if (name.compare("processor") == 0) { ++num_cores; } else if (name.compare("model name") == 0) { model_name_ = value; } } } if (cpuinfo.is_open()) cpuinfo.close(); // Call sysconf to query for the cache sizes cache_sizes_[0] = sysconf(_SC_LEVEL1_DCACHE_SIZE); cache_sizes_[1] = sysconf(_SC_LEVEL2_CACHE_SIZE); cache_sizes_[2] = sysconf(_SC_LEVEL3_CACHE_SIZE); if (max_mhz != 0) { cycles_per_ms_ = max_mhz * 1000; } else { cycles_per_ms_ = 1000000; } original_hardware_flags_ = hardware_flags_; if (num_cores > 0) { num_cores_ = num_cores; } else { num_cores_ = 1; } if (FLAGS_num_cores > 0) num_cores_ = FLAGS_num_cores; initialized_ = true; } void CpuInfo::VerifyCpuRequirements() { if (!CpuInfo::IsSupported(CpuInfo::SSSE3)) { LOG(ERROR) << "CPU does not support the Supplemental SSE3 (SSSE3) instruction set, " << "which is required. 
Exiting if Supplemental SSE3 is not functional..."; } } void CpuInfo::EnableFeature(long flag, bool enable) { DCHECK(initialized_); if (!enable) { hardware_flags_ &= ~flag; } else { // Can't turn something on that can't be supported DCHECK((original_hardware_flags_ & flag) != 0); hardware_flags_ |= flag; } } string CpuInfo::DebugString() { DCHECK(initialized_); stringstream stream; int64_t L1 = CacheSize(L1_CACHE); int64_t L2 = CacheSize(L2_CACHE); int64_t L3 = CacheSize(L3_CACHE); stream << "Cpu Info:" << endl << " Model: " << model_name_ << endl << " Cores: " << num_cores_ << endl << " L1 Cache: " << PrettyPrinter::Print(L1, TUnit::BYTES) << endl << " L2 Cache: " << PrettyPrinter::Print(L2, TUnit::BYTES) << endl << " L3 Cache: " << PrettyPrinter::Print(L3, TUnit::BYTES) << endl << " Hardware Supports:" << endl; for (int i = 0; i < num_flags; ++i) { if (IsSupported(flag_mappings[i].flag)) { stream << " " << flag_mappings[i].name << endl; } } return stream.str(); } }
{ "content_hash": "2632176654837860327b70e3ca19b07f", "timestamp": "", "source": "github", "line_count": 146, "max_line_length": 89, "avg_line_length": 29.575342465753426, "alnum_prop": 0.612088930060213, "repo_name": "henryr/Impala", "id": "19fecb10cd4772e9ffa3ec475879a7ae68c26735", "size": "5146", "binary": false, "copies": "5", "ref": "refs/heads/cdh5-trunk", "path": "be/src/util/cpu-info.cc", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Bison", "bytes": "78633" }, { "name": "C", "bytes": "15836" }, { "name": "C++", "bytes": "5841728" }, { "name": "CMake", "bytes": "89740" }, { "name": "CSS", "bytes": "86925" }, { "name": "Groff", "bytes": "1633" }, { "name": "HTML", "bytes": "56" }, { "name": "Java", "bytes": "3270730" }, { "name": "PLpgSQL", "bytes": "393" }, { "name": "Python", "bytes": "1642846" }, { "name": "SQLPL", "bytes": "3253" }, { "name": "Shell", "bytes": "143698" }, { "name": "Thrift", "bytes": "240077" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Windows.Forms;

namespace RViewer
{
    static class Program
    {
        /// <summary>
        /// The main entry point for the application.
        /// </summary>
        [STAThread]
        static void Main()
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);
            Application.Run(new frmRviewMain());
        }
    }
}
{ "content_hash": "0c7ae6a214119718cf93f2c8624e1b8b", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 65, "avg_line_length": 21.6, "alnum_prop": 0.5787037037037037, "repo_name": "guojianbin/IRunner", "id": "0eff5e9674c5b6ddeefa9304eaab1dfab8b653e4", "size": "454", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "RViewer/RViewer/Program.cs", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
/*! \file OneStepIntegratorTypes.hpp \brief enum of the available types for one-step time integrators. */ #ifndef OSITYPES_HPP #define OSITYPES_HPP #include "SiconosFwd.hpp" /** Namespace for user-defined types related to relations */ namespace OSI { /** List of possible OneStepIntegrator types*/ enum TYPES { /** */ EULERMOREAUOSI, /** */ MOREAUJEANOSI, /** */ LSODAROSI, /** */ HEM5OSI, /** */ MOREAUJEANOSI2, /** */ MOREAUDIRECTPROJECTIONOSI, /** */ MOREAUCOMBINEDPROJECTIONOSI, /** */ D1MINUSLINEAROSI, /** */ SCHATZMANPAOLIOSI, /** */ ZOHOSI, /** */ NEWMARKALPHAOSI }; } #endif
{ "content_hash": "0dd9923e9a1bea66117a1575044c3d2c", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 67, "avg_line_length": 15.142857142857142, "alnum_prop": 0.639937106918239, "repo_name": "siconos/siconos-deb", "id": "e9e1ee66ced9ec8d296eb3e7a74623233307472d", "size": "1326", "binary": false, "copies": "1", "ref": "refs/heads/ubuntu/xenial", "path": "kernel/src/simulationTools/OneStepIntegratorTypes.hpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2725" }, { "name": "C", "bytes": "4317052" }, { "name": "C++", "bytes": "8854932" }, { "name": "CMake", "bytes": "381170" }, { "name": "CSS", "bytes": "29334" }, { "name": "Fortran", "bytes": "2539066" }, { "name": "GAMS", "bytes": "5614" }, { "name": "HTML", "bytes": "4771178" }, { "name": "JavaScript", "bytes": "422105" }, { "name": "Makefile", "bytes": "11474" }, { "name": "PostScript", "bytes": "1435858" }, { "name": "Python", "bytes": "1207294" }, { "name": "Shell", "bytes": "44867" }, { "name": "TeX", "bytes": "82998" } ], "symlink_target": "" }
var Event = require('mongoose').model('Event'); module.exports = { create: function(event, callback) { Event.create(event, callback); } };
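
// Usage sketch (hypothetical caller; assumes mongoose is already connected and an
// 'Event' schema/model has been registered before this module is required):
//
//   var events = require('./events');
//   events.create({ title: 'Team sync' }, function (err, createdEvent) {
//       if (err) { /* handle validation or connection errors */ }
//   });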
{ "content_hash": "80b8d133deb0e1da7eeb6e5925be63e2", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 47, "avg_line_length": 22.142857142857142, "alnum_prop": 0.6258064516129033, "repo_name": "razsilev/TelerikAcademy_Homework", "id": "32a48c1c11e6ac94301649c5e7e61fe963ffc203", "size": "155", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "JavaScript/End to end JS/Exam End to End JS Apps/server/data/events.js", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "1604" }, { "name": "C#", "bytes": "2431431" }, { "name": "CSS", "bytes": "321817" }, { "name": "CoffeeScript", "bytes": "943" }, { "name": "HTML", "bytes": "530569" }, { "name": "JavaScript", "bytes": "1370708" }, { "name": "XSLT", "bytes": "3344" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Sockets; using System.Text; namespace Yasb.Common.Messaging.Configuration { public class SubscriptionServiceConfiguration { private int _port = 6379; private string _hostName; public SubscriptionServiceConfiguration WithHostName(string hostName) { _hostName = hostName; return this; } public EndPoint ServerAddress { get { var ipAddress = Dns.GetHostAddresses(_hostName).Where(ip => ip.AddressFamily == AddressFamily.InterNetwork).First(); return new IPEndPoint(ipAddress, _port); } } } }
{ "content_hash": "04e3d5ae4dc6db493aa48512b43ea959", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 132, "avg_line_length": 28.178571428571427, "alnum_prop": 0.596958174904943, "repo_name": "derfy/yasb", "id": "1dc7b73efb939a2062c3a018d64701c6fc61ef3b", "size": "791", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Yasb.Common/Messaging/Configuration/SubscriptionServiceConfiguration.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "203077" }, { "name": "Lua", "bytes": "1515" } ], "symlink_target": "" }
<!--- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> --- layout: page_api --- <h2>List of available tutorials</h2> <ul> {% for p in site.pages %} {% if p.is_tutorial == true %} {% if page.tag == p.tag %} <li><a href="{{ p.url | relative_url}}">{{ p.title }}</a></li> {% endif %} <!-- page-category --> {% endif %} <!-- resource-p --> {% endfor %} <!-- page --> </ul>
{ "content_hash": "e4888a9a072b8723827ffa8be59f9f96", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 66, "avg_line_length": 36.74193548387097, "alnum_prop": 0.675153643546971, "repo_name": "leezu/mxnet", "id": "38ae6def3c23b2c51e7e267095f7f61ae9ba6a73", "size": "1139", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/static_site/src/_layouts/page_landing_tutorials.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "1731" }, { "name": "Batchfile", "bytes": "13130" }, { "name": "C", "bytes": "233623" }, { "name": "C++", "bytes": "9758652" }, { "name": "CMake", "bytes": "164032" }, { "name": "Clojure", "bytes": "622640" }, { "name": "Cuda", "bytes": "1292731" }, { "name": "Dockerfile", "bytes": "101147" }, { "name": "Groovy", "bytes": "168211" }, { "name": "HTML", "bytes": "40268" }, { "name": "Java", "bytes": "205196" }, { "name": "Julia", "bytes": "445413" }, { "name": "Jupyter Notebook", "bytes": "3660357" }, { "name": "MATLAB", "bytes": "34903" }, { "name": "Makefile", "bytes": "149220" }, { "name": "Perl", "bytes": "1558421" }, { "name": "PowerShell", "bytes": "9244" }, { "name": "Python", "bytes": "9866322" }, { "name": "R", "bytes": "357982" }, { "name": "Raku", "bytes": "9012" }, { "name": "SWIG", "bytes": "161870" }, { "name": "Scala", "bytes": "1304635" }, { "name": "Shell", "bytes": "458535" }, { "name": "Smalltalk", "bytes": "3497" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <!-- You may freely edit this file. See commented blocks below for --> <!-- some examples of how to customize the build. --> <!-- (If you delete it and reopen the project it will be recreated.) --> <!-- By default, only the Clean and Build commands use this build script. --> <!-- Commands such as Run, Debug, and Test only use this build script if --> <!-- the Compile on Save feature is turned off for the project. --> <!-- You can turn off the Compile on Save (or Deploy on Save) setting --> <!-- in the project's Project Properties dialog box.--> <project name="calc" default="default" basedir="."> <description>Builds, tests, and runs the project calc.</description> <import file="nbproject/build-impl.xml"/> <!-- There exist several targets which are by default empty and which can be used for execution of your tasks. These targets are usually executed before and after some main targets. They are: -pre-init: called before initialization of project properties -post-init: called after initialization of project properties -pre-compile: called before javac compilation -post-compile: called after javac compilation -pre-compile-single: called before javac compilation of single file -post-compile-single: called after javac compilation of single file -pre-compile-test: called before javac compilation of JUnit tests -post-compile-test: called after javac compilation of JUnit tests -pre-compile-test-single: called before javac compilation of single JUnit test -post-compile-test-single: called after javac compilation of single JUunit test -pre-jar: called before JAR building -post-jar: called after JAR building -post-clean: called after cleaning build products (Targets beginning with '-' are not intended to be called on their own.) Example of inserting an obfuscator after compilation could look like this: <target name="-post-compile"> <obfuscate> <fileset dir="${build.classes.dir}"/> </obfuscate> </target> For list of available properties check the imported nbproject/build-impl.xml file. Another way to customize the build is by overriding existing main targets. The targets of interest are: -init-macrodef-javac: defines macro for javac compilation -init-macrodef-junit: defines macro for junit execution -init-macrodef-debug: defines macro for class debugging -init-macrodef-java: defines macro for class execution -do-jar: JAR building run: execution of project -javadoc-build: Javadoc generation test-report: JUnit report generation An example of overriding the target for project execution could look like this: <target name="run" depends="calc-impl.jar"> <exec dir="bin" executable="launcher.exe"> <arg file="${dist.jar}"/> </exec> </target> Notice that the overridden target depends on the jar target and not only on the compile target as the regular run target does. Again, for a list of available properties which you can use, check the target you are overriding in the nbproject/build-impl.xml file. --> </project>
{ "content_hash": "a73af8a841f9e30b750d61934e22f41a", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 86, "avg_line_length": 48.273972602739725, "alnum_prop": 0.6523836549375709, "repo_name": "LukeMcNemee/java", "id": "10987f1a716db2ab26c3191bab463fb44087dff4", "size": "3524", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "15/calc/build.xml", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "71066" }, { "name": "Java", "bytes": "182949" } ], "symlink_target": "" }
<?php namespace Blixt\Tokenization; class Token { /** * @var string */ protected $text; /** * @var int */ protected $position; /** * @var string */ protected $prefix; /** * Token constructor. * * @param string $text * @param int $position * @param string $prefix */ public function __construct(string $text, int $position, string $prefix = '') { $this->setText($text); $this->setPosition($position); $this->setPrefix($prefix); } /** * Get the text. * * @return string */ public function getText(): string { return $this->text; } /** * Set the text. * * @param string $text */ public function setText(string $text): void { $this->text = $text; } /** * Get the position. * * @return int */ public function getPosition(): int { return $this->position; } /** * Set the position. * * @param int $position */ public function setPosition(int $position): void { $this->position = $position; } /** * Get the prefix. * * @return string */ public function getPrefix(): string { return $this->prefix; } /** * Set the prefix. * * @param string $prefix */ public function setPrefix(string $prefix): void { $this->prefix = $prefix; } }
{ "content_hash": "382cf643b79195d7003fb73608fcd6d1", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 81, "avg_line_length": 16, "alnum_prop": 0.48157894736842105, "repo_name": "jrhenderson1988/blixt", "id": "8186c6bae796e3524eb8eac9fa84796979a4c840", "size": "1520", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Tokenization/Token.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "278735" } ], "symlink_target": "" }
package com.abdulradi.redikka.core import akka.actor.{ ActorSystem } import akka.io.{ IO, Tcp } import akka.util.ByteString import akka.contrib.pattern.{ShardRegion, ClusterSharding} import com.abdulradi.redikka.core.api.KeyCommand object Redikka { val ShardName = "RedikkaValueHolder" protected val idExtractor: ShardRegion.IdExtractor = { case cmd: KeyCommand => (cmd.key, cmd) } protected val shardResolver: ShardRegion.ShardResolver = msg => msg match { case cmd: KeyCommand => (math.abs(cmd.key.hashCode) % 100).toString } def init(implicit system: ActorSystem) = { system.log.debug("Redikka starting initialization. ShardName={}", ShardName) ClusterSharding(system).start( typeName = ShardName, entryProps = Some(ValueHolder.props), idExtractor = idExtractor, shardResolver = shardResolver) system.log.debug("Redikka initialization complete") } def apply(implicit system: ActorSystem) = ClusterSharding(system).shardRegion(ShardName) }
{ "content_hash": "2a0af0d269ce96492a4f00643b96b4bf", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 80, "avg_line_length": 30.11764705882353, "alnum_prop": 0.7255859375, "repo_name": "tabdulradi/redikka", "id": "12b441da49fa09a1441ec35658630ac46a8f8149", "size": "1024", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "redikka-core/src/main/scala/Redikka.scala", "mode": "33188", "license": "mit", "language": [ { "name": "Scala", "bytes": "15943" } ], "symlink_target": "" }
#include <algorithm> #include <dae.h> #include <dae/daeURI.h> #include <ctype.h> #include <dae/daeDocument.h> #include <dae/daeErrorHandler.h> #include <dae/daeUtils.h> #include <pcrecpp.h> using namespace std; using namespace cdom; void daeURI::initialize() { reset(); container = NULL; } daeURI::~daeURI() { } daeURI::daeURI(DAE& dae) : dae(&dae) { initialize(); } daeURI::daeURI(DAE& dae, const string& uriStr, daeBool nofrag) : dae(&dae) { initialize(); if (nofrag) { size_t pos = uriStr.find_last_of('#'); if (pos != string::npos) { set(uriStr.substr(0, pos)); return; } } set(uriStr); } daeURI::daeURI(const daeURI& baseURI, const string& uriStr) : dae(baseURI.getDAE()) { initialize(); set(uriStr, &baseURI); } daeURI::daeURI(const daeURI& copyFrom_) : dae(copyFrom_.getDAE()), container(NULL) { initialize(); copyFrom(copyFrom_); } daeURI::daeURI(daeElement& container_, const std::string& uriStr) : dae(container_.getDAE()) { initialize(); container = &container_; set(uriStr); } daeURI::daeURI(DAE& dae, daeElement& container_, const string& uriStr) : dae(&dae) { initialize(); container = &container_; set(uriStr); } void daeURI::copyFrom(const daeURI& copyFrom) { if (!container) container = copyFrom.container; set(copyFrom.originalStr()); } daeURI& daeURI::operator=(const daeURI& other) { copyFrom(other); return *this; } daeURI& daeURI::operator=(const string& uriStr) { set(uriStr); return *this; } void daeURI::reset() { // Clear everything except the container, which doesn't change for the lifetime of the daeURI uriString = ""; originalURIString = ""; _scheme = ""; _authority = ""; _path = ""; _query = ""; _fragment = ""; } DAE* daeURI::getDAE() const { return dae; } const string& daeURI::str() const { return uriString; } const string& daeURI::originalStr() const { return originalURIString; } daeString daeURI::getURI() const { return str().c_str(); } daeString daeURI::getOriginalURI() const { return originalStr().c_str(); } namespace { void parsePath(const string& path, /* out */ string& dir, /* out */ string& baseName, /* out */ string& extension) { // !!!steveT Currently, if we have a file name that begins with a '.', as in // ".emacs", that will be treated as having no base name with an extension // of ".emacs". We might want to change this behavior, so that the base name // is considered ".emacs" and the extension is empty. I think this is more // in line with what path parsers in other libraries/languages do, and it // more accurately reflects the intended structure of the file name. // The following implementation cannot handle paths like this: // /tmp/se.3/file //static pcrecpp::RE re("(.*/)?([^.]*)?(\\..*)?"); //dir = baseName = extension = ""; //re.FullMatch(path, &dir, &baseName, &extension); static pcrecpp::RE findDir("(.*/)?(.*)?"); static pcrecpp::RE findExt("([^.]*)?(\\..*)?"); string tmpFile; dir = baseName = extension = tmpFile = ""; findDir.PartialMatch(path, &dir, &tmpFile); findExt.PartialMatch(tmpFile, &baseName, &extension); } } void daeURI::set(const string& uriStr_, const daeURI* baseURI) { // We make a copy of the uriStr so that set(originalURIString, ...) works properly. 
string uriStr = uriStr_; reset(); originalURIString = uriStr; if (!parseUriRef(uriStr, _scheme, _authority, _path, _query, _fragment)) { reset(); return; } validate(baseURI); } void daeURI::set(const string& scheme_, const string& authority_, const string& path_, const string& query_, const string& fragment_, const daeURI* baseURI) { set(assembleUri(scheme_, authority_, path_, query_, fragment_), baseURI); } void daeURI::setURI(daeString _URIString, const daeURI* baseURI) { string uriStr = _URIString ? _URIString : ""; set(uriStr, baseURI); } const string& daeURI::scheme() const { return _scheme; } const string& daeURI::authority() const { return _authority; } const string& daeURI::path() const { return _path; } const string& daeURI::query() const { return _query; } const string& daeURI::fragment() const { return _fragment; } const string& daeURI::id() const { return fragment(); } namespace { string addSlashToEnd(const string& s) { return (!s.empty() && s[s.length()-1] != '/') ? s + '/' : s; } } void daeURI::pathComponents(string& dir, string& baseName, string& ext) const { parsePath(_path, dir, baseName, ext); } string daeURI::pathDir() const { string dir, base, ext; parsePath(_path, dir, base, ext); return dir; } string daeURI::pathFileBase() const { string dir, base, ext; parsePath(_path, dir, base, ext); return base; } string daeURI::pathExt() const { string dir, base, ext; parsePath(_path, dir, base, ext); return ext; } string daeURI::pathFile() const { string dir, base, ext; parsePath(_path, dir, base, ext); return base + ext; } void daeURI::path(const string& dir, const string& baseName, const string& ext) { path(addSlashToEnd(dir) + baseName + ext); } void daeURI::pathDir(const string& dir) { string tmp, base, ext; parsePath(_path, tmp, base, ext); path(addSlashToEnd(dir), base, ext); } void daeURI::pathFileBase(const string& baseName) { string dir, tmp, ext; parsePath(_path, dir, tmp, ext); path(dir, baseName, ext); } void daeURI::pathExt(const string& ext) { string dir, base, tmp; parsePath(_path, dir, base, tmp); path(dir, base, ext); } void daeURI::pathFile(const string& file) { string dir, base, ext; parsePath(_path, dir, base, ext); path(dir, file, ""); } daeString daeURI::getScheme() const { return _scheme.c_str(); } daeString daeURI::getProtocol() const { return getScheme(); } daeString daeURI::getAuthority() const { return _authority.c_str(); } daeString daeURI::getPath() const { return _path.c_str(); } daeString daeURI::getQuery() const { return _query.c_str(); } daeString daeURI::getFragment() const { return _fragment.c_str(); } daeString daeURI::getID() const { return getFragment(); } daeBool daeURI::getPath(daeChar *dest, daeInt size) const { if (int(_path.length()) < size) { strcpy(dest, _path.c_str()); return true; } return false; } void daeURI::scheme(const string& scheme_) { set(scheme_, _authority, _path, _query, _fragment); }; void daeURI::authority(const string& authority_) { set(_scheme, authority_, _path, _query, _fragment); } void daeURI::path(const string& path_) { set(_scheme, _authority, path_, _query, _fragment); } void daeURI::query(const string& query_) { set(_scheme, _authority, _path, query_, _fragment); } void daeURI::fragment(const string& fragment_) { set(_scheme, _authority, _path, _query, fragment_); } void daeURI::id(const string& id) { fragment(id); } void daeURI::print() { fprintf(stderr,"URI(%s)\n",uriString.c_str()); fprintf(stderr,"scheme = %s\n",_scheme.c_str()); fprintf(stderr,"authority = %s\n",_authority.c_str()); fprintf(stderr,"path = 
%s\n",_path.c_str()); fprintf(stderr,"query = %s\n",_query.c_str()); fprintf(stderr,"fragment = %s\n",_fragment.c_str()); fprintf(stderr,"URI without base = %s\n",originalURIString.c_str()); fflush(stderr); } namespace { void normalize(string& path) { daeURI::normalizeURIPath(const_cast<char*>(path.c_str())); path = path.substr(0, strlen(path.c_str())); } } void daeURI::validate(const daeURI* baseURI) { // If no base URI was supplied, use the container's document URI. If there's // no container or the container doesn't have a doc URI, use the application // base URI. if (!baseURI) { if (container) { if (container->getDocument()) { if (container->getDocument()->isZAERootDocument()) baseURI = &container->getDocument()->getExtractedFileURI(); else baseURI = container->getDocumentURI(); } } if (!baseURI) baseURI = &dae->getBaseURI(); if (this == baseURI) return; } // This is rewritten according to the updated rfc 3986 if (!_scheme.empty()) // if defined(R.scheme) then { // Everything stays the same except path which we normalize // T.scheme = R.scheme; // T.authority = R.authority; // T.path = remove_dot_segments(R.path); // T.query = R.query; normalize(_path); } else { if (!_authority.empty()) // if defined(R.authority) then { // Authority and query stay the same, path is normalized // T.authority = R.authority; // T.path = remove_dot_segments(R.path); // T.query = R.query; normalize(_path); } else { if (_path.empty()) // if (R.path == "") then { // T.path = Base.path; _path = baseURI->_path; //if defined(R.query) then // T.query = R.query; //else // T.query = Base.query; //endif; if (_query.empty()) _query = baseURI->_query; } else { if (_path[0] == '/') // if (R.path starts-with "/") then { // T.path = remove_dot_segments(R.path); normalize(_path); } else { // T.path = merge(Base.path, R.path); if (!baseURI->_authority.empty() && baseURI->_path.empty()) // authority defined, path empty _path.insert(0, "/"); else { string dir, baseName, ext; parsePath(baseURI->_path, dir, baseName, ext); _path = dir + _path; } // T.path = remove_dot_segments(T.path); normalize(_path); } // T.query = R.query; } // T.authority = Base.authority; _authority = baseURI->_authority; } // T.scheme = Base.scheme; _scheme = baseURI->_scheme; } // T.fragment = R.fragment; // Reassemble all this into a string version of the URI uriString = assembleUri(_scheme, _authority, _path, _query, _fragment); } daeElementRef daeURI::getElement() const { return internalResolveElement(); } daeElement* daeURI::internalResolveElement() const { if (uriString.empty()) return NULL; return dae->getURIResolvers().resolveElement(*this); } void daeURI::resolveElement() { } void daeURI::setContainer(daeElement* cont) { container = cont; // Since we have a new container element, the base URI may have changed. Re-resolve. set(originalURIString); } daeBool daeURI::isExternalReference() const { if (uriString.empty()) return false; if (container && container->getDocumentURI()) { daeURI* docURI = container->getDocumentURI(); if (_path != docURI->_path || _scheme != docURI->_scheme || _authority != docURI->_authority) { return true; } } return false; } daeDocument* daeURI::getReferencedDocument() const { string doc = assembleUri(_scheme, _authority, _path, "", ""); return dae->getDatabase()->getDocument(doc.c_str(), true); } daeURI::ResolveState daeURI::getState() const { return uriString.empty() ? 
uri_empty : uri_loaded; } void daeURI::setState(ResolveState newState) { } // This code is loosely based on the RFC 2396 normalization code from // libXML. Specifically it does the RFC steps 6.c->6.g from section 5.2 // The path is modified in place, there is no error return. void daeURI::normalizeURIPath(char* path) { char *cur, // location we are currently processing *out; // Everything from this back we are done with // Return if the path pointer is null if (path == NULL) return; // Skip any initial / characters to get us to the start of the first segment for(cur=path; *cur == '/'; cur++); // Return if we hit the end of the string if (*cur == 0) return; // Keep everything we've seen so far. out = cur; // Analyze each segment in sequence for cases (c) and (d). while (*cur != 0) { // (c) All occurrences of "./", where "." is a complete path segment, are removed from the buffer string. if ((*cur == '.') && (*(cur+1) == '/')) { cur += 2; // If there were multiple slashes, skip them too while (*cur == '/') cur++; continue; } // (d) If the buffer string ends with "." as a complete path segment, that "." is removed. if ((*cur == '.') && (*(cur+1) == 0)) break; // If we passed the above tests copy the segment to the output side while (*cur != '/' && *cur != 0) { *(out++) = *(cur++); } if(*cur != 0) { // Skip any occurrances of // at the end of the segment while ((*cur == '/') && (*(cur+1) == '/')) cur++; // Bring the last character in the segment (/ or a null terminator) into the output *(out++) = *(cur++); } } *out = 0; // Restart at the beginning of the first segment for the next part for(cur=path; *cur == '/'; cur++); if (*cur == 0) return; // Analyze each segment in sequence for cases (e) and (f). // // e) All occurrences of "<segment>/../", where <segment> is a // complete path segment not equal to "..", are removed from the // buffer string. Removal of these path segments is performed // iteratively, removing the leftmost matching pattern on each // iteration, until no matching pattern remains. // // f) If the buffer string ends with "<segment>/..", where <segment> // is a complete path segment not equal to "..", that // "<segment>/.." is removed. // // To satisfy the "iterative" clause in (e), we need to collapse the // string every time we find something that needs to be removed. Thus, // we don't need to keep two pointers into the string: we only need a // "current position" pointer. // while (true) { char *segp, *tmp; // At the beginning of each iteration of this loop, "cur" points to // the first character of the segment we want to examine. // Find the end of the current segment. for(segp = cur;(*segp != '/') && (*segp != 0); ++segp); // If this is the last segment, we're done (we need at least two // segments to meet the criteria for the (e) and (f) cases). if (*segp == 0) break; // If the first segment is "..", or if the next segment _isn't_ "..", // keep this segment and try the next one. ++segp; if (((*cur == '.') && (cur[1] == '.') && (segp == cur+3)) || ((*segp != '.') || (segp[1] != '.') || ((segp[2] != '/') && (segp[2] != 0)))) { cur = segp; continue; } // If we get here, remove this segment and the next one and back up // to the previous segment (if there is one), to implement the // "iteratively" clause. It's pretty much impossible to back up // while maintaining two pointers into the buffer, so just compact // the whole buffer now. // If this is the end of the buffer, we're done. 
if (segp[2] == 0) { *cur = 0; break; } // Strings overlap during this copy, but not in a bad way, just avoid using strcpy tmp = cur; segp += 3; while ((*(tmp++) = *(segp++)) != 0); // If there are no previous segments, then keep going from here. segp = cur; while ((segp > path) && (*(--segp) == '/')); if (segp == path) continue; // "segp" is pointing to the end of a previous segment; find it's // start. We need to back up to the previous segment and start // over with that to handle things like "foo/bar/../..". If we // don't do this, then on the first pass we'll remove the "bar/..", // but be pointing at the second ".." so we won't realize we can also // remove the "foo/..". for(cur = segp;(cur > path) && (*(cur-1) != '/'); cur--); } *out = 0; // g) If the resulting buffer string still begins with one or more // complete path segments of "..", then the reference is // considered to be in error. Implementations may handle this // error by retaining these components in the resolved path (i.e., // treating them as part of the final URI), by removing them from // the resolved path (i.e., discarding relative levels above the // root), or by avoiding traversal of the reference. // // We discard them from the final path. if (*path == '/') { for(cur=path; (*cur == '/') && (cur[1] == '.') && (cur[2] == '.') && ((cur[3] == '/') || (cur[3] == 0)); cur += 3); if (cur != path) { for(out=path; *cur != 0; *(out++) = *(cur++)); *out = 0; } } return; } // This function will take a resolved URI and create a version of it that is relative to // another existing URI. The new URI is stored in the "originalURI" int daeURI::makeRelativeTo(const daeURI* relativeToURI) { // Can only do this function if both URIs have the same scheme and authority if (_scheme != relativeToURI->_scheme || _authority != relativeToURI->_authority) return DAE_ERR_INVALID_CALL; // advance till we find a segment that doesn't match const char *this_path = getPath(); const char *relativeTo_path = relativeToURI->getPath(); const char *this_slash = this_path; const char *relativeTo_slash = relativeTo_path; while((*this_path == *relativeTo_path) && *this_path) { if(*this_path == '/') { this_slash = this_path; relativeTo_slash = relativeTo_path; } this_path++; relativeTo_path++; } // Decide how many ../ segments are needed (Filepath should always end in a /) int segment_count = 0; relativeTo_slash++; while(*relativeTo_slash != 0) { if(*relativeTo_slash == '/') segment_count ++; relativeTo_slash++; } this_slash++; string newPath; for (int i = 0; i < segment_count; i++) newPath += "../"; newPath += this_slash; set("", "", newPath, _query, _fragment, relativeToURI); return(DAE_OK); } daeBool daeURIResolver::_loadExternalDocuments = true; daeURIResolver::daeURIResolver(DAE& dae) : dae(&dae) { } daeURIResolver::~daeURIResolver() { } void daeURIResolver::setAutoLoadExternalDocuments( daeBool load ) { _loadExternalDocuments = load; } daeBool daeURIResolver::getAutoLoadExternalDocuments() { return _loadExternalDocuments; } daeURIResolverList::daeURIResolverList() { } daeURIResolverList::~daeURIResolverList() { for (size_t i = 0; i < resolvers.getCount(); i++) delete resolvers[i]; } daeTArray<daeURIResolver*>& daeURIResolverList::list() { return resolvers; } daeElement* daeURIResolverList::resolveElement(const daeURI& uri) { for (size_t i = 0; i < resolvers.getCount(); i++) if (daeElement* elt = resolvers[i]->resolveElement(uri)) return elt; return NULL; } // Returns true if parsing succeeded, false otherwise. 
Parsing can fail if the uri // reference isn't properly formed. bool cdom::parseUriRef(const string& uriRef, string& scheme, string& authority, string& path, string& query, string& fragment) { // This regular expression for parsing URI references comes from the URI spec: // http://tools.ietf.org/html/rfc3986#appendix-B static pcrecpp::RE re("^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?"); string s1, s3, s6, s8; if (re.FullMatch(uriRef, &s1, &scheme, &s3, &authority, &path, &s6, &query, &s8, &fragment)) return true; return false; } namespace { string safeSubstr(const string& s, size_t offset, size_t length) { string result = s.substr(offset, min(length, s.length() - offset)); result.resize(length, '\0'); return result; } } string cdom::assembleUri(const string& scheme, const string& authority, const string& path, const string& query, const string& fragment, bool forceLibxmlCompatible) { string p = safeSubstr(path, 0, 3); bool libxmlHack = forceLibxmlCompatible && scheme == "file"; bool uncPath = false; string uri; if (!scheme.empty()) uri += scheme + ":"; if (!authority.empty() || libxmlHack || (p[0] == '/' && p[1] == '/')) uri += "//"; if (!authority.empty()) { if (libxmlHack) { // We have a UNC path URI of the form file://otherMachine/file.dae. // Convert it to file://///otherMachine/file.dae, which is how libxml // does UNC paths. uri += "///" + authority; uncPath = true; } else { uri += authority; } } if (!uncPath && libxmlHack && getSystemType() == Windows) { // We have to be delicate in how we pass absolute path URIs to libxml on Windows. // If the path is an absolute path with no drive letter, add an extra slash to // appease libxml. if (p[0] == '/' && p[1] != '/' && p[2] != ':') { uri += "/"; } } uri += path; if (!query.empty()) uri += "?" 
+ query; if (!fragment.empty()) uri += "#" + fragment; return uri; } string cdom::fixUriForLibxml(const string& uriRef) { string scheme, authority, path, query, fragment; cdom::parseUriRef(uriRef, scheme, authority, path, query, fragment); return assembleUri(scheme, authority, path, query, fragment, true); } string cdom::nativePathToUri(const string& nativePath, systemType type) { string uri = nativePath; if (type == Windows) { // Convert "c:\" to "/c:/" if (uri.length() >= 2 && isalpha(uri[0]) && uri[1] == ':') uri.insert(0, "/"); // Convert backslashes to forward slashes uri = replace(uri, "\\", "/"); } // Convert spaces to %20 uri = replace(uri, " ", "%20"); return uri; } string cdom::filePathToUri(const string& filePath) { return nativePathToUri(filePath); } string cdom::uriToNativePath(const string& uriRef, systemType type) { string scheme, authority, path, query, fragment; parseUriRef(uriRef, scheme, authority, path, query, fragment); // Make sure we have a file scheme URI, or that it doesn't have a scheme if (!scheme.empty() && scheme != "file") return ""; string filePath; if (type == Windows) { if (!authority.empty()) filePath += string("\\\\") + authority; // UNC path // Replace two leading slashes with one leading slash, so that // ///otherComputer/file.dae becomes //otherComputer/file.dae and // //folder/file.dae becomes /folder/file.dae if (path.length() >= 2 && path[0] == '/' && path[1] == '/') path.erase(0, 1); // Convert "/C:/" to "C:/" if (path.length() >= 3 && path[0] == '/' && path[2] == ':') path.erase(0, 1); // Convert forward slashes to back slashes path = replace(path, "/", "\\"); } filePath += path; // Replace %20 with space filePath = replace(filePath, "%20", " "); return filePath; } string cdom::uriToFilePath(const string& uriRef) { return uriToNativePath(uriRef); }
{ "content_hash": "819070809f8b90cee5812ba345b8e289", "timestamp": "", "source": "github", "line_count": 830, "max_line_length": 117, "avg_line_length": 27.1144578313253, "alnum_prop": 0.6211952899355698, "repo_name": "veter-team/daeview", "id": "c9b6936764ec952f0a8585b1f702ddd18621df5c", "size": "22713", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/collada-dom/dae/daeURI.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "2402158" }, { "name": "C++", "bytes": "8792611" }, { "name": "Objective-C", "bytes": "100424" }, { "name": "Python", "bytes": "949" } ], "symlink_target": "" }
var interval;

onmessage = function(e) {
  clearTimeout(interval);
  startTimer(e.data * 60);
}

function startTimer(duration) {
  var start = Date.now(),
      diff,
      minutes,
      seconds;

  function timer() {
    diff = duration - (((Date.now() - start) / 1000) | 0);
    minutes = (diff / 60) | 0;
    seconds = (diff % 60) | 0;

    minutes = minutes < 10 ? '0' + minutes : minutes;
    seconds = seconds < 10 ? '0' + seconds : seconds;

    postMessage({ finished: false, time: minutes + ':' + seconds });

    if (diff <= 0) {
      clearTimeout(interval);
      postMessage({ finished: true, time: minutes + ':' + seconds });
    }
  };

  timer();
  interval = setInterval(timer, 1000);
}
{ "content_hash": "d10ecd104b4302a7025d35030f24818d", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 69, "avg_line_length": 22.933333333333334, "alnum_prop": 0.5813953488372093, "repo_name": "davidmogar/mela", "id": "8db3682a6719e3974f0ef214b58b5b0b7e912aa5", "size": "688", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/timer-worker.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3997" }, { "name": "HTML", "bytes": "1936" }, { "name": "JavaScript", "bytes": "7312" } ], "symlink_target": "" }
package org.apache.rya.joinselect.mr.utils; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.WritableComparable; public class CardinalityType implements WritableComparable<CardinalityType> { private LongWritable card; private Text cardType; private LongWritable ts; public CardinalityType() { card = new LongWritable(); cardType = new Text(); ts = new LongWritable(); } public CardinalityType(int card, String cardType, long ts) { this.card = new LongWritable(card); this.cardType = new Text(cardType); this.ts = new LongWritable(ts); } public CardinalityType(LongWritable card, Text cardType, LongWritable ts) { this.card = card; this.ts = ts; this.cardType = cardType; } public void set(CardinalityType ct) { this.card.set(ct.card.get()); this.ts.set(ct.ts.get()); this.cardType.set(ct.cardType); } public void setCard(LongWritable card) { this.card = card; } public void setCardType(Text cardType) { this.cardType = cardType; } public void setTS(LongWritable ts) { this.ts = ts; } public LongWritable getCard() { return this.card; } public Text getCardType() { return this.cardType; } public LongWritable getTS() { return this.ts; } @Override public void write(DataOutput out) throws IOException { card.write(out); cardType.write(out); ts.write(out); } @Override public void readFields(DataInput in) throws IOException { card.readFields(in); cardType.readFields(in); ts.readFields(in); } @Override public int hashCode() { int result = 7; result = result * 17 + card.hashCode(); result = result * 17 + cardType.hashCode(); result = result * 17 + ts.hashCode(); return result; } @Override public boolean equals(Object o) { if (o instanceof CardinalityType) { CardinalityType trip = (CardinalityType) o; return card.equals(trip.card) && cardType.equals(trip.cardType) && ts.equals(trip.ts); } return false; } @Override public String toString() { return card + " " + cardType + " " + ts; } @Override public int compareTo(CardinalityType o) { int cmp = cardType.compareTo(o.cardType); if (cmp != 0) { return cmp; } cmp = ts.compareTo(o.ts); if (cmp != 0) { return cmp; } return card.compareTo(o.card); } }
{ "content_hash": "c35d7406bdd2836c6527b2f5ae4ac1b6", "timestamp": "", "source": "github", "line_count": 132, "max_line_length": 92, "avg_line_length": 19.303030303030305, "alnum_prop": 0.6534536891679749, "repo_name": "amihalik/incubator-rya", "id": "e327f3832213beafb030fdd884c3a3e2ecb2d692", "size": "3357", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "extras/rya.prospector/src/main/java/org/apache/rya/joinselect/mr/utils/CardinalityType.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7242" }, { "name": "FreeMarker", "bytes": "755" }, { "name": "Java", "bytes": "8666462" }, { "name": "JavaScript", "bytes": "1059" }, { "name": "Ruby", "bytes": "17096" }, { "name": "Shell", "bytes": "8335" }, { "name": "XSLT", "bytes": "7341" } ], "symlink_target": "" }
function CowTransportShip() {
	TransportShip.call(this);

	this._cowzillaProb = g_config.enemies.cowTransport.cowzillaProbability;
	this._megaProb = g_config.enemies.cowTransport.megacowProbability;
	this._santaProb = g_config.enemies.cowTransport.santacowProbability;
}
CowTransportShip.inherits(TransportShip);

CowTransportShip.prototype.unloadSoldier = function() {
	var gsx = g_game.getGridSizeX();
	var gsy = g_game.getGridSizeY();

	this._num_soldiers--;

	if (g_game.wantCowzilla()) {
		g_game.spawnCowzilla(this.getX() + this._unload_offset_x - 32 + Math.rand(-gsx, 0), this.getY() + this._unload_offset_y + Math.rand(-gsy, gsy));
	} else {
		var r = Math.random();
		if (r < this._cowzillaProb) {
			g_game.spawnCowzilla(this.getX() + this._unload_offset_x - 32 + Math.rand(-gsx, 0), this.getY() + this._unload_offset_y + Math.rand(-gsy, gsy));
		} else if (r < this._megaProb) {
			g_game.spawnMegaCow(this.getX() + this._unload_offset_x + Math.rand(-gsx, 0), this.getY() + this._unload_offset_y + Math.rand(-gsy, gsy));
		} else if (r < this._santaProb) {
			g_game.spawnSantaCow(this.getX() + this._unload_offset_x + Math.rand(-gsx, 0), this.getY() + this._unload_offset_y + Math.rand(-gsy, gsy));
		} else {
			g_game.spawnCow(this.getX() + this._unload_offset_x + Math.rand(-gsx, 0), this.getY() + this._unload_offset_y + Math.rand(-gsy, gsy));
		}
	}

	this._unload_timer.stop();
	if (this._num_soldiers > 0) {
		this._unload_timer.reset().start();
	} else {
		this.endLanding();
	}
};
{ "content_hash": "e6b244716df7db49fb9c426ad6aa822e", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 145, "avg_line_length": 37.87179487179487, "alnum_prop": 0.6844955991875423, "repo_name": "superarts/JekyllMetro", "id": "f111aaf5243bb65ae4094eeeef66f0b862834e3a", "size": "1672", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "games/acow/js/game/CowTransportShip.js", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "765" }, { "name": "C", "bytes": "75732" }, { "name": "C++", "bytes": "1200276" }, { "name": "CSS", "bytes": "268411" }, { "name": "HTML", "bytes": "161907" }, { "name": "JavaScript", "bytes": "5252527" }, { "name": "PHP", "bytes": "16934" }, { "name": "Protocol Buffer", "bytes": "19802" }, { "name": "Python", "bytes": "65156" }, { "name": "QMake", "bytes": "3137" }, { "name": "Ruby", "bytes": "56567" }, { "name": "Shell", "bytes": "1910" }, { "name": "TypeScript", "bytes": "45573" } ], "symlink_target": "" }
using namespace ::testing;

// Actor used for testing
namespace TestActorMessage {
    struct GetThreadID {
    };

    struct ExecFunc {
        ExecFunc( std::function<void(void)> _func ) : func( _func ){}
        std::function<void( void )> func;
    };

    typedef boost::variant<GetThreadID, ExecFunc> Message;
};

class TestActor : public ActorBase<TestActorMessage::Message>
{
public:
    boost::signals2::signal<void( boost::thread::id )> changeThreadID;

    TestActor( void ) : ActorBase(){}

    void connectChangeThreadID( std::function<void( boost::thread::id )> func )
    {
        changeThreadID.connect( func );
    }

private:
    void processMessage( std::shared_ptr<TestActorMessage::Message> msg )
    {
        MessageVisitor mv( this );
        boost::apply_visitor( mv, *msg );
    }

    class MessageVisitor : public boost::static_visitor < void >
    {
    public:
        MessageVisitor( TestActor* const obj ) : base( obj ){}

        void operator()( const TestActorMessage::GetThreadID& msg ) const
        {
            base->changeThreadID( boost::this_thread::get_id() );
        }

        void operator()( const TestActorMessage::ExecFunc& msg ) const
        {
            msg.func();
        }

    private:
        TestActor* const base;
    };
};

class ActorTest : public ::testing::Test
{
};

TEST_F( ActorTest, noSpawnTest )
{
    auto p = std::make_shared<TestActor>();

    boost::thread::id threadID;
    p->connectChangeThreadID( [p,&threadID]( const boost::thread::id& i ){
        p->entry( TestActorMessage::ExecFunc( [i,&threadID]( void ){ threadID = i; } ) );
    } );

    p->entry( TestActorMessage::GetThreadID() );

    bool ret;
    ret = p->receive(); // executes GetThreadID
    ASSERT_TRUE( ret );
    ret = p->receive(); // executes SetThreadID
    ASSERT_TRUE( ret );

    ASSERT_THAT( threadID, Eq( boost::this_thread::get_id() ) );
}

namespace AnotherActorMessage {
    class AnotherActor;

    struct GetThreadID {
    };

    typedef boost::variant<GetThreadID> Message;

    class MessageVisitor;
};

class AnotherActor : public Actor<AnotherActorMessage::Message>
{
public:
    boost::signals2::signal<void( boost::thread::id )> changeThreadID;

    AnotherActor( void ) : Actor() {}
    ~AnotherActor( void )
    {
    }

    void connectChangeThreadID( std::function<void( boost::thread::id )> func )
    {
        changeThreadID.connect( func );
    }

    boost::thread::id getThreadID( void ) const
    {
        return th.get_id();
    }

private:
    void processMessage( std::shared_ptr<AnotherActorMessage::Message> msg )
    {
        MessageVisitor mv( this );
        boost::apply_visitor( mv, *msg );
    }

    class MessageVisitor : public boost::static_visitor < void >
    {
    public:
        MessageVisitor( ::AnotherActor* const obj ) : base( obj ){}

        void operator()( const AnotherActorMessage::GetThreadID& msg ) const
        {
            base->changeThreadID( boost::this_thread::get_id() );
        }

    private:
        ::AnotherActor* const base;
    };
};

TEST_F( ActorTest, spawnTest )
{
    auto aActor = std::make_shared<AnotherActor>();
    ASSERT_THAT( aActor->getThreadID(), Ne( boost::this_thread::get_id() ) );

    auto tActor = std::make_shared<TestActor>();

    boost::thread::id threadID;
    aActor->connectChangeThreadID( [tActor,&threadID]( const boost::thread::id& i ) {
        tActor->entry( TestActorMessage::ExecFunc( [i, &threadID]( void ) { threadID = i; } ) );
    } );

    aActor->entry( AnotherActorMessage::GetThreadID() );

    while ( !tActor->receive() ){}

    ASSERT_THAT( threadID, aActor->getThreadID() );
}
{ "content_hash": "6694c126690f478748aae1ca4d28bc96", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 82, "avg_line_length": 20.848101265822784, "alnum_prop": 0.6733454766241651, "repo_name": "toshimana/Actor", "id": "383979c0a0a29696bea408ddadfe69b838ae714d", "size": "3466", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/ActorTest.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "6111" }, { "name": "CMake", "bytes": "911" } ], "symlink_target": "" }
package com.warkiz.widget;

import android.content.Context;
import android.util.TypedValue;

/**
 * created by zhuangguangquan on 2017/9/9
 */
public class SizeUtils {

    public static int dp2px(Context context, float dpValue) {
        return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dpValue, context.getResources().getDisplayMetrics());
    }

    public static int sp2px(Context context, float spValue) {
        return (int) (spValue * context.getResources().getDisplayMetrics().scaledDensity + 0.5f);
    }

    public static int px2sp(Context context, float pxValue) {
        return (int) (pxValue / context.getResources().getDisplayMetrics().scaledDensity + 0.5f);
    }
}
{ "content_hash": "a00f0ab456e74dd55721dd06f37ef575", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 129, "avg_line_length": 30.652173913043477, "alnum_prop": 0.7120567375886525, "repo_name": "warkiz/IndicatorSeekBar", "id": "6925889c1c338359e1527dcf00c4c34dcf3ac562", "size": "705", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "indicatorseekbar/src/main/java/com/warkiz/widget/SizeUtils.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "168330" } ], "symlink_target": "" }
* Continuously replicates data between clusters using DCP (Database Change Protocol)

<p>
<img src="../../media/Xdcr.png" style="display: block; margin-left: auto; margin-right: auto"/>
</p>

* Version 1 uses the REST protocol for replication, which is important for the Elasticsearch plug-in
* Version 2 uses the memcached binary protocol, a higher-performance mode
* May require more cores and memory; firewall and TLS considerations are important
{ "content_hash": "5651343ac69dff2d2523728dd05e0224", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 87, "avg_line_length": 42.8, "alnum_prop": 0.764018691588785, "repo_name": "vkhazin/couchbase-courseware", "id": "e0cd24f862df7defb3a334efbf7eb965eb061226", "size": "470", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "content/chapter-04/Xdcr.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "43925" }, { "name": "HTML", "bytes": "1869206" }, { "name": "JavaScript", "bytes": "18901" } ], "symlink_target": "" }
function status = IDAAdjReInit()
%IDAAdjReInit re-initializes memory for ASA with IDAS.
%
% Usage: IDAAdjReInit
%

% Radu Serban <[email protected]>
% LLNS Copyright Start
% Copyright (c) 2014, Lawrence Livermore National Security
% This work was performed under the auspices of the U.S. Department
% of Energy by Lawrence Livermore National Laboratory in part under
% Contract W-7405-Eng-48 and in part under Contract DE-AC52-07NA27344.
% Produced at the Lawrence Livermore National Laboratory.
% All rights reserved.
% For details, see the LICENSE file.
% LLNS Copyright End
% $Revision: 4075 $Date: 2007/08/21 17:38:42 $

mode = 14;
status = idm(mode);
{ "content_hash": "4b45940b5d12322184e8dd03dbf59909", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 70, "avg_line_length": 31.38095238095238, "alnum_prop": 0.7541729893778453, "repo_name": "dflowers7/kroneckerbio", "id": "6ac09f9c545b0bae29096f38a977e513c940bfaa", "size": "659", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "External/sundialsTB/idas/IDAAdjReInit.m", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "7895963" }, { "name": "C++", "bytes": "85779" }, { "name": "CMake", "bytes": "311645" }, { "name": "Fortran", "bytes": "225937" }, { "name": "HTML", "bytes": "251012" }, { "name": "M", "bytes": "8171" }, { "name": "Matlab", "bytes": "2911733" }, { "name": "Mercury", "bytes": "68" }, { "name": "Objective-C", "bytes": "340" }, { "name": "Python", "bytes": "27293" }, { "name": "mupad", "bytes": "3767" } ], "symlink_target": "" }
""" sentry.runner.commands.init ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import, print_function import os import click @click.command() @click.argument('directory', required=False) @click.pass_context def init(ctx, directory): "Initialize new configuration directory." from sentry.runner.settings import discover_configs, generate_settings if directory is not None: os.environ['SENTRY_CONF'] = directory directory, py, yaml = discover_configs() # In this case, the config is pointing directly to a file, so we # must maintain old behavior, and just abort if yaml is None and os.path.isfile(py): # TODO: Link to docs explaining about new behavior of SENTRY_CONF? raise click.ClickException("Found legacy '%s' file, so aborting." % click.format_filename(py)) if yaml is None: raise click.ClickException("DIRECTORY must not be a file.") if directory and not os.path.exists(directory): os.makedirs(directory) py_contents, yaml_contents = generate_settings() if os.path.isfile(yaml): click.confirm("File already exists at '%s', overwrite?" % click.format_filename(yaml), abort=True) with click.open_file(yaml, 'w') as fp: fp.write(yaml_contents) if os.path.isfile(py): click.confirm("File already exists at '%s', overwrite?" % click.format_filename(py), abort=True) with click.open_file(py, 'w') as fp: fp.write(py_contents)
{ "content_hash": "1bde408776522c9b6863025b980f4cf5", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 106, "avg_line_length": 32.6530612244898, "alnum_prop": 0.67625, "repo_name": "nicholasserra/sentry", "id": "98ece4d40b797013ab8d6958e0514cdb9e0437b0", "size": "1600", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/sentry/runner/commands/init.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "174940" }, { "name": "HTML", "bytes": "199996" }, { "name": "JavaScript", "bytes": "609445" }, { "name": "Lua", "bytes": "21966" }, { "name": "Makefile", "bytes": "4816" }, { "name": "Python", "bytes": "8613631" } ], "symlink_target": "" }
module.exports = function(grunt){ require('allex-webalizer').grunt_web_component(grunt); };
{ "content_hash": "f76977d0498014e8aa06b2dc3909f912", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 56, "avg_line_length": 31.333333333333332, "alnum_prop": 0.7340425531914894, "repo_name": "allexjs/sdk", "id": "70cc27ff544ab1ba188d335894799222845aebac", "size": "94", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "templates/webalizer/component/generic/Gruntfile.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "7370" }, { "name": "JavaScript", "bytes": "123909" }, { "name": "Ruby", "bytes": "904" }, { "name": "SCSS", "bytes": "33543" }, { "name": "Shell", "bytes": "15209" } ], "symlink_target": "" }
mocha --check-leaks --reporter spec $(find test -name "*.js")
{ "content_hash": "e85ee3d2eec70b09ab4518688132f52b", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 61, "avg_line_length": 62, "alnum_prop": 0.6612903225806451, "repo_name": "joegoldbeck/move-modulate", "id": "b96bfc4a2fb8fc2870be4f8fc05da6c754b87edb", "size": "99", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test.sh", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "172891" }, { "name": "JavaScript", "bytes": "1240849" }, { "name": "Shell", "bytes": "99" } ], "symlink_target": "" }
package org.nutz.lang.util;

import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Reads class files to obtain the formal parameter names of methods.
 *
 * @author wendal([email protected])
 */
public class MethodParamNamesScaner {

    /**
     * Gets the list of formal parameter names of a Method.
     *
     * @param method the method to analyze
     * @return the list of parameter names, or null if no debug information is available
     */
    public static List<String> getParamNames(Method method) {
        try {
            int size = method.getParameterTypes().length;
            if (size == 0)
                return new ArrayList<String>(0);
            List<String> list = ClassMetaReader.getParamNames(method.getDeclaringClass()).get(ClassMetaReader.getKey(method));
            if (list == null)
                return null;
            if (list.size() == size)
                return list;
            if (list.size() > size)
                return list.subList(0, size);
            return null;
        }
        catch (Throwable e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Gets the list of formal parameter names of a Constructor.
     *
     * @param constructor the constructor to analyze
     * @return the list of parameter names, or null if no debug information is available
     */
    public static List<String> getParamNames(Constructor<?> constructor) {
        try {
            int size = constructor.getParameterTypes().length;
            if (size == 0)
                return new ArrayList<String>(0);
            List<String> list = ClassMetaReader.getParamNames(constructor.getDeclaringClass()).get(ClassMetaReader.getKey(constructor));
            if (list != null && list.size() != size)
                return list.subList(0, size);
            return list;
        }
        catch (Throwable e) {
            throw new RuntimeException(e);
        }
    }

    public static Map<String, List<String>> getParamNames(Class<?> klass) throws IOException {
        String key = klass.getName();
        if (caches.containsKey(key))
            return caches.get(key);
        InputStream in = klass.getResourceAsStream("/" + klass.getName().replace('.', '/') + ".class");
        Map<String, List<String>> names = getParamNames(in);
        caches.put(key, names);
        return names;
    }

    public static Map<String, List<String>> getParamNames(InputStream ins) throws IOException {
        if (ins == null)
            return new HashMap<String, List<String>>();
        return ClassMetaReader.build(ins).paramNames;
    }

    protected static Map<String, Map<String, List<String>>> caches = new HashMap<String, Map<String, List<String>>>();
}
{ "content_hash": "f46352b07ac2c95c16ce1c256ea2878b", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 137, "avg_line_length": 34.1025641025641, "alnum_prop": 0.6, "repo_name": "nutzam/nutz", "id": "56868f8cafb6c9bcc467da908ba1a8d4a9d0ff8d", "size": "2828", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "src/org/nutz/lang/util/MethodParamNamesScaner.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "202" }, { "name": "Batchfile", "bytes": "301" }, { "name": "Java", "bytes": "4637208" }, { "name": "JavaScript", "bytes": "7299" }, { "name": "Procfile", "bytes": "43" }, { "name": "Python", "bytes": "782" }, { "name": "Ruby", "bytes": "16672" }, { "name": "Shell", "bytes": "263" } ], "symlink_target": "" }
<title>
    Static HTML Boilerplate {{title}}
</title>

<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="description" content="">
<meta property="og:title" content="">
<meta property="og:image" content="/build/images/og.jpg">
<meta property="og:description" content="">
<meta name="format-detection" content="telephone=no">
{{>favicon}}
<link rel="stylesheet" href="/css/style-rtl.css" type="text/css" />
{{>ga}}
{ "content_hash": "93816424e03292f8b9dffb9982668690", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 68, "avg_line_length": 37.214285714285715, "alnum_prop": 0.6986564299424184, "repo_name": "PrototypeInteractive/static-html-boilerplate", "id": "9f15ce7c405aa9b3f2a95f1013ac751d5ce38dbd", "size": "521", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "partials/ar/head.html", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "3284" }, { "name": "JavaScript", "bytes": "6757" }, { "name": "SCSS", "bytes": "15329" } ], "symlink_target": "" }
package com.DCSP.screen; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.Input; import com.badlogic.gdx.InputAdapter; import com.badlogic.gdx.InputMultiplexer; import com.badlogic.gdx.graphics.GL20; import com.badlogic.gdx.scenes.scene2d.Stage; import com.badlogic.gdx.scenes.scene2d.ui.List; import com.badlogic.gdx.scenes.scene2d.ui.ScrollPane; import com.badlogic.gdx.scenes.scene2d.ui.Skin; import com.badlogic.gdx.scenes.scene2d.ui.Table; import com.badlogic.gdx.utils.Array; import com.badlogic.gdx.utils.OrderedMap; /** * * @author Alex Dodd (wad79) */ public class HighScoresScreen extends ScreenInterface { private List scoreList; private Table scoreTable; private ScrollPane scoreScroll; private Stage scoreStage; private Skin skin; private final OrderedMap scoreFromDB; public HighScoresScreen(OrderedMap scoreFromDB) { this.scoreFromDB = scoreFromDB; } @Override public void show() { scoreStage = new Stage(); InputMultiplexer scoreInput = new InputMultiplexer(); scoreInput.addProcessor(scoreStage); scoreInput.addProcessor(new InputAdapter(){ @Override public boolean keyUp(int keycode) { switch(keycode){ case Input.Keys.ESCAPE: case Input.Keys.BACK: gameParent.setScreen(new GameMenuScreen()); break; default: return false; } return true; } }); Gdx.input.setInputProcessor(scoreInput); skin = new Skin(Gdx.files.internal("uiskin.json")); scoreTable = new Table(skin); scoreTable.pad(5); // scoreList = new List(skin,"user"); // Array words = new Array(new String[]{"a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p", // "q","r","s","t","u","v","w","x","y","z"}); // scoreList.setItems(words); // scoreTable.add(scoreList).padRight(15); // // scoreList = new List(skin,"user"); // words = new Array(new String[]{"A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P", // "Q","R","S","T","U","V","W","X","Y","Z"}); // scoreList.setItems(words); // scoreTable.add(scoreList); scoreTable.add("High Scores").colspan(2).padBottom(15).row(); try { Array levels = scoreFromDB.keys().toArray(); Array scores = scoreFromDB.values().toArray(); scoreList = new List(skin,"user"); scoreList.setItems(levels); scoreTable.add(scoreList).padRight(15); scoreList = new List(skin,"user"); scoreList.setItems(scores); scoreTable.add(scoreList); } catch (Exception e) { System.out.println(e.toString()); } scoreList = new List(skin, "user"); scoreScroll = new ScrollPane(scoreTable); scoreScroll.setFillParent(true); scoreScroll.setX(scoreScroll.getX()+5); scoreStage.addActor(scoreScroll); } @Override public void render(float delta) { Gdx.gl.glClearColor(0,0,0,1); Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT); scoreStage.act(delta); scoreStage.draw(); } @Override public void resize(int width, int height) { } @Override public void pause() { } @Override public void resume() { } @Override public void hide() { } @Override public void dispose() { } }
{ "content_hash": "c5f8e588c9bae55279b9f0eceb130b2c", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 111, "avg_line_length": 28.984615384615385, "alnum_prop": 0.5514861995753716, "repo_name": "JacobAMason/DCSP", "id": "ac46bcd480c7064dd4d91a382e37e99f0f803813", "size": "4908", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/com/DCSP/screen/HighScoresScreen.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "32716" }, { "name": "HTML", "bytes": "6081" }, { "name": "Java", "bytes": "132789" }, { "name": "JavaScript", "bytes": "24" }, { "name": "PHP", "bytes": "73521" } ], "symlink_target": "" }
irods::error mock_archive_generate_full_path( irods::plugin_property_map& _prop_map, const std::string& _phy_path, std::string& _ret_string ) { irods::error result = SUCCESS(); irods::error ret; std::string vault_path; // TODO - getting vault path by property will not likely work for coordinating nodes ret = _prop_map.get<std::string>( irods::RESOURCE_PATH, vault_path ); if ( ( result = ASSERT_PASS( ret, "Resource has no vault path." ) ).ok() ) { if ( _phy_path.compare( 0, 1, "/" ) != 0 && _phy_path.compare( 0, vault_path.size(), vault_path ) != 0 ) { _ret_string = vault_path; _ret_string += "/"; _ret_string += _phy_path; } else { // The physical path already contains the vault path _ret_string = _phy_path; } } return result; } // mock_archive_generate_full_path // =-=-=-=-=-=-=- /// @brief update the physical path in the file object irods::error unix_check_path( irods::resource_plugin_context& _ctx ) { irods::error result = SUCCESS(); try { irods::data_object_ptr data_obj = boost::dynamic_pointer_cast< irods::data_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // NOTE: Must do this for all storage resources std::string full_path; irods::error ret = mock_archive_generate_full_path( _ctx.prop_map(), data_obj->physical_path(), full_path ); if ( ( result = ASSERT_PASS( ret, "Failed generating full path for object." ) ).ok() ) { data_obj->physical_path( full_path ); } return result; } catch ( const std::bad_cast& ) { return ERROR( SYS_INVALID_INPUT_PARAM, "failed to cast fco to data_object" ); } } // unix_check_path // =-=-=-=-=-=-=- /// @brief Checks the basic operation parameters and updates the physical path in the file object template< typename DEST_TYPE > irods::error unix_check_params_and_path( irods::resource_plugin_context& _ctx ) { irods::error result = SUCCESS(); irods::error ret; // =-=-=-=-=-=-=- // verify that the resc context is valid ret = _ctx.valid< DEST_TYPE >(); if ( ( result = ASSERT_PASS( ret, "Resource context is invalid." 
) ).ok() ) { result = unix_check_path( _ctx ); } return result; } // unix_check_params_and_path // =-=-=-=-=-=-=- //@brief Recursively make all of the dirs in the path irods::error mock_archive_mkdir_r( const std::string& path, mode_t mode ) { irods::error result = SUCCESS(); std::string subdir; std::size_t pos = 0; bool done = false; while ( !done && result.ok() ) { pos = path.find_first_of( '/', pos + 1 ); if ( pos > 0 ) { subdir = path.substr( 0, pos ); int status = mkdir( subdir.c_str(), mode ); // =-=-=-=-=-=-=- // handle error cases result = ASSERT_ERROR( status >= 0 || errno == EEXIST, UNIX_FILE_RENAME_ERR - errno, "mkdir error for \"%s\", errno = \"%s\", status = %d.", subdir.c_str(), strerror( errno ), status ); } if ( pos == std::string::npos ) { done = true; } } return result; } // mock_archive_mkdir_r irods::error make_hashed_path( irods::plugin_property_map& _prop_map, const std::string& _path, std::string& _hashed ) { irods::error result; // =-=-=-=-=-=-=- // hash the physical path to reflect object store behavior MD5_CTX context; char md5Buf[ MAX_NAME_LEN ]; unsigned char hash [ MAX_NAME_LEN ]; strncpy( md5Buf, _path.c_str(), _path.size() ); MD5_Init( &context ); MD5_Update( &context, ( unsigned char* )md5Buf, _path.size() ); MD5_Final( ( unsigned char* )hash, &context ); std::stringstream ins; for ( int i = 0; i < 16; ++i ) { ins << std::setfill( '0' ) << std::setw( 2 ) << std::hex << ( int )hash[i]; } // =-=-=-=-=-=-=- // get the vault path for the resource std::string path; irods::error ret = _prop_map.get< std::string >( irods::RESOURCE_PATH, path ); if ( ( result = ASSERT_PASS( ret, "Failed to get vault path for resource." ) ).ok() ) { // =-=-=-=-=-=-=- // append the hash to the path as the new 'cache file name' path += "/"; path += ins.str(); _hashed = path; } return result; } // make_hashed_path extern "C" { // =-=-=-=-=-=-=- // 3. Define operations which will be called by the file* // calls declared in server/driver/include/fileDriver.h // =-=-=-=-=-=-=- // =-=-=-=-=-=-=- // NOTE :: to access properties in the _prop_map do the // :: following : // :: double my_var = 0.0; // :: irods::error ret = _prop_map.get< double >( "my_key", my_var ); // =-=-=-=-=-=-=- // =-=-=-=-=-=-=- // interface for POSIX mkdir irods::error mock_archive_mkdir_plugin( irods::resource_plugin_context& _ctx ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // NOTE :: this function assumes the object's physical path is correct and // should not have the vault path prepended - hcj irods::error ret = _ctx.valid< irods::collection_object >(); if ( ( result = ASSERT_PASS( ret, "resource context is invalid." 
) ).ok() ) { // =-=-=-=-=-=-=- // cast down the chain to our understood object type irods::collection_object_ptr fco = boost::dynamic_pointer_cast< irods::collection_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // make the call to mkdir & umask mode_t myMask = umask( ( mode_t ) 0000 ); int status = mkdir( fco->physical_path().c_str(), fco->mode() ); // =-=-=-=-=-=-=- // reset the old mask umask( ( mode_t ) myMask ); // =-=-=-=-=-=-=- // return an error if necessary result.code( status ); int err_status = UNIX_FILE_MKDIR_ERR - errno; if ( ( result = ASSERT_ERROR( status >= 0, err_status, "mkdir error for [%s], errno = [%s], status = %d.", fco->physical_path().c_str(), strerror( errno ), err_status ) ).ok() ) { result.code( status ); } } return result; } // mock_archive_mkdir_plugin irods::error mock_archive_stat_plugin( irods::resource_plugin_context& , struct stat* _statbuf ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // manufacture a stat as we do not have a // microservice to perform this duty _statbuf->st_mode = S_IFREG; _statbuf->st_nlink = 1; _statbuf->st_uid = getuid(); _statbuf->st_gid = getgid(); _statbuf->st_atime = _statbuf->st_mtime = _statbuf->st_ctime = time( 0 ); _statbuf->st_size = UNKNOWN_FILE_SZ; return SUCCESS(); } // mock_archive_stat_plugin // =-=-=-=-=-=-=- // interface for POSIX readdir irods::error mock_archive_rename_plugin( irods::resource_plugin_context& _ctx, const char* _new_file_name ) { // =-=-=-=-=-=-=- // Check the operation parameters and update the physical path irods::error result = SUCCESS(); irods::error ret = unix_check_params_and_path< irods::data_object >( _ctx ); if ( ( result = ASSERT_PASS( ret, "Invalid parameters or physical path." ) ).ok() ) { // =-=-=-=-=-=-=- // manufacture a new path from the new file name std::string new_full_path; ret = mock_archive_generate_full_path( _ctx.prop_map(), _new_file_name, new_full_path ); if ( ( result = ASSERT_PASS( ret, "Unable to generate full path for destination file: \"%s\".", _new_file_name ) ).ok() ) { // =-=-=-=-=-=-=- // cast down the hierarchy to the desired object irods::file_object_ptr fco = boost::dynamic_pointer_cast< irods::file_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // get hashed names for the old path std::string new_hash; ret = make_hashed_path( _ctx.prop_map(), _new_file_name, new_hash ); if ( ( result = ASSERT_PASS( ret, "Failed to gen hashed path" ) ).ok() ) { // =-=-=-=-=-=-=- // make the call to rename int status = rename( fco->physical_path().c_str(), new_hash.c_str() ); // =-=-=-=-=-=-=- // handle error cases int err_status = UNIX_FILE_RENAME_ERR - errno; if ( ( result = ASSERT_ERROR( status >= 0, err_status, "Rename error for \"%s\" to \"%s\", errno = \"%s\", status = %d.", fco->physical_path().c_str(), new_hash.c_str(), strerror( errno ), err_status ) ).ok() ) { fco->physical_path( new_hash ); result.code( status ); } } } } return result; } // mock_archive_rename_plugin // =-=-=-=-=-=-=- // interface for POSIX Truncate irods::error mock_archive_truncate_plugin( irods::resource_plugin_context& _ctx ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // Check the operation parameters and update the physical path irods::error ret = unix_check_params_and_path< irods::file_object >( _ctx ); if ( ( result = ASSERT_PASS( ret, "Invalid plugin context." 
) ).ok() ) { // =-=-=-=-=-=-=- // get ref to fco irods::file_object_ptr fco = boost::dynamic_pointer_cast< irods::file_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // make the call to unlink int status = truncate( fco->physical_path().c_str(), fco->size() ); // =-=-=-=-=-=-=- // error handling int err_status = UNIX_FILE_UNLINK_ERR - errno; result = ASSERT_ERROR( status >= 0, err_status, "Truncate error for: \"%s\", errno = \"%s\", status = %d.", fco->physical_path().c_str(), strerror( errno ), err_status ); } return result; } // mock_archive_truncate_plugin // =-=-=-=-=-=-=- // interface for POSIX Unlink irods::error mock_archive_unlink_plugin( irods::resource_plugin_context& _ctx ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // Check the operation parameters and update the physical path irods::error ret = unix_check_params_and_path< irods::file_object >( _ctx ); if ( ( result = ASSERT_PASS( ret, "Invalid plugin context." ) ).ok() ) { // =-=-=-=-=-=-=- // get ref to fco irods::file_object_ptr fco = boost::dynamic_pointer_cast< irods::file_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // make the call to unlink int status = unlink( fco->physical_path().c_str() ); // =-=-=-=-=-=-=- // error handling int err_status = UNIX_FILE_UNLINK_ERR - errno; result = ASSERT_ERROR( status >= 0, err_status, "Unlink error for: \"%s\", errno = \"%s\", status = %d.", fco->physical_path().c_str(), strerror( errno ), err_status ); } return result; } // mock_archive_unlink_plugin int mockArchiveCopyPlugin( int mode, const char* srcFileName, const char* destFileName ) { int trans_buff_size = 0; irods::error ret = irods::get_advanced_setting<int>( irods::CFG_TRANS_BUFFER_SIZE_FOR_PARA_TRANS, trans_buff_size ); if ( !ret.ok() ) { return ret.code(); } trans_buff_size *= 1024 * 1024; int inFd, outFd; std::vector<char> myBuf( trans_buff_size ); rodsLong_t bytesCopied = 0; int bytesRead; int bytesWritten; int status; struct stat statbuf; inFd = open( srcFileName, O_RDONLY, 0 ); status = stat( srcFileName, &statbuf ); if ( inFd < 0 ) { status = UNIX_FILE_OPEN_ERR - errno; rodsLog( LOG_ERROR, "mockArchiveCopyPlugin: open error for srcFileName %s, status = %d", srcFileName, status ); return status; } else if ( status < 0 ) { status = UNIX_FILE_STAT_ERR - errno; rodsLog( LOG_ERROR, "mockArchiveCopyPlugin: stat of %s error, status = %d", srcFileName, status ); close( inFd ); // JMC cppcheck - resource return status; } else if ( ( statbuf.st_mode & S_IFREG ) == 0 ) { rodsLog( LOG_ERROR, "mockArchiveCopyPlugin: open error for srcFileName %s, status = %d", srcFileName, UNIX_FILE_OPEN_ERR ); close( inFd ); // JMC cppcheck - resource return status; } outFd = open( destFileName, O_WRONLY | O_CREAT | O_TRUNC, mode ); if ( outFd < 0 ) { status = UNIX_FILE_OPEN_ERR - errno; rodsLog( LOG_ERROR, "mockArchiveCopyPlugin: open error for destFileName %s, status = %d", destFileName, status ); close( inFd ); return status; } while ( ( bytesRead = read( inFd, ( void * ) myBuf.data(), trans_buff_size ) ) > 0 ) { bytesWritten = write( outFd, ( void * ) myBuf.data(), bytesRead ); if ( bytesWritten <= 0 ) { status = UNIX_FILE_WRITE_ERR - errno; rodsLog( LOG_ERROR, "mockArchiveCopyPlugin: write error for srcFileName %s, status = %d", destFileName, status ); close( inFd ); close( outFd ); return status; } bytesCopied += bytesWritten; } close( inFd ); close( outFd ); if ( bytesCopied != statbuf.st_size ) { rodsLog( LOG_ERROR, "mockArchiveCopyPlugin: Copied size %lld does not match source \ size %lld of %s", bytesCopied, statbuf.st_size, 
srcFileName ); return SYS_COPY_LEN_ERR; } else { return 0; } } // mockArchiveCopyPlugin // =-=-=-=-=-=-=- // unixStageToCache - This routine is for testing the TEST_STAGE_FILE_TYPE. // Just copy the file from filename to cacheFilename. optionalInfo info // is not used. irods::error mock_archive_stagetocache_plugin( irods::resource_plugin_context& _ctx, const char* _cache_file_name ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // Check the operation parameters and update the physical path irods::error ret = unix_check_params_and_path< irods::file_object >( _ctx ); if ( ( result = ASSERT_PASS( ret, "Invalid plugin context." ) ).ok() ) { // =-=-=-=-=-=-=- // get ref to fco irods::file_object_ptr fco = boost::dynamic_pointer_cast< irods::file_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // get the vault path for the resource std::string path; ret = _ctx.prop_map().get< std::string >( irods::RESOURCE_PATH, path ); if ( ( result = ASSERT_PASS( ret, "Failed to retrieve vault path for resource." ) ).ok() ) { // =-=-=-=-=-=-=- // append the hash to the path as the new 'cache file name' path += "/"; path += fco->physical_path().c_str(); int status = mockArchiveCopyPlugin( fco->mode(), fco->physical_path().c_str(), _cache_file_name ); result = ASSERT_ERROR( status >= 0, status, "Failed copying archive file: \"%s\" to cache file: \"%s\".", fco->physical_path().c_str(), _cache_file_name ); } } return result; } // mock_archive_stagetocache_plugin // =-=-=-=-=-=-=- // unixSyncToArch - This routine is for testing the TEST_STAGE_FILE_TYPE. // Just copy the file from cacheFilename to filename. optionalInfo info // is not used. irods::error mock_archive_synctoarch_plugin( irods::resource_plugin_context& _ctx, char* _cache_file_name ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // Check the operation parameters and update the physical path irods::error ret = unix_check_params_and_path< irods::file_object >( _ctx ); if ( ( result = ASSERT_PASS( ret, "Invalid plugin context." ) ).ok() ) { // =-=-=-=-=-=-=- // get ref to fco irods::file_object_ptr fco = boost::dynamic_pointer_cast< irods::file_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // get the vault path for the resource std::string path; ret = make_hashed_path( _ctx.prop_map(), fco->physical_path(), path ); if ( ( result = ASSERT_PASS( ret, "Failed to gen hashed path" ) ).ok() ) { // =-=-=-=-=-=-=- // append the hash to the path as the new 'cache file name' rodsLog( LOG_NOTICE, "mock archive :: cache file name [%s]", _cache_file_name ); rodsLog( LOG_NOTICE, "mock archive :: new hashed file name for [%s] is [%s]", fco->physical_path().c_str(), path.c_str() ); // =-=-=-=-=-=-=- // make the directories in the path to the new file std::string new_path = path; std::size_t last_slash = new_path.find_last_of( '/' ); new_path.erase( last_slash ); ret = mock_archive_mkdir_r( new_path.c_str(), 0750 ); if ( ( result = ASSERT_PASS( ret, "Mkdir error for \"%s\".", new_path.c_str() ) ).ok() ) { } // =-=-=-=-=-=-=- // make the copy to the 'archive' int status = mockArchiveCopyPlugin( fco->mode(), _cache_file_name, path.c_str() ); if ( ( result = ASSERT_ERROR( status >= 0, status, "Sync to arch failed." ) ).ok() ) { fco->physical_path( path ); } } } return result; } // mock_archive_synctoarch_plugin // =-=-=-=-=-=-=- // redirect_create - code to determine redirection for get operation // Create never gets called on an archive. 
// =-=-=-=-=-=-=- // redirect_get - code to determine redirection for get operation irods::error mock_archive_redirect_open( irods::plugin_property_map& _prop_map, irods::file_object_ptr _file_obj, const std::string& _resc_name, const std::string& _curr_host, float& _out_vote ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // initially set a good default _out_vote = 0.0; // =-=-=-=-=-=-=- // determine if the resource is down int resc_status = 0; irods::error get_ret = _prop_map.get< int >( irods::RESOURCE_STATUS, resc_status ); if ( ( result = ASSERT_PASS( get_ret, "Failed to get \"status\" property." ) ).ok() ) { // =-=-=-=-=-=-=- // if the status is down, vote no. if ( INT_RESC_STATUS_DOWN != resc_status ) { // =-=-=-=-=-=-=- // get the resource host for comparison to curr host std::string host_name; get_ret = _prop_map.get< std::string >( irods::RESOURCE_LOCATION, host_name ); if ( ( result = ASSERT_PASS( get_ret, "Failed to get \"location\" property." ) ).ok() ) { // =-=-=-=-=-=-=- // set a flag to test if were at the curr host, if so we vote higher bool curr_host = ( _curr_host == host_name ); // =-=-=-=-=-=-=- // make some flags to clairify decision making bool need_repl = ( _file_obj->repl_requested() > -1 ); // =-=-=-=-=-=-=- // set up variables for iteration bool found = false; std::vector< irods::physical_object > objs = _file_obj->replicas(); std::vector< irods::physical_object >::iterator itr = objs.begin(); // =-=-=-=-=-=-=- // check to see if the replica is in this resource, if one is requested for ( ; !found && itr != objs.end(); ++itr ) { // =-=-=-=-=-=-=- // run the hier string through the parser and get the last // entry. std::string last_resc; irods::hierarchy_parser parser; parser.set_string( itr->resc_hier() ); parser.last_resc( last_resc ); // =-=-=-=-=-=-=- // more flags to simplify decision making bool repl_us = ( _file_obj->repl_requested() == itr->repl_num() ); bool resc_us = ( _resc_name == last_resc ); // =-=-=-=-=-=-=- // success - correct resource and dont need a specific // replication, or the repl nums match if ( resc_us ) { if ( !need_repl || ( need_repl && repl_us ) ) { found = true; if ( curr_host ) { _out_vote = 1.0; } else { _out_vote = 0.5; } } } // if resc_us } // for itr } } } return result; } // mock_archive_redirect_open // =-=-=-=-=-=-=- // used to allow the resource to determine which host // should provide the requested operation irods::error mock_archive_redirect_plugin( irods::resource_plugin_context& _ctx, const std::string* _opr, const std::string* _curr_host, irods::hierarchy_parser* _out_parser, float* _out_vote ) { irods::error result = SUCCESS(); // =-=-=-=-=-=-=- // check the context validity irods::error ret = _ctx.valid< irods::file_object >(); if ( ( result = ASSERT_PASS( ret, "Invalid plugin context." ) ).ok() ) { if ( ( result = ASSERT_ERROR( _opr && _curr_host && _out_parser && _out_vote, SYS_INVALID_INPUT_PARAM, "Invalid input parameters." ) ).ok() ) { // =-=-=-=-=-=-=- // cast down the chain to our understood object type irods::file_object_ptr file_obj = boost::dynamic_pointer_cast< irods::file_object >( _ctx.fco() ); // =-=-=-=-=-=-=- // get the name of this resource std::string resc_name; ret = _ctx.prop_map().get< std::string >( irods::RESOURCE_NAME, resc_name ); if ( ( result = ASSERT_PASS( ret, "Failed to get property for resource name." 
) ).ok() ) { // =-=-=-=-=-=-=- // add ourselves to the hierarchy parser by default _out_parser->add_child( resc_name ); // =-=-=-=-=-=-=- // test the operation to determine which choices to make if ( irods::OPEN_OPERATION == ( *_opr ) ) { // =-=-=-=-=-=-=- // call redirect determination for 'get' operation result = mock_archive_redirect_open( _ctx.prop_map(), file_obj, resc_name, ( *_curr_host ), ( *_out_vote ) ); } else if ( irods::CREATE_OPERATION == ( *_opr ) ) { // =-=-=-=-=-=-=- // call redirect determination for 'create' operation result = ASSERT_ERROR( false, SYS_INVALID_INPUT_PARAM, "Create operation not supported for an archive" ); } else { // =-=-=-=-=-=-=- // must have been passed a bad operation result = ASSERT_ERROR( false, SYS_INVALID_INPUT_PARAM, "Operation not supported: \"%s\".", _opr->c_str() ); } } } } return result; } // mock_archive_redirect_plugin // =-=-=-=-=-=-=- // mock_archive_rebalance - code which would rebalance the subtree irods::error mock_archive_rebalance( irods::resource_plugin_context& _ctx ) { return update_resource_object_count( _ctx.comm(), _ctx.prop_map() ); } // mock_archive_file_rebalancec // =-=-=-=-=-=-=- // 3. create derived class to handle mock_archive file system resources // necessary to do custom parsing of the context string to place // any useful values into the property map for reference in later // operations. semicolon is the preferred delimiter class mockarchive_resource : public irods::resource { // =-=-=-=-=-=-=- // 3a. create a class to provide maintenance operations, this is only for example // and will not be called. class maintenance_operation { public: maintenance_operation( const std::string& _n ) : name_( _n ) { } maintenance_operation( const maintenance_operation& _rhs ) { name_ = _rhs.name_; } maintenance_operation& operator=( const maintenance_operation& _rhs ) { name_ = _rhs.name_; return *this; } irods::error operator()( rcComm_t* ) { rodsLog( LOG_NOTICE, "mockarchive_resource::post_disconnect_maintenance_operation - [%s]", name_.c_str() ); return SUCCESS(); } private: std::string name_; }; // class maintenance_operation public: mockarchive_resource( const std::string& _inst_name, const std::string& _context ) : irods::resource( _inst_name, _context ) { } // ctor irods::error need_post_disconnect_maintenance_operation( bool& _b ) { _b = false; return SUCCESS(); } // =-=-=-=-=-=-=- // 3b. pass along a functor for maintenance work after // the client disconnects, uncomment the first two lines for effect. irods::error post_disconnect_maintenance_operation( irods::pdmo_type& ) { return ERROR( -1, "nop" ); } }; // class mockarchive_resource // =-=-=-=-=-=-=- // 4. create the plugin factory function which will return a dynamically // instantiated object of the previously defined derived resource. use // the add_operation member to associate a 'call name' to the interfaces // defined above. for resource plugins these call names are standardized // as used by the irods facing interface defined in // server/drivers/src/fileDriver.c irods::resource* plugin_factory( const std::string& _inst_name, const std::string& _context ) { // =-=-=-=-=-=-=- // 4a. create mockarchive_resource mockarchive_resource* resc = new mockarchive_resource( _inst_name, _context ); // =-=-=-=-=-=-=- // 4b. map function names to operations. this map will be used to load // the symbols from the shared object in the delay_load stage of // plugin loading. 
resc->add_operation( irods::RESOURCE_OP_UNLINK, "mock_archive_unlink_plugin" ); resc->add_operation( irods::RESOURCE_OP_STAGETOCACHE, "mock_archive_stagetocache_plugin" ); resc->add_operation( irods::RESOURCE_OP_SYNCTOARCH, "mock_archive_synctoarch_plugin" ); resc->add_operation( irods::RESOURCE_OP_RESOLVE_RESC_HIER, "mock_archive_redirect_plugin" ); resc->add_operation( irods::RESOURCE_OP_REBALANCE, "mock_archive_rebalance" ); resc->add_operation( irods::RESOURCE_OP_MKDIR, "mock_archive_mkdir_plugin" ); resc->add_operation( irods::RESOURCE_OP_RENAME, "mock_archive_rename_plugin" ); resc->add_operation( irods::RESOURCE_OP_STAT, "mock_archive_stat_plugin" ); resc->add_operation( irods::RESOURCE_OP_TRUNCATE, "mock_archive_truncate_plugin" ); // =-=-=-=-=-=-=- // set some properties necessary for backporting to iRODS legacy code resc->set_property< int >( irods::RESOURCE_CHECK_PATH_PERM, 2 );//DO_CHK_PATH_PERM ); resc->set_property< int >( irods::RESOURCE_CREATE_PATH, 1 );//CREATE_PATH ); // =-=-=-=-=-=-=- // 4c. return the pointer through the generic interface of an // irods::resource pointer return dynamic_cast<irods::resource*>( resc ); } // plugin_factory }; // extern "C"
{ "content_hash": "1890230733462270a0a685c1289e806e", "timestamp": "", "source": "github", "line_count": 771, "max_line_length": 152, "avg_line_length": 39.93385214007782, "alnum_prop": 0.4817629672935139, "repo_name": "janiheikkinen/irods", "id": "ca56b66ca491440cf58ccdd775596c17a3fa7c69", "size": "32453", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "plugins/resources/mockarchive/libmockarchive.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "438371" }, { "name": "C++", "bytes": "8162401" }, { "name": "CMake", "bytes": "854" }, { "name": "CSS", "bytes": "3246" }, { "name": "FORTRAN", "bytes": "6804" }, { "name": "HTML", "bytes": "27675" }, { "name": "JavaScript", "bytes": "5231" }, { "name": "Lex", "bytes": "3088" }, { "name": "Makefile", "bytes": "75630" }, { "name": "Objective-C", "bytes": "1160" }, { "name": "PLSQL", "bytes": "3241" }, { "name": "Pascal", "bytes": "20991" }, { "name": "Perl", "bytes": "281394" }, { "name": "Python", "bytes": "779176" }, { "name": "R", "bytes": "10664" }, { "name": "Rebol", "bytes": "159165" }, { "name": "Ruby", "bytes": "5914" }, { "name": "Shell", "bytes": "205324" }, { "name": "Yacc", "bytes": "17441" } ], "symlink_target": "" }
package org.squiddev.plethora.integration.refinedstorage; import com.raoulvdberge.refinedstorage.RS; import com.raoulvdberge.refinedstorage.api.autocrafting.preview.ICraftingPreviewElement; import com.raoulvdberge.refinedstorage.apiimpl.autocrafting.preview.CraftingPreviewElementItemStack; import net.minecraft.init.Items; import net.minecraft.item.ItemStack; import org.squiddev.plethora.api.Injects; import org.squiddev.plethora.api.meta.BaseMetaProvider; import org.squiddev.plethora.api.method.IPartialContext; import javax.annotation.Nonnull; import java.util.HashMap; import java.util.Map; @Injects(RS.ID) public final class MetaCraftingPreviewElement extends BaseMetaProvider<ICraftingPreviewElement<?>> { @Nonnull @Override public Map<String, ?> getMeta(@Nonnull IPartialContext<ICraftingPreviewElement<?>> context) { ICraftingPreviewElement<?> preview = context.getTarget(); Map<String, Object> out = new HashMap<>(); out.put("id", preview.getId()); out.put("available", preview.getAvailable()); out.put("toCraft", preview.getToCraft()); out.put("component", context.makePartialChild(preview.getElement()).getMeta()); return out; } @Nonnull @Override public ICraftingPreviewElement<?> getExample() { return new CraftingPreviewElementItemStack(new ItemStack(Items.STICK, 4)); } }
{ "content_hash": "01746e1063732afbce7ccd1d28197f95", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 100, "avg_line_length": 35.648648648648646, "alnum_prop": 0.796057619408643, "repo_name": "SquidDev-CC/plethora", "id": "98d13ac4f2f5a4d8f80bcfd7f33312ee57b07999", "size": "1319", "binary": false, "copies": "1", "ref": "refs/heads/minecraft-1.12", "path": "src/main/java/org/squiddev/plethora/integration/refinedstorage/MetaCraftingPreviewElement.java", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "3836" }, { "name": "Java", "bytes": "1212420" }, { "name": "Lua", "bytes": "15957" } ], "symlink_target": "" }
The program runs in Cipher Block Chaining (CBC) mode.

To better understand how the encryption algorithm and CBC work, see:

- `./info/cbc.pdf`
- `./info/s-des.pdf`

### Usage:

Clone the repository and run it with cargo (Rust's package manager).

Both parties, the sender (encrypting) and the receiver (decrypting), need to securely share a 10-bit binary symmetric key and an 8-bit binary initialization vector.
As long as both sides use the same key and initialization vector, encryption and decryption work for any type of file.

#### Run the program with the following arguments:

Encryption:
`cargo run <key> <vector> <plaintext_filename> <ciphertext_filename>`

Decryption:
`cargo run <d> <key> <vector> <ciphertext_filename> <plaintext_filename>`

## Example:

### File: andromeda.jpg (plaintext)
<img src="./files/andromeda.jpg" width="400">

### Encrypting `./files/andromeda.jpg` with key `0111111101` and initialization vector `10101010`:
![encrypt](./info/encrypt.gif)

### File: andromeda.jpg.encrypted (ciphertext)
The file is now meaningless to image viewers; opening it produces this:
<img src="./info/andromeda-encrypted.png" width="500">

### Decrypting file `./andromeda.jpg.encrypted` with key `0111111101` and initialization vector `10101010`:
![decrypt](./info/decrypt.gif)

This produces the original file.

I tried to keep the code as close to the description of the algorithm as possible for the sake of readability.

P.S.: This program is for learning purposes only; the encryption can be easily broken, so do not rely on it for important files!
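To make the CBC chaining concrete, here is a minimal sketch of the idea in Rust. It is not the code from this repository: `encrypt_block` is only a stand-in for the real S-DES round function, and the function and variable names are illustrative assumptions. Each 8-bit block is XORed with the previous ciphertext block (or the initialization vector for the first block) before it is encrypted.

```rust
// Illustrative stand-in for the S-DES block cipher (8-bit blocks, 10-bit key).
// The real cipher applies IP, two Feistel rounds with subkeys K1/K2, and IP^-1.
fn encrypt_block(block: u8, key: u16) -> u8 {
    block ^ (key as u8) ^ ((key >> 2) as u8) // toy placeholder, NOT S-DES
}

/// CBC encryption: chain each block with the previous ciphertext block.
fn cbc_encrypt(plaintext: &[u8], key: u16, iv: u8) -> Vec<u8> {
    let mut prev = iv;
    plaintext
        .iter()
        .map(|&p| {
            let c = encrypt_block(p ^ prev, key); // XOR with previous ciphertext, then encrypt
            prev = c;
            c
        })
        .collect()
}

fn main() {
    let key = 0b0111111101u16; // 10-bit key, as in the example above
    let iv = 0b10101010u8;     // 8-bit initialization vector
    let plaintext: &[u8] = b"hello";
    println!("{:?}", cbc_encrypt(plaintext, key, iv));
}
```

Decryption reverses the chaining: decrypt each ciphertext block first, then XOR the result with the previous ciphertext block (or the IV for the first block).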
{ "content_hash": "aadaa880f8ef63671350f4c6a2fdc165", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 275, "avg_line_length": 36.22727272727273, "alnum_prop": 0.7283563362609786, "repo_name": "lborg019/rust-sdes", "id": "40c5bab7f3d411d55cfa30454f80d578931de2ea", "size": "1672", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Rust", "bytes": "23121" } ], "symlink_target": "" }
package com.example.tao.dl;

import android.app.DatePickerDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.DatePicker;
import android.widget.EditText;
import android.widget.TextView;

import java.util.Calendar;

/**
 * Created by mengyingfan on 9/22/17.
 */
public class DateDialog extends DialogFragment implements DatePickerDialog.OnDateSetListener {
    EditText duedate;

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        // default the picker to the current date
        final Calendar calendar = Calendar.getInstance();
        int day = calendar.get(Calendar.DAY_OF_MONTH);
        int month = calendar.get(Calendar.MONTH);
        int year = calendar.get(Calendar.YEAR);

        // grab the due-date field, then return an instance of DatePickerDialog
        duedate = (EditText) getActivity().findViewById(R.id.due_date);
        return new DatePickerDialog(getActivity(), this, year, month, day);
    }

    public void onDateSet(DatePicker view, int year, int month, int day) {
        month = month + 1; // Calendar months are zero-based
        String date = year + "-" + month + "-" + day;
        duedate.setText(date);
    }
}
{ "content_hash": "a83e318531f3c80795b7ec12f208b66f", "timestamp": "", "source": "github", "line_count": 41, "max_line_length": 94, "avg_line_length": 30.73170731707317, "alnum_prop": 0.7063492063492064, "repo_name": "taoalpha/DL", "id": "767698af7790b3f9c94c8887fb9670f9f546cc39", "size": "1260", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/example/tao/dl/DateDialog.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "19263" } ], "symlink_target": "" }
'use strict'; const chai = require('chai'), expect = chai.expect, Support = require(__dirname + '/../../support'), DataTypes = require(__dirname + '/../../../../lib/data-types'), dialect = Support.getTestDialect(), _ = require('lodash'), moment = require('moment'), Operators = require('../../../../lib/operators'), QueryGenerator = require('../../../../lib/dialects/sqlite/query-generator'); if (dialect === 'sqlite') { describe('[SQLITE Specific] QueryGenerator', () => { beforeEach(function() { this.User = this.sequelize.define('User', { username: DataTypes.STRING }); return this.User.sync({ force: true }); }); const suites = { arithmeticQuery: [ { title: 'Should use the plus operator', arguments: ['+', 'myTable', { foo: 'bar' }, {}], expectation: 'UPDATE `myTable` SET `foo`=`foo`+ \'bar\' ' }, { title: 'Should use the plus operator with where clause', arguments: ['+', 'myTable', { foo: 'bar' }, { bar: 'biz'}], expectation: 'UPDATE `myTable` SET `foo`=`foo`+ \'bar\' WHERE `bar` = \'biz\'' }, { title: 'Should use the minus operator', arguments: ['-', 'myTable', { foo: 'bar' }], expectation: 'UPDATE `myTable` SET `foo`=`foo`- \'bar\' ' }, { title: 'Should use the minus operator with negative value', arguments: ['-', 'myTable', { foo: -1 }], expectation: 'UPDATE `myTable` SET `foo`=`foo`- -1 ' }, { title: 'Should use the minus operator with where clause', arguments: ['-', 'myTable', { foo: 'bar' }, { bar: 'biz'}], expectation: 'UPDATE `myTable` SET `foo`=`foo`- \'bar\' WHERE `bar` = \'biz\'' } ], attributesToSQL: [ { arguments: [{id: 'INTEGER'}], expectation: {id: 'INTEGER'} }, { arguments: [{id: 'INTEGER', foo: 'VARCHAR(255)'}], expectation: {id: 'INTEGER', foo: 'VARCHAR(255)'} }, { arguments: [{id: {type: 'INTEGER'}}], expectation: {id: 'INTEGER'} }, { arguments: [{id: {type: 'INTEGER', allowNull: false}}], expectation: {id: 'INTEGER NOT NULL'} }, { arguments: [{id: {type: 'INTEGER', allowNull: true}}], expectation: {id: 'INTEGER'} }, { arguments: [{id: {type: 'INTEGER', primaryKey: true, autoIncrement: true}}], expectation: {id: 'INTEGER PRIMARY KEY AUTOINCREMENT'} }, { arguments: [{id: {type: 'INTEGER', defaultValue: 0}}], expectation: {id: 'INTEGER DEFAULT 0'} }, { arguments: [{id: {type: 'INTEGER', defaultValue: undefined}}], expectation: {id: 'INTEGER'} }, { arguments: [{id: {type: 'INTEGER', unique: true}}], expectation: {id: 'INTEGER UNIQUE'} }, // New references style { arguments: [{id: {type: 'INTEGER', references: { model: 'Bar' }}}], expectation: {id: 'INTEGER REFERENCES `Bar` (`id`)'} }, { arguments: [{id: {type: 'INTEGER', references: { model: 'Bar', key: 'pk' }}}], expectation: {id: 'INTEGER REFERENCES `Bar` (`pk`)'} }, { arguments: [{id: {type: 'INTEGER', references: { model: 'Bar' }, onDelete: 'CASCADE'}}], expectation: {id: 'INTEGER REFERENCES `Bar` (`id`) ON DELETE CASCADE'} }, { arguments: [{id: {type: 'INTEGER', references: { model: 'Bar' }, onUpdate: 'RESTRICT'}}], expectation: {id: 'INTEGER REFERENCES `Bar` (`id`) ON UPDATE RESTRICT'} }, { arguments: [{id: {type: 'INTEGER', allowNull: false, defaultValue: 1, references: { model: 'Bar' }, onDelete: 'CASCADE', onUpdate: 'RESTRICT'}}], expectation: {id: 'INTEGER NOT NULL DEFAULT 1 REFERENCES `Bar` (`id`) ON DELETE CASCADE ON UPDATE RESTRICT'} } ], createTableQuery: [ { arguments: ['myTable', {data: 'BLOB'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`data` BLOB);' }, { arguments: ['myTable', {data: 'LONGBLOB'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`data` LONGBLOB);' }, { arguments: ['myTable', 
{title: 'VARCHAR(255)', name: 'VARCHAR(255)'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255));' }, { arguments: ['myTable', {title: 'VARCHAR BINARY(255)', number: 'INTEGER(5) UNSIGNED PRIMARY KEY '}], // length and unsigned are not allowed on primary key expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR BINARY(255), `number` INTEGER PRIMARY KEY);' }, { arguments: ['myTable', {title: 'ENUM("A", "B", "C")', name: 'VARCHAR(255)'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`title` ENUM(\"A\", \"B\", \"C\"), `name` VARCHAR(255));' }, { arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', id: 'INTEGER PRIMARY KEY'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255), `id` INTEGER PRIMARY KEY);' }, { arguments: ['myTable', {title: 'VARCHAR(255)', name: 'VARCHAR(255)', otherId: 'INTEGER REFERENCES `otherTable` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`title` VARCHAR(255), `name` VARCHAR(255), `otherId` INTEGER REFERENCES `otherTable` (`id`) ON DELETE CASCADE ON UPDATE NO ACTION);' }, { arguments: ['myTable', {id: 'INTEGER PRIMARY KEY AUTOINCREMENT', name: 'VARCHAR(255)'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`id` INTEGER PRIMARY KEY AUTOINCREMENT, `name` VARCHAR(255));' }, { arguments: ['myTable', {id: 'INTEGER PRIMARY KEY AUTOINCREMENT', name: 'VARCHAR(255)', surname: 'VARCHAR(255)'}, {uniqueKeys: {uniqueConstraint: {fields: ['name', 'surname'], customIndex: true }}}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable` (`id` INTEGER PRIMARY KEY AUTOINCREMENT, `name` VARCHAR(255), `surname` VARCHAR(255), UNIQUE (`name`, `surname`));' } ], selectQuery: [ { arguments: ['myTable'], expectation: 'SELECT * FROM `myTable`;', context: QueryGenerator }, { arguments: ['myTable', {attributes: ['id', 'name']}], expectation: 'SELECT `id`, `name` FROM `myTable`;', context: QueryGenerator }, { arguments: ['myTable', {where: {id: 2}}], expectation: 'SELECT * FROM `myTable` WHERE `myTable`.`id` = 2;', context: QueryGenerator }, { arguments: ['myTable', {where: {name: 'foo'}}], expectation: "SELECT * FROM `myTable` WHERE `myTable`.`name` = 'foo';", context: QueryGenerator }, { arguments: ['myTable', {where: {name: "foo';DROP TABLE myTable;"}}], expectation: "SELECT * FROM `myTable` WHERE `myTable`.`name` = 'foo\'\';DROP TABLE myTable;';", context: QueryGenerator }, { arguments: ['myTable', {where: 2}], expectation: 'SELECT * FROM `myTable` WHERE `myTable`.`id` = 2;', context: QueryGenerator }, { arguments: ['foo', { attributes: [['count(*)', 'count']] }], expectation: 'SELECT count(*) AS `count` FROM `foo`;', context: QueryGenerator }, { arguments: ['myTable', {order: ['id']}], expectation: 'SELECT * FROM `myTable` ORDER BY `id`;', context: QueryGenerator }, { arguments: ['myTable', {order: ['id', 'DESC']}], expectation: 'SELECT * FROM `myTable` ORDER BY `id`, `DESC`;', context: QueryGenerator }, { arguments: ['myTable', {order: ['myTable.id']}], expectation: 'SELECT * FROM `myTable` ORDER BY `myTable`.`id`;', context: QueryGenerator }, { arguments: ['myTable', {order: [['myTable.id', 'DESC']]}], expectation: 'SELECT * FROM `myTable` ORDER BY `myTable`.`id` DESC;', context: QueryGenerator }, { arguments: ['myTable', {order: [['id', 'DESC']]}, function(sequelize) {return sequelize.define('myTable', {});}], expectation: 'SELECT * FROM `myTable` AS `myTable` ORDER BY `myTable`.`id` DESC;', context: 
QueryGenerator, needsSequelize: true }, { arguments: ['myTable', {order: [['id', 'DESC'], ['name']]}, function(sequelize) {return sequelize.define('myTable', {});}], expectation: 'SELECT * FROM `myTable` AS `myTable` ORDER BY `myTable`.`id` DESC, `myTable`.`name`;', context: QueryGenerator, needsSequelize: true }, { title: 'sequelize.where with .fn as attribute and default comparator', arguments: ['myTable', function(sequelize) { return { where: sequelize.and( sequelize.where(sequelize.fn('LOWER', sequelize.col('user.name')), 'jan'), { type: 1 } ) }; }], expectation: "SELECT * FROM `myTable` WHERE (LOWER(`user`.`name`) = 'jan' AND `myTable`.`type` = 1);", context: QueryGenerator, needsSequelize: true }, { title: 'sequelize.where with .fn as attribute and LIKE comparator', arguments: ['myTable', function(sequelize) { return { where: sequelize.and( sequelize.where(sequelize.fn('LOWER', sequelize.col('user.name')), 'LIKE', '%t%'), { type: 1 } ) }; }], expectation: "SELECT * FROM `myTable` WHERE (LOWER(`user`.`name`) LIKE '%t%' AND `myTable`.`type` = 1);", context: QueryGenerator, needsSequelize: true }, { title: 'functions can take functions as arguments', arguments: ['myTable', function(sequelize) { return { order: [[sequelize.fn('f1', sequelize.fn('f2', sequelize.col('id'))), 'DESC']] }; }], expectation: 'SELECT * FROM `myTable` ORDER BY f1(f2(`id`)) DESC;', context: QueryGenerator, needsSequelize: true }, { title: 'functions can take all types as arguments', arguments: ['myTable', function(sequelize) { return { order: [ [sequelize.fn('f1', sequelize.col('myTable.id')), 'DESC'], [sequelize.fn('f2', 12, 'lalala', new Date(Date.UTC(2011, 2, 27, 10, 1, 55))), 'ASC'] ] }; }], expectation: "SELECT * FROM `myTable` ORDER BY f1(`myTable`.`id`) DESC, f2(12, 'lalala', '2011-03-27 10:01:55.000 +00:00') ASC;", context: QueryGenerator, needsSequelize: true }, { title: 'single string argument should be quoted', arguments: ['myTable', {group: 'name'}], expectation: 'SELECT * FROM `myTable` GROUP BY `name`;', context: QueryGenerator }, { arguments: ['myTable', {group: ['name']}], expectation: 'SELECT * FROM `myTable` GROUP BY `name`;', context: QueryGenerator }, { title: 'functions work for group by', arguments: ['myTable', function(sequelize) { return { group: [sequelize.fn('YEAR', sequelize.col('createdAt'))] }; }], expectation: 'SELECT * FROM `myTable` GROUP BY YEAR(`createdAt`);', context: QueryGenerator, needsSequelize: true }, { title: 'It is possible to mix sequelize.fn and string arguments to group by', arguments: ['myTable', function(sequelize) { return { group: [sequelize.fn('YEAR', sequelize.col('createdAt')), 'title'] }; }], expectation: 'SELECT * FROM `myTable` GROUP BY YEAR(`createdAt`), `title`;', context: QueryGenerator, needsSequelize: true }, { arguments: ['myTable', {group: ['name', 'title']}], expectation: 'SELECT * FROM `myTable` GROUP BY `name`, `title`;', context: QueryGenerator }, { arguments: ['myTable', {group: 'name', order: [['id', 'DESC']]}], expectation: 'SELECT * FROM `myTable` GROUP BY `name` ORDER BY `id` DESC;', context: QueryGenerator }, { title: 'HAVING clause works with where-like hash', arguments: ['myTable', function(sequelize) { return { attributes: ['*', [sequelize.fn('YEAR', sequelize.col('createdAt')), 'creationYear']], group: ['creationYear', 'title'], having: { creationYear: { gt: 2002 } } }; }], expectation: 'SELECT *, YEAR(`createdAt`) AS `creationYear` FROM `myTable` GROUP BY `creationYear`, `title` HAVING `creationYear` > 2002;', context: QueryGenerator, 
needsSequelize: true }, { arguments: ['myTable', {limit: 10}], expectation: 'SELECT * FROM `myTable` LIMIT 10;', context: QueryGenerator }, { arguments: ['myTable', {limit: 10, offset: 2}], expectation: 'SELECT * FROM `myTable` LIMIT 2, 10;', context: QueryGenerator }, { title: 'uses default limit if only offset is specified', arguments: ['myTable', {offset: 2}], expectation: 'SELECT * FROM `myTable` LIMIT 2, 10000000000000;', context: QueryGenerator }, { title: 'multiple where arguments', arguments: ['myTable', {where: {boat: 'canoe', weather: 'cold'}}], expectation: "SELECT * FROM `myTable` WHERE `myTable`.`boat` = 'canoe' AND `myTable`.`weather` = 'cold';", context: QueryGenerator }, { title: 'no where arguments (object)', arguments: ['myTable', {where: {}}], expectation: 'SELECT * FROM `myTable`;', context: QueryGenerator }, { title: 'no where arguments (string)', arguments: ['myTable', {where: ['']}], expectation: 'SELECT * FROM `myTable` WHERE 1=1;', context: QueryGenerator }, { title: 'no where arguments (null)', arguments: ['myTable', {where: null}], expectation: 'SELECT * FROM `myTable`;', context: QueryGenerator }, { title: 'buffer as where argument', arguments: ['myTable', {where: { field: new Buffer('Sequelize')}}], expectation: "SELECT * FROM `myTable` WHERE `myTable`.`field` = X'53657175656c697a65';", context: QueryGenerator }, { title: 'use != if ne !== null', arguments: ['myTable', {where: {field: {ne: 0}}}], expectation: 'SELECT * FROM `myTable` WHERE `myTable`.`field` != 0;', context: QueryGenerator }, { title: 'use IS NOT if ne === null', arguments: ['myTable', {where: {field: {ne: null}}}], expectation: 'SELECT * FROM `myTable` WHERE `myTable`.`field` IS NOT NULL;', context: QueryGenerator }, { title: 'use IS NOT if not === BOOLEAN', arguments: ['myTable', {where: {field: {not: true}}}], expectation: 'SELECT * FROM `myTable` WHERE `myTable`.`field` IS NOT 1;', context: QueryGenerator }, { title: 'use != if not !== BOOLEAN', arguments: ['myTable', {where: {field: {not: 3}}}], expectation: 'SELECT * FROM `myTable` WHERE `myTable`.`field` != 3;', context: QueryGenerator } ], insertQuery: [ { arguments: ['myTable', { name: 'foo' }], expectation: "INSERT INTO `myTable` (`name`) VALUES ('foo');" }, { arguments: ['myTable', { name: "'bar'" }], expectation: "INSERT INTO `myTable` (`name`) VALUES ('''bar''');" }, { arguments: ['myTable', {data: new Buffer('Sequelize') }], expectation: "INSERT INTO `myTable` (`data`) VALUES (X'53657175656c697a65');" }, { arguments: ['myTable', { name: 'bar', value: null }], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('bar',NULL);" }, { arguments: ['myTable', { name: 'bar', value: undefined }], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('bar',NULL);" }, { arguments: ['myTable', {name: 'foo', birthday: moment('2011-03-27 10:01:55 +0000', 'YYYY-MM-DD HH:mm:ss Z').toDate()}], expectation: "INSERT INTO `myTable` (`name`,`birthday`) VALUES ('foo','2011-03-27 10:01:55.000 +00:00');" }, { arguments: ['myTable', { name: 'foo', value: true }], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',1);" }, { arguments: ['myTable', { name: 'foo', value: false }], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',0);" }, { arguments: ['myTable', {name: 'foo', foo: 1, nullValue: null}], expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL);" }, { arguments: ['myTable', {name: 'foo', foo: 1, nullValue: null}], expectation: "INSERT INTO `myTable` 
(`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL);", context: {options: {omitNull: false}} }, { arguments: ['myTable', {name: 'foo', foo: 1, nullValue: null}], expectation: "INSERT INTO `myTable` (`name`,`foo`) VALUES ('foo',1);", context: {options: {omitNull: true}} }, { arguments: ['myTable', {name: 'foo', foo: 1, nullValue: undefined}], expectation: "INSERT INTO `myTable` (`name`,`foo`) VALUES ('foo',1);", context: {options: {omitNull: true}} }, { arguments: ['myTable', function(sequelize) { return { foo: sequelize.fn('NOW') }; }], expectation: 'INSERT INTO `myTable` (`foo`) VALUES (NOW());', needsSequelize: true } ], bulkInsertQuery: [ { arguments: ['myTable', [{name: 'foo'}, {name: 'bar'}]], expectation: "INSERT INTO `myTable` (`name`) VALUES ('foo'),('bar');" }, { arguments: ['myTable', [{name: "'bar'"}, {name: 'foo'}]], expectation: "INSERT INTO `myTable` (`name`) VALUES ('''bar'''),('foo');" }, { arguments: ['myTable', [{name: 'foo', birthday: moment('2011-03-27 10:01:55 +0000', 'YYYY-MM-DD HH:mm:ss Z').toDate()}, {name: 'bar', birthday: moment('2012-03-27 10:01:55 +0000', 'YYYY-MM-DD HH:mm:ss Z').toDate()}]], expectation: "INSERT INTO `myTable` (`name`,`birthday`) VALUES ('foo','2011-03-27 10:01:55.000 +00:00'),('bar','2012-03-27 10:01:55.000 +00:00');" }, { arguments: ['myTable', [{name: 'bar', value: null}, {name: 'foo', value: 1}]], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('bar',NULL),('foo',1);" }, { arguments: ['myTable', [{name: 'bar', value: undefined}, {name: 'bar', value: 2}]], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('bar',NULL),('bar',2);" }, { arguments: ['myTable', [{name: 'foo', value: true}, {name: 'bar', value: false}]], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',1),('bar',0);" }, { arguments: ['myTable', [{name: 'foo', value: false}, {name: 'bar', value: false}]], expectation: "INSERT INTO `myTable` (`name`,`value`) VALUES ('foo',0),('bar',0);" }, { arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]], expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);" }, { arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]], expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);", context: {options: {omitNull: false}} }, { arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]], expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);", context: {options: {omitNull: true}} // Note: We don't honour this because it makes little sense when some rows may have nulls and others not }, { arguments: ['myTable', [{name: 'foo', foo: 1, nullValue: null}, {name: 'bar', foo: 2, nullValue: null}]], expectation: "INSERT INTO `myTable` (`name`,`foo`,`nullValue`) VALUES ('foo',1,NULL),('bar',2,NULL);", context: {options: {omitNull: true}} // Note: As above }, { arguments: ['myTable', [{name: 'foo'}, {name: 'bar'}], {ignoreDuplicates: true}], expectation: "INSERT OR IGNORE INTO `myTable` (`name`) VALUES ('foo'),('bar');" } ], updateQuery: [ { arguments: ['myTable', {name: 'foo', birthday: moment('2011-03-27 10:01:55 +0000', 'YYYY-MM-DD HH:mm:ss Z').toDate()}, {id: 2}], expectation: "UPDATE `myTable` SET `name`='foo',`birthday`='2011-03-27 10:01:55.000 +00:00' WHERE `id` = 2" }, { arguments: ['myTable', {name: 'foo', birthday: moment('2011-03-27 
10:01:55 +0000', 'YYYY-MM-DD HH:mm:ss Z').toDate()}, {id: 2}], expectation: "UPDATE `myTable` SET `name`='foo',`birthday`='2011-03-27 10:01:55.000 +00:00' WHERE `id` = 2" }, { arguments: ['myTable', { name: 'foo' }, { id: 2 }], expectation: "UPDATE `myTable` SET `name`='foo' WHERE `id` = 2" }, { arguments: ['myTable', { name: "'bar'" }, { id: 2 }], expectation: "UPDATE `myTable` SET `name`='''bar''' WHERE `id` = 2" }, { arguments: ['myTable', { name: 'bar', value: null }, { id: 2 }], expectation: "UPDATE `myTable` SET `name`='bar',`value`=NULL WHERE `id` = 2" }, { arguments: ['myTable', { name: 'bar', value: undefined }, { id: 2 }], expectation: "UPDATE `myTable` SET `name`='bar',`value`=NULL WHERE `id` = 2" }, { arguments: ['myTable', { flag: true }, { id: 2 }], expectation: 'UPDATE `myTable` SET `flag`=1 WHERE `id` = 2' }, { arguments: ['myTable', { flag: false }, { id: 2 }], expectation: 'UPDATE `myTable` SET `flag`=0 WHERE `id` = 2' }, { arguments: ['myTable', {bar: 2, nullValue: null}, {name: 'foo'}], expectation: "UPDATE `myTable` SET `bar`=2,`nullValue`=NULL WHERE `name` = 'foo'" }, { arguments: ['myTable', {bar: 2, nullValue: null}, {name: 'foo'}], expectation: "UPDATE `myTable` SET `bar`=2,`nullValue`=NULL WHERE `name` = 'foo'", context: {options: {omitNull: false}} }, { arguments: ['myTable', {bar: 2, nullValue: null}, {name: 'foo'}], expectation: "UPDATE `myTable` SET `bar`=2 WHERE `name` = 'foo'", context: {options: {omitNull: true}} }, { arguments: ['myTable', function(sequelize) { return { bar: sequelize.fn('NOW') }; }, {name: 'foo'}], expectation: "UPDATE `myTable` SET `bar`=NOW() WHERE `name` = 'foo'", needsSequelize: true }, { arguments: ['myTable', function(sequelize) { return { bar: sequelize.col('foo') }; }, {name: 'foo'}], expectation: "UPDATE `myTable` SET `bar`=`foo` WHERE `name` = 'foo'", needsSequelize: true } ], renameColumnQuery: [ { title: 'Properly quotes column names', arguments: ['myTable', 'foo', 'commit', {commit: 'VARCHAR(255)', bar: 'VARCHAR(255)'}], expectation: 'CREATE TEMPORARY TABLE IF NOT EXISTS `myTable_backup` (`commit` VARCHAR(255), `bar` VARCHAR(255));' + 'INSERT INTO `myTable_backup` SELECT `foo` AS `commit`, `bar` FROM `myTable`;' + 'DROP TABLE `myTable`;' + 'CREATE TABLE IF NOT EXISTS `myTable` (`commit` VARCHAR(255), `bar` VARCHAR(255));' + 'INSERT INTO `myTable` SELECT `commit`, `bar` FROM `myTable_backup`;' + 'DROP TABLE `myTable_backup`;' } ], removeColumnQuery: [ { title: 'Properly quotes column names', arguments: ['myTable', {commit: 'VARCHAR(255)', bar: 'VARCHAR(255)'}], expectation: 'CREATE TABLE IF NOT EXISTS `myTable_backup` (`commit` VARCHAR(255), `bar` VARCHAR(255));' + 'INSERT INTO `myTable_backup` SELECT `commit`, `bar` FROM `myTable`;' + 'DROP TABLE `myTable`;' + 'CREATE TABLE IF NOT EXISTS `myTable` (`commit` VARCHAR(255), `bar` VARCHAR(255));' + 'INSERT INTO `myTable` SELECT `commit`, `bar` FROM `myTable_backup`;' + 'DROP TABLE `myTable_backup`;' } ] }; _.each(suites, (tests, suiteTitle) => { describe(suiteTitle, () => { tests.forEach(test => { const title = test.title || 'SQLite correctly returns ' + test.expectation + ' for ' + JSON.stringify(test.arguments); it(title, function() { // Options would normally be set by the query interface that instantiates the query-generator, but here we specify it explicitly const context = test.context || {options: {}}; if (test.needsSequelize) { if (_.isFunction(test.arguments[1])) test.arguments[1] = test.arguments[1](this.sequelize); if (_.isFunction(test.arguments[2])) test.arguments[2] 
= test.arguments[2](this.sequelize); } QueryGenerator.options = _.assign(context.options, { timezone: '+00:00' }); QueryGenerator._dialect = this.sequelize.dialect; QueryGenerator.sequelize = this.sequelize; QueryGenerator.setOperatorsAliases(Operators.LegacyAliases); const conditions = QueryGenerator[suiteTitle].apply(QueryGenerator, test.arguments); expect(conditions).to.deep.equal(test.expectation); }); }); }); }); }); }
{ "content_hash": "580f40fa024c85071266a0ebc59013c8", "timestamp": "", "source": "github", "line_count": 562, "max_line_length": 227, "avg_line_length": 47.86832740213523, "alnum_prop": 0.5254999628280426, "repo_name": "yonjah/sequelize", "id": "5b71bc0e6b139a7472f08fd5b02af86ddce62290", "size": "26902", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test/unit/dialects/sqlite/query-generator.test.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "2839865" }, { "name": "PowerShell", "bytes": "1468" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_151) on Wed Dec 02 13:39:43 MST 2020 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Interface org.wildfly.swarm.config.ManagementCoreServiceConsumer (BOM: * : All 2.7.1.Final-SNAPSHOT API)</title> <meta name="date" content="2020-12-02"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.wildfly.swarm.config.ManagementCoreServiceConsumer (BOM: * : All 2.7.1.Final-SNAPSHOT API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">Thorntail API, 2.7.1.Final-SNAPSHOT</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/config/class-use/ManagementCoreServiceConsumer.html" target="_top">Frames</a></li> <li><a href="ManagementCoreServiceConsumer.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.wildfly.swarm.config.ManagementCoreServiceConsumer" class="title">Uses of Interface<br>org.wildfly.swarm.config.ManagementCoreServiceConsumer</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a 
href="#org.wildfly.swarm.config">org.wildfly.swarm.config</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.wildfly.swarm.config"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a> in <a href="../../../../../org/wildfly/swarm/config/package-summary.html">org.wildfly.swarm.config</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../org/wildfly/swarm/config/package-summary.html">org.wildfly.swarm.config</a> that return <a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>default <a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="type parameter in ManagementCoreServiceConsumer">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">ManagementCoreServiceConsumer.</span><code><span class="memberNameLink"><a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html#andThen-org.wildfly.swarm.config.ManagementCoreServiceConsumer-">andThen</a></span>(<a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="type parameter in ManagementCoreServiceConsumer">T</a>&gt;&nbsp;after)</code>&nbsp;</td> </tr> </tbody> </table> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../org/wildfly/swarm/config/package-summary.html">org.wildfly.swarm.config</a> with parameters of type <a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>default <a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="type parameter in ManagementCoreServiceConsumer">T</a>&gt;</code></td> <td class="colLast"><span class="typeNameLabel">ManagementCoreServiceConsumer.</span><code><span class="memberNameLink"><a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html#andThen-org.wildfly.swarm.config.ManagementCoreServiceConsumer-">andThen</a></span>(<a 
href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">ManagementCoreServiceConsumer</a>&lt;<a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="type parameter in ManagementCoreServiceConsumer">T</a>&gt;&nbsp;after)</code>&nbsp;</td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/wildfly/swarm/config/ManagementCoreServiceConsumer.html" title="interface in org.wildfly.swarm.config">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">Thorntail API, 2.7.1.Final-SNAPSHOT</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/wildfly/swarm/config/class-use/ManagementCoreServiceConsumer.html" target="_top">Frames</a></li> <li><a href="ManagementCoreServiceConsumer.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2020 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
{ "content_hash": "07f2dfefc8b1a217bfe38001a24beac0", "timestamp": "", "source": "github", "line_count": 181, "max_line_length": 636, "avg_line_length": 52.193370165745854, "alnum_prop": 0.6743939875092622, "repo_name": "wildfly-swarm/wildfly-swarm-javadocs", "id": "2b6c26b31ab183b46720a3692b79d1bc0de019bd", "size": "9447", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "2.7.1.Final-SNAPSHOT/apidocs/org/wildfly/swarm/config/class-use/ManagementCoreServiceConsumer.html", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using Microsoft.WindowsAzure.Common.Internals; namespace Microsoft.WindowsAzure.Management.Compute.Models { /// <summary> /// Parameters supplied to the Update Hosted Service operation. /// </summary> public partial class HostedServiceUpdateParameters { private string _description; /// <summary> /// Optional. A description for the cloud service. The description may /// be up to 1024 characters in length. You must specify a value for /// at least one of Label or Description. /// </summary> public string Description { get { return this._description; } set { this._description = value; } } private IDictionary<string, string> _extendedProperties; /// <summary> /// Optional. Represents the name of an extended cloud service /// property. Each extended property must have a defined name and a /// value. You can have a maximum of 50 extended property name and /// value pairs. The maximum length of the name element is 64 /// characters, only alphanumeric characters and underscores are valid /// in the name, and it must start with a letter. Attempting to use /// other characters, starting with a non-letter character, or /// entering a name that is identical to that of another extended /// property owned by the same service will result in a status code /// 400 (Bad Request) error. Each extended property value has a /// maximum length of 255 characters. /// </summary> public IDictionary<string, string> ExtendedProperties { get { return this._extendedProperties; } set { this._extendedProperties = value; } } private string _label; /// <summary> /// Optional. A name for the cloud service. The name may be up to 100 /// characters in length. You must specify a value for at least one of /// Label or Description. It is recommended that the label be unique /// within the subscription. The name can be used identify the service /// for your tracking purposes. /// </summary> public string Label { get { return this._label; } set { this._label = value; } } private string _reverseDnsFqdn; /// <summary> /// Optional. Dns address to which the cloud service's IP address /// resolves when queried using a reverse Dns query. /// </summary> public string ReverseDnsFqdn { get { return this._reverseDnsFqdn; } set { this._reverseDnsFqdn = value; } } /// <summary> /// Initializes a new instance of the HostedServiceUpdateParameters /// class. /// </summary> public HostedServiceUpdateParameters() { this.ExtendedProperties = new LazyDictionary<string, string>(); } } }
{ "content_hash": "c6546d1ea56799d12d6fa64ccf6baa54", "timestamp": "", "source": "github", "line_count": 83, "max_line_length": 78, "avg_line_length": 38.06024096385542, "alnum_prop": 0.5998733776511554, "repo_name": "travismc1/azure-sdk-for-net", "id": "708faecc2ee3283935c59dfaa9ad40f4a87e5ec9", "size": "3947", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "src/ComputeManagement/Generated/Models/HostedServiceUpdateParameters.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "675" }, { "name": "C#", "bytes": "25858810" }, { "name": "Cucumber", "bytes": "89597" } ], "symlink_target": "" }
<?xml version="1.0"?> <project name="Djinni-test"> <target name="compile"> <mkdir dir="build"/> <mkdir dir="build/local"/> <!-- Veteran djinni users might not have cmake. Given them some advice. --> <property environment="env" /> <fail message="****${line.separator} Can't find cmake! Please install, using e.g.:${line.separator} $ brew install cmake ${line.separator} $ sudo port install cmake ${line.separator} $ apt-get install cmake ${line.separator} $ yum install cmake ${line.separator} ****${line.separator}"> <condition> <not><available file="cmake" filepath="${env.PATH}" /></not> </condition> </fail> <exec executable="cmake" failonerror="true" dir="build"> <!-- Verbose helps make debugging compiler issues easier --> <arg value="-DCMAKE_VERBOSE_MAKEFILE=ON"/> <!-- Make CMake configure Makefile install target to install locall (to ./build/local) to make the shared library easier to reference in java --> <arg value="-DCMAKE_INSTALL_PREFIX:PATH=${basedir}/build/local"/> <arg value=".."/> </exec> <exec executable="make" failonerror="true" dir="build"> <arg value="-j12"/> </exec> <exec executable="make" failonerror="true" dir="build"> <arg value="install"/> </exec> <mkdir dir="build/classes"/> <javac destdir="build/classes" includeantruntime="false" encoding="UTF-8" debug="on" target="1.7" source="1.7"> <classpath> <fileset dir="../../deps/java/"><include name="*.jar"/></fileset> <fileset dir="../../deps/java/test"><include name="*.jar"/></fileset> </classpath> <src path="../../support-lib/java/"/> <src path="../generated-src"/> <src path="../handwritten-src"/> </javac> </target> <target name="test"> <java classname="com.dropbox.djinni.test.AllTests" fork="true" failonerror="true"> <classpath> <fileset dir="../../deps/java/"><include name="*.jar"/></fileset> <fileset dir="../../deps/java/test"><include name="*.jar"/></fileset> <pathelement path="${basedir}/build/classes"/> </classpath> <jvmarg value="-Xcheck:jni"/> <sysproperty key="djinni.native_libs_dirs" value="${basedir}/build/local/lib"/> </java> </target> <target name="jar"> <jar destfile="build/jar/DjinniTestSuite.jar" basedir="build/classes"> <manifest> <attribute name="Main-Class" value="com.dropbox.djinni.test.AllTests"/> </manifest> <zipfileset dir="${basedir}/build/local/lib" prefix="resources/djinni_native_libs"> <include name="lib*"/> </zipfileset> <zipgroupfileset dir="../../deps/java" includes="*.jar" /> <zipgroupfileset dir="../../deps/java/test" includes="*.jar" /> </jar> </target> <target name="run-jar"> <java jar="build/jar/DjinniTestSuite.jar" fork="true"/> </target> <target name="clean"> <delete dir="build"/> </target> </project>
{ "content_hash": "967859513efaa97f7152fde8ebcb547c", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 115, "avg_line_length": 39.08974358974359, "alnum_prop": 0.597244998360118, "repo_name": "jrogers/djinni", "id": "eea2ce5e458108b1af08eeec3475e8c6b656cbec", "size": "3049", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "test-suite/java/build.xml", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "276014" }, { "name": "CMake", "bytes": "2811" }, { "name": "Java", "bytes": "234466" }, { "name": "Lex", "bytes": "4569" }, { "name": "Makefile", "bytes": "3164" }, { "name": "Objective-C", "bytes": "80626" }, { "name": "Objective-C++", "bytes": "106067" }, { "name": "Python", "bytes": "4336" }, { "name": "Scala", "bytes": "188140" }, { "name": "Shell", "bytes": "11211" } ], "symlink_target": "" }
class Api::V1::ProposalsController < ApplicationController

  def index
    render json: Proposal.where(law_id: params[:law_id]).map { |proposal| proposal.scoped_with(current_player) }
  end

  def create
    proposal = Proposal.create(proposal_params.merge({player_id: current_player.id}))
    Point.points_for_proposal(current_player)
    render json: proposal
  end

  def update
    proposal = Proposal.find(params[:id])
    proposal = proposal.scoped_with(current_player).update_vote(post_params)
    render json: proposal.scoped_with(current_player)
  end

  private

  def post_params
    params.require(:proposal).permit(:favor)
  end

  def proposal_params
    params.require(:proposal).permit(:description, :player_id, :law_id)
  end
end
{ "content_hash": "a2ce2da3e8a975fe6fc0592f0afe3b05", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 99, "avg_line_length": 27.333333333333332, "alnum_prop": 0.7181571815718157, "repo_name": "plraphael/parlamento_virtual", "id": "0f2651bb3eed554a8e742ee0be901296836c9c7e", "size": "738", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/api/v1/proposals_controller.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "39393" }, { "name": "CoffeeScript", "bytes": "13707" }, { "name": "JavaScript", "bytes": "19262" }, { "name": "Perl", "bytes": "65" }, { "name": "Ruby", "bytes": "329457" }, { "name": "Shell", "bytes": "4652" } ], "symlink_target": "" }
package com.horn.common.cdi.sampleclass;

/**
 * @author lesinsa on 23.03.14.
 */
public class A {
}
{ "content_hash": "1e5b8aeb2750d99f96fc5bfaee8176ab", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 40, "avg_line_length": 14.428571428571429, "alnum_prop": 0.6633663366336634, "repo_name": "lesinsa/horn-soft-pub", "id": "e799e9f008093ddf808759be97ef44865ffe0843", "size": "101", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "commons/common-jee/src/test/java/com/horn/common/cdi/sampleclass/A.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "658639" } ], "symlink_target": "" }
{-+
This is a small utility to strip blank lines and comments from Haskell files.
Haskell modules are read from files named on the command line. The result is
output on stdout. Haskell files whose names end with ".lhs" are assumed to be
in literate style.
-}

import System(getArgs)
import Unlit(readHaskellFile)
import StripComments(stripcomments)

main = mapM_ stripFile =<< getArgs

stripFile path = putStrLn . stripcomments =<< readHaskellFile path
{ "content_hash": "faeb3f5a729bc9d83aac2692ab2fc350", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 78, "avg_line_length": 26.823529411764707, "alnum_prop": 0.7763157894736842, "repo_name": "mpickering/HaRe", "id": "8d48fe23dea4c54bdae428d68b18ea6fa9e218e9", "size": "456", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "old/tools/hsutils/stripcomments.hs", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "7740" }, { "name": "Emacs Lisp", "bytes": "118236" }, { "name": "Groff", "bytes": "107" }, { "name": "HTML", "bytes": "231247" }, { "name": "Haskell", "bytes": "6457762" }, { "name": "Isabelle", "bytes": "5201" }, { "name": "LLVM", "bytes": "385" }, { "name": "Makefile", "bytes": "17891" }, { "name": "Objective-C++", "bytes": "382" }, { "name": "Ruby", "bytes": "4178" }, { "name": "Shell", "bytes": "66440" }, { "name": "TeX", "bytes": "329808" }, { "name": "VimL", "bytes": "35502" }, { "name": "Yacc", "bytes": "120163" } ], "symlink_target": "" }
require 'rdoc/context'

##
# A TopLevel context is a representation of the contents of a single file

class RDoc::TopLevel < RDoc::Context

  ##
  # This TopLevel's File::Stat struct

  attr_accessor :file_stat

  ##
  # Relative name of this file

  attr_accessor :relative_name

  ##
  # Absolute name of this file

  attr_accessor :absolute_name

  attr_accessor :diagram

  ##
  # The parser that processed this file

  attr_accessor :parser

  ##
  # Returns all classes and modules discovered by RDoc

  def self.all_classes_and_modules
    classes_hash.values + modules_hash.values
  end

  ##
  # Returns all classes discovered by RDoc

  def self.classes
    classes_hash.values
  end

  ##
  # Hash of all classes known to RDoc

  def self.classes_hash
    @all_classes
  end

  ##
  # All TopLevels known to RDoc

  def self.files
    @all_files.values
  end

  ##
  # Hash of all files known to RDoc

  def self.files_hash
    @all_files
  end

  ##
  # Finds the class with +name+ in all discovered classes

  def self.find_class_named(name)
    classes_hash[name]
  end

  ##
  # Finds the class with +name+ starting in namespace +from+

  def self.find_class_named_from name, from
    from = find_class_named from unless RDoc::Context === from

    until RDoc::TopLevel === from do
      return nil unless from

      klass = from.find_class_named name
      return klass if klass

      from = from.parent
    end

    find_class_named name
  end

  ##
  # Finds the class or module with +name+

  def self.find_class_or_module(name)
    name =~ /^::/
    name = $' || name

    RDoc::TopLevel.classes_hash[name] || RDoc::TopLevel.modules_hash[name]
  end

  ##
  # Finds the file with +name+ in all discovered files

  def self.find_file_named(name)
    @all_files[name]
  end

  ##
  # Finds the module with +name+ in all discovered modules

  def self.find_module_named(name)
    modules_hash[name]
  end

  ##
  # Returns all modules discovered by RDoc

  def self.modules
    modules_hash.values
  end

  ##
  # Hash of all modules known to RDoc

  def self.modules_hash
    @all_modules
  end

  ##
  # Empties RDoc of stored class, module and file information

  def self.reset
    @all_classes = {}
    @all_modules = {}
    @all_files   = {}
  end

  reset

  ##
  # Creates a new TopLevel for +file_name+

  def initialize(file_name)
    super()
    @name = nil
    @relative_name = file_name
    @absolute_name = file_name
    @file_stat     = File.stat(file_name) rescue nil # HACK for testing
    @diagram       = nil
    @parser        = nil

    RDoc::TopLevel.files_hash[file_name] = self
  end

  ##
  # Adds +method+ to Object instead of RDoc::TopLevel

  def add_method(method)
    object = self.class.find_class_named 'Object'
    object = add_class RDoc::NormalClass, 'Object' unless object
    object.add_method method
  end

  ##
  # Base name of this file

  def base_name
    File.basename @absolute_name
  end

  ##
  # See RDoc::TopLevel.find_class_or_module

  def find_class_or_module name
    RDoc::TopLevel.find_class_or_module name
  end

  ##
  # Finds a class or module named +symbol+

  def find_local_symbol(symbol)
    find_class_or_module(symbol) || super
  end

  ##
  # Finds a module or class with +name+

  def find_module_named(name)
    find_class_or_module(name) || find_enclosing_module_named(name)
  end

  ##
  # The name of this file

  def full_name
    @relative_name
  end

  ##
  # URL for this with a +prefix+

  def http_url(prefix)
    path = [prefix, @relative_name.tr('.', '_')]

    File.join(*path.compact) + '.html'
  end

  def inspect # :nodoc:
    "#<%s:0x%x %p modules: %p classes: %p>" % [
      self.class, object_id,
      base_name,
      @modules.map { |n,m| m },
      @classes.map { |n,c| c }
    ]
  end

  ##
  # Date this file was last modified, if known

  def last_modified
    @file_stat ? file_stat.mtime.to_s : 'Unknown'
  end

  ##
  # Base name of this file

  alias name base_name

  ##
  # Path to this file

  def path
    http_url RDoc::RDoc.current.generator.file_dir
  end

  def pretty_print q # :nodoc:
    q.group 2, "[#{self.class}: ", "]" do
      q.text "base name: #{base_name.inspect}"
      q.breakable

      items = @modules.map { |n,m| m }
      items.push(*@classes.map { |n,c| c }) # include classes as well as modules
      q.seplist items do |mod| q.pp mod end
    end
  end

end
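RDoc::TopLevel keeps three class-level hashes (@all_classes, @all_modules and @all_files) as a process-wide registry: every TopLevel registers itself during initialize, and lookups such as find_class_or_module and find_file_named consult those hashes rather than any single instance. A minimal usage sketch follows; the file name is invented, and a real RDoc run would populate the registry through its parsers instead of constructing TopLevel objects by hand.

require 'rdoc/top_level'

# Start from an empty registry; reset is also run once when the class loads.
RDoc::TopLevel.reset

# Each parsed source file gets its own TopLevel, which registers itself in
# RDoc::TopLevel.files_hash as a side effect of initialize.
top_level = RDoc::TopLevel.new 'lib/example.rb' # hypothetical file name

# ... a parser would now add classes and modules to top_level ...

RDoc::TopLevel.files                            # => [top_level]
RDoc::TopLevel.find_file_named 'lib/example.rb' # => top_level

# Lookups strip a leading "::" before consulting the class/module hashes.
RDoc::TopLevel.find_class_or_module '::Object'

# http_url swaps dots for underscores and appends ".html".
top_level.http_url 'files'                      # => "files/lib/example_rb.html"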
{ "content_hash": "b04cb51a712ad68b898f267b1455f870", "timestamp": "", "source": "github", "line_count": 248, "max_line_length": 74, "avg_line_length": 17.600806451612904, "alnum_prop": 0.6290950744558992, "repo_name": "takano32/rubinius", "id": "306790fc159de0c029eac2b96c5dcfb73c1cebdd", "size": "4365", "binary": false, "copies": "15", "ref": "refs/heads/master", "path": "preinstalled-gems/data/gems/rdoc-2.5.1/lib/rdoc/top_level.rb", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }