content
stringlengths
10
4.9M
/////////////////////////////////////////////////////////////////////////// // MemoryPool // /////////////////////////////////////////////////////////////////////////// // Pool is an encapsulation of a memory pool. It is a custom written // memory manager for Iridescent instead of relying on the compiler // supplied malloc/free (CRT) and new/delete (C++RT) to take advantages // of certain memory usage footprints commonly found in game engines. The // interface to this memory pool are the allocate/deallocate methods. // // TODO: Pool is a singleton. // // Externally, the pool can be viewed as a BIG area of free memory. // Whenever a client requests for some memory, the pool returns a chunk // of memory of size at least as big as the requested one. // // Internally, the pool consists of multiple blocks of memory, each of // which are made up of fixed sized chunks. For more details about the // Block and Chunk objects, take a look at Block.hpp and Chunk.hpp. // // ISSUES: // All allocations and deallocations must occur after the call to create() // and before the call the destroy() (unless you're looking for UB) /////////////////////////////////////////////////////////////////////////// #ifndef MEMORY_POOL_HPP #define MEMORY_POOL_HPP #include <map> #include "System/Types.hpp" #include "System/Log.hpp" #include "Chunk.hpp" #include "Block.hpp" namespace Iridescent { namespace System { namespace Memory { /////////////////////////////////////////////////////////////////////////// // Pool class implements a memory pool in terms of memory blocks. 
/////////////////////////////////////////////////////////////////////////// class Pool { public: static Pool& get(); public: /////////////////////////////////////////////////////////////////////////// // Default constructor /////////////////////////////////////////////////////////////////////////// Pool(); /////////////////////////////////////////////////////////////////////////// // Destructor /////////////////////////////////////////////////////////////////////////// ~Pool(); /////////////////////////////////////////////////////////////////////////// // Create the memory pool /////////////////////////////////////////////////////////////////////////// bool create(); /////////////////////////////////////////////////////////////////////////// // Destroy the memory pool /////////////////////////////////////////////////////////////////////////// // All memory blocks (and hence chunks) are released back to the OS. /////////////////////////////////////////////////////////////////////////// void destroy(); /////////////////////////////////////////////////////////////////////////// // Main allocation/deallocation API /////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////// // Returns a pointer to a memory buffer at least 'size' bytes wide /////////////////////////////////////////////////////////////////////////// Types::MemAddress allocate( const Types::UInt64 size ) { Types::MemAddress address = nullptr; Types::Int16 block = 4; // Pick the best possible block to use if ( size <= 4 ) { // Use block-4 address = m_block4B.requestChunk(); block = 4; } else if ( size > 4 && size <= 8 ) { // Use block-8 address = m_block8B.requestChunk(); block = 8; } else if ( size > 8 && size <= 16 ) { // Use block-16 address = m_block16B.requestChunk(); block = 16; } else if ( size > 16 && size <= 32 ) { // Use block-32 address = m_block32B.requestChunk(); block = 32; } else if ( size > 32 && size 
<= 64 ) { // Use block-64 address = m_block64B.requestChunk(); block = 64; } else if ( size > 64 && size <= 128 ) { // Use block-128 address = m_block128B.requestChunk(); block = 128; } else if ( size > 128 && size <= 256 ) { // Use block-256 address = m_block256B.requestChunk(); block = 256; } else if ( size > 256 && size <= 512 ) { // Use block-512 address = m_block512B.requestChunk(); block = 512; } else if ( size > 512 && size <= 1024 ) { // Use block-1024 address = m_block1024B.requestChunk(); block = 1024; } else { LOG(Log::Level::Error) << "Unable to find suitable memory block to allocate \'" << size << "\' bytes." << std::endl; //std::cout << "Unable to find suitable memory block to allocate \'" << size << "\' bytes." << std::endl; } if ( address != nullptr ) { // Map the chunk under use to it's correct block m_activeChunkMap.insert( { address, block } ); LOG(Log::Level::Debug) << "New chunk assigned by pool: " << address << ", Block: " << block << std::endl; //std::cout << "New chunk assigned by pool: " << address << ", Block: " << block << std::endl; } return address; } /////////////////////////////////////////////////////////////////////////// // Deallocates the assigned memory buffer and adds it back to the memory // pool. /////////////////////////////////////////////////////////////////////////// void deallocate( Types::MemAddress chunk ); /////////////////////////////////////////////////////////////////////////// // Get a pointer to a specified block. 
/////////////////////////////////////////////////////////////////////////// template <Types::Int16 S> Block<S>* getBlock() { // switch ( S ) // { // case 4: return &m_block4B; // // case 8: return &m_block8B; // // case 16: return &m_block16B; // // case 32: return &m_block32B; // // case 64: return &m_block64B; // // case 128: return &m_block128B; // // case 256: return &m_block256B; // // case 512: return &m_block512B; // // case 1024: return &m_block1024B; // default: return nullptr; // } return &m_block1024B; } private: // Memory blocks that build up the pool Block4B m_block4B; Block8B m_block8B; Block16B m_block16B; Block32B m_block32B; Block64B m_block64B; Block128B m_block128B; Block256B m_block256B; Block512B m_block512B; Block1024B m_block1024B; // The default no. of chunks per block const std::map<Types::Int16, Types::Int64> m_chunksPerBlock = { { 4, 100000 }, { 8, 100000 }, { 16, 100000 }, { 32, 100000 }, { 64, 100000 }, { 128, 10000 }, { 256, 10000 }, { 512, 1000 }, { 1024, 100 } }; //Types::Map<Types::Int16, Types::Int64> m_chunksPerBlock; // Currently assigned chunks (used for returning a deallocated chunk to it's currect block) std::map<Types::MemAddress, Types::Int16> m_activeChunkMap; //Types::Map<Types::MemAddress, Types::Int16> m_activeChunkMap; }; //static Pool MemoryPool; static Block4B testBlock; } // End of namespace Memory } // End of namespace System } // End of namespace Iridescent #endif // MEMORY_POOL_HPP
/// Placeholder exporter; carries a single `dummy` flag.
struct Exporter {
    dummy: bool,
}

impl Exporter {
    /// Build an exporter with the `dummy` flag cleared.
    fn new() -> Exporter {
        Self { dummy: false }
    }
}

// vim: et tw=78 sw=4:
def descr_fromstring(self, space, w_s):
    """Deprecated alias of frombytes(): append items parsed from w_s.

    Emits a DeprecationWarning, then delegates to _frombytes().
    Raises ValueError when w_s is the array itself (self-append is
    disallowed, as reading and writing the same buffer would corrupt it).
    """
    if self is w_s:
        raise oefmt(space.w_ValueError,
                    "array.fromstring(x): x cannot be self")
    s = space.getarg_w('s#', w_s)
    msg = "fromstring() is deprecated. Use frombytes() instead."
    # BUG FIX: was self.space.w_DeprecationWarning — every other access in
    # this method goes through the 'space' argument, and W_Root subclasses
    # are not guaranteed to carry a 'space' attribute.
    space.warn(space.newtext(msg), space.w_DeprecationWarning)
    self._frombytes(space, s)
// Static car fixture used as seed/demo data.
// Each record: numeric `id`, `brand`, `color`, and display `model` name.
export const CARS = [
  {
    id: 1,
    brand: 'BMW',
    color: 'Gold',
    model: 'BMW X5',
  },
];
//
//     Generated by classdumpios 1.0.1 (64 bit) (iOS port by DreamDevLost)(Debug version compiled Sep 26 2020 13:48:20).
//
//  Copyright (C) 1997-2019 <NAME>.
//

#import "NSObject-Protocol.h"

// Forward declarations for every class referenced in the selector
// signatures below. BUG FIX: RPFileTransferSession and CUMessageSession
// were used without being forward-declared, which breaks compilation of
// this header in isolation.
@class SASProximityHandshake, SASProximityInformation;
@class RPFileTransferSession, CUMessageSession;

// XPC protocol implemented by the proximity-setup target device daemon.
// (Generated header — semantics of individual selectors are inferred from
// their names only; confirm against the budd daemon implementation.)
@protocol BYDaemonProximityTargetProtocol <NSObject>
- (void)suspendConnectionForSoftwareUpdate:(void (^)(void))arg1;
- (void)showMigrationInterfaceOnSource;
- (void)endDeviceToDeviceMigration;
- (void)fileTransferSessionTemplate:(void (^)(RPFileTransferSession *))arg1;
- (void)storeHandshake:(SASProximityHandshake *)arg1;
- (void)storeInformation:(SASProximityInformation *)arg1;
- (void)resumeProximitySetup:(void (^)(SASProximityHandshake *, SASProximityInformation *, CUMessageSession *, NSString *, NSString *, _Bool))arg1;
- (void)hasConnection:(void (^)(_Bool))arg1;
- (void)endPairing;
- (void)endAdvertisingProximitySetup;
- (void)beginAdvertisingProximitySetup;
@end
<gh_stars>0 package ru.nkotkin; import org.junit.Test; import static org.junit.Assert.assertThat; import static org.hamcrest.core.Is.is; import static org.hamcrest.number.IsCloseTo.closeTo; /** * Tests for Triangle.java. */ public class TriangleTest { /** * Delta for CloseTo. */ public static final double DELTA = 0.0001; /** * Just a number. */ public static final double THREE_DOUBLE = 3; /** * Just a number. */ public static final double ONE_POINT_FIVE_DOUBLE = 1.5; /** * whenAddPointsThenReturnArea. * @throws Exception - any */ @Test public final void whenAddPointsThenReturnArea() throws Exception { // Assign Point a = new Point(0d, 0d); Point b = new Point(1d, 1d); Point c = new Point(THREE_DOUBLE, 0d); Triangle triangle = new Triangle(a, b, c); double checked = ONE_POINT_FIVE_DOUBLE; // Act double result = triangle.area(); // Assert assertThat(result, is(closeTo(checked, DELTA))); } /** * whenWrongLinesThenReturnException. * @throws Exception - any */ @Test(expected = ArithmeticException.class) public final void whenWrongLinesThenReturnException() throws Exception { Point a = new Point(0d, 0d); Point b = new Point(0d, 0d); Point c = new Point(0d, 0d); Triangle triangle = new Triangle(a, b, c); triangle.area(); } }
GOP lawmakers face criticism for opposing Sept. 11 responders bill The measure would provide medical care to rescue workers and survivors of the terrorist attacks at the World Trade Center. Republicans recently blocked a Senate vote, but another may be called this week. Further eroding the GOP's political position has been support for the legislation from prominent Republican leaders, including Rudolph W. Giuliani, who was New York's mayor at the time of the attacks, and former Arkansas Gov. Mike Huckabee. He launched the nonprofit FealGood Foundation to lobby on behalf of first responders. "I can tell you, whoever votes against 9/11 responders a couple of days before Christmas is truly un-American," said John Feal, a former demolition supervisor who lost a foot when a steel beam fell on it during recovery efforts at the World Trade Center. After the Sept. 11 attacks, the GOP fashioned itself as the party that celebrated the heroism of the Sept. 11 workers, but now is seen by many as stalling the healthcare of last resort. As advocates press for Senate approval, Republican resistance to the measure has grown increasingly untenable. Reporting from Washington and New York — Congressional Republicans are coming under growing criticism for their opposition to a bill that would provide medical care for Sept. 11 attack responders and survivors, including ailing police officers and firefighters. New York's senators now believe they have the support to overcome a Republican filibuster in a vote that may be held as early as Tuesday. "We are on the verge of an eleventh-hour breakthrough," said Sen. Charles E. Schumer (D-N.Y.). The GOP largely opposes funding what many have characterized as a new entitlement program at a time of soaring deficits. Sen. Jon Kyl (R-Ariz.), the No. 2 party leader, said Sunday that the Senate should focus its remaining days on the annual year-end spending, with funding for the government set to expire Tuesday. Sen. John McCain (R-Ariz.) 
said last week the Senate should concentrate on an arms treaty with Russia. Passage of the healthcare measure even now may come too late for many. The Rev. Stephan Petrovic of Ohio, a chaplain who tended to the dead and dying at the World Trade Center and who now is in hospice care, does not expect to see another Christmas. Petrovic, his voice barely audible, suffered lung damage that he said resulted from breathing dust at the site following the attack. But for others like him, the chaplain said passage could help meet the high costs of paying for medical coverage on their own. Many responders and volunteers no longer have insurance as they became disabled from work or have bounced around the workers' compensation system. "What insurance?" said Petrovic, 59. "Most of us lost our jobs; we couldn't work anywhere. We're sick people." Mayor Michael R. Bloomberg called Monday for passage, saying that caring for the first responders "is nothing less than a national duty." "The time for excuses is over," Bloomberg said. "It's time to end the debate and let the bill be voted on." In response to the GOP complaints, lawmakers have scaled back the cost of the measure, from $7.4 billion to $6.2 billion. The larger bill was blocked two weeks ago in a party-line vote in the Senate. The House passed a similar bill in September after weeks of GOP-led opposition. Advocates say one of the biggest boosts to the legislative effort arrived last week when comedian Jon Stewart, host of Comedy Central's "The Daily Show," broadcast the stories of World Trade Center workers and survivors and pointedly criticized the GOP's obstruction.
// Copyright (c) 2016 Graphcore Ltd. All rights reserved. // Simple test case for IPU nonLinearity // #define BOOST_TEST_MODULE NonLinearityTest #include "../popnn/NonLinearityInternal.hpp" #include <boost/test/unit_test.hpp> #include <iostream> #include <limits> #include <poplar/Engine.hpp> #include <poplibs_support/TestDevice.hpp> #include <poplibs_test/NonLinearity.hpp> #include <poplibs_test/Util.hpp> #include <poplin/codelets.hpp> #include <popnn/NonLinearity.hpp> #include <popnn/NonLinearityDefUtil.hpp> #include <popnn/codelets.hpp> #include <popops/EncodingConstants.hpp> #include <popops/codelets.hpp> #include <poputil/TileMapping.hpp> using namespace poplar; using namespace poplar::program; using namespace poputil; using namespace popnn; using namespace poplibs_test; using namespace poplibs_test::util; using namespace poplibs_support; namespace utf = boost::unit_test; namespace fpc = boost::test_tools::fpc; #define TOL 0.1 // tolerance of 0.1% #define FLOAT_ATOL 1e-20 #define HALF_ATOL 1e-7 BOOST_AUTO_TEST_CASE( NonLinearity, *utf::tolerance<float>(fpc::percent_tolerance<float>(TOL)) * utf::tolerance<double>(fpc::percent_tolerance<double>(TOL))) { auto device = createTestDevice(TEST_TARGET); auto &target = device.getTarget(); Graph graph(target); popnn::addCodelets(graph); popops::addCodelets(graph); // layer parameters const unsigned zNGroups = 1; const std::size_t zChunk = 1; const std::size_t ySize = 100; const std::size_t xSize = 30; auto actF = graph.addVariable(FLOAT, {1, zNGroups, ySize, xSize, zChunk}, "actF"); auto actH = graph.addVariable(HALF, {1, zNGroups, ySize, xSize, zChunk}, "actH"); auto deltaF = graph.addVariable(FLOAT, {1, zNGroups, ySize, xSize, zChunk}, "actF"); auto deltaH = graph.addVariable(HALF, {1, zNGroups, ySize, xSize, zChunk}, "actH"); // arbitraray mappings mapTensorLinearly(graph, actF); mapTensorLinearly(graph, actH); mapTensorLinearly(graph, deltaF); mapTensorLinearly(graph, deltaH); graph.createHostWrite("inF", actF); 
graph.createHostWrite("inH", actH); graph.createHostRead("outF", actF); graph.createHostRead("outH", actH); graph.createHostWrite("inDeltaF", deltaF); graph.createHostWrite("inDeltaH", deltaH); graph.createHostRead("outDeltaF", deltaF); graph.createHostRead("outDeltaH", deltaH); const auto batchSize = 1; // test inputs calculated in harness boost::multi_array<double, 4> hActIn( boost::extents[batchSize][ySize][xSize][zChunk]); boost::multi_array<double, 4> hDeltaIn( boost::extents[batchSize][ySize][xSize][zChunk]); // outputs calculated by target code std::size_t actOutFSize = 0; std::size_t actOutHSize = 0; std::size_t actInFSize = 0; std::size_t actInHSize = 0; auto rawHActOutF = allocateHostMemoryForTensor(target, actF, 1, actOutFSize); auto rawHActOutH = allocateHostMemoryForTensor(target, actH, 1, actOutHSize); auto rawHActInF = allocateHostMemoryForTensor(target, actF, 1, actInFSize); auto rawHActInH = allocateHostMemoryForTensor(target, actH, 1, actInHSize); std::size_t dOutFSize = 0; std::size_t dOutHSize = 0; std::size_t dInFSize = 0; std::size_t dInHSize = 0; auto rawHDeltaOutF = allocateHostMemoryForTensor(target, deltaF, 1, dOutFSize); auto rawHDeltaOutH = allocateHostMemoryForTensor(target, deltaH, 1, dOutHSize); auto rawHDeltaInF = allocateHostMemoryForTensor(target, deltaF, 1, dInFSize); auto rawHDeltaInH = allocateHostMemoryForTensor(target, deltaH, 1, dInHSize); boost::multi_array<double, 4> hActOutF( boost::extents[batchSize][ySize][xSize][zChunk]); boost::multi_array<double, 4> hActOutH( boost::extents[batchSize][ySize][xSize][zChunk]); boost::multi_array<double, 4> hDeltaOutF( boost::extents[batchSize][ySize][xSize][zChunk]); boost::multi_array<double, 4> hDeltaOutH( boost::extents[batchSize][ySize][xSize][zChunk]); // reference results calculated in harness boost::multi_array<double, 4> hRefActOut( boost::extents[batchSize][ySize][xSize][zChunk]); boost::multi_array<double, 4> hRefDeltaOut( boost::extents[batchSize][ySize][xSize][zChunk]); // 
initialse hInF[][] to arbitrary values float val = -100.0; for (unsigned b = 0; b < batchSize; ++b) { for (unsigned y = 0; y < ySize; ++y) { for (unsigned x = 0; x < xSize; ++x) { for (unsigned chan = 0; chan < zChunk; chan++) { hRefDeltaOut[b][y][x][chan] = hDeltaIn[b][y][x][chan] = val / 200; hActIn[b][y][x][chan] = val + 1000 * chan; } val += 7.01; if (val > 200) val -= 400; } } } for (auto n : { NonLinearityType::RELU, NonLinearityType::SIGMOID, NonLinearityType::TANH, NonLinearityType::GELU, }) { // Check backward gradient calculations std::cerr << "Check nl type " << n << "\n"; // Check forward activation calculation hRefActOut = hActIn; poplibs_test::nonLinearity(n, hRefActOut); // build and run the target code auto fwdProg = Sequence(); nonLinearityInPlace(graph, n, actF, fwdProg); nonLinearityInPlace(graph, n, actH, fwdProg); ; Engine fwdEng(graph, fwdProg); device.bind([&](const Device &d) { fwdEng.load(d); copy(target, hActIn, FLOAT, rawHActInF.get()); fwdEng.writeTensor("inF", rawHActInF.get(), rawHActInF.get() + actInFSize); copy(target, hActIn, HALF, rawHActInH.get()); fwdEng.writeTensor("inH", rawHActInH.get(), rawHActInH.get() + actInHSize); fwdEng.run(); fwdEng.readTensor("outF", rawHActOutF.get(), rawHActOutF.get() + actOutFSize); fwdEng.readTensor("outH", rawHActOutH.get(), rawHActOutH.get() + actOutHSize); }); copy(target, HALF, rawHActOutH.get(), hActOutH); copy(target, FLOAT, rawHActOutF.get(), hActOutF); BOOST_TEST(checkIsClose("outF", hActOutF, hRefActOut, TOL, FLOAT_ATOL)); BOOST_TEST(checkIsClose("outH", hActOutH, hRefActOut, TOL, HALF_ATOL)); hRefDeltaOut = hDeltaIn; poplibs_test::bwdNonLinearity(n, hActIn, hRefDeltaOut); // build and run the target code auto bwdProg = Sequence(); auto deltaFF = nonLinearityInputGradient(graph, n, actF, deltaF, bwdProg); bwdProg.add(Copy(deltaFF, deltaF)); auto deltaHH = nonLinearityInputGradient(graph, n, actH, deltaH, bwdProg); bwdProg.add(Copy(deltaHH, deltaH)); Engine bwdEng(graph, bwdProg); 
device.bind([&](const Device &d) { bwdEng.load(d); copy(target, hActIn, FLOAT, rawHActInF.get()); bwdEng.writeTensor("inF", rawHActInF.get(), rawHActInF.get() + actInFSize); copy(target, hActIn, HALF, rawHActInH.get()); bwdEng.writeTensor("inH", rawHActInH.get(), rawHActInH.get() + actInHSize); copy(target, hDeltaIn, FLOAT, rawHDeltaInF.get()); bwdEng.writeTensor("inDeltaF", rawHDeltaInF.get(), rawHDeltaInF.get() + dInFSize); copy(target, hDeltaIn, HALF, rawHDeltaInH.get()); bwdEng.writeTensor("inDeltaH", rawHDeltaInH.get(), rawHDeltaInH.get() + dInHSize); bwdEng.run(); bwdEng.readTensor("outDeltaF", rawHDeltaOutF.get(), rawHDeltaOutF.get() + dOutFSize); bwdEng.readTensor("outDeltaH", rawHDeltaOutH.get(), rawHDeltaOutH.get() + dOutHSize); }); copy(target, HALF, rawHDeltaOutH.get(), hDeltaOutH); copy(target, FLOAT, rawHDeltaOutF.get(), hDeltaOutF); BOOST_TEST( checkIsClose("deltaOutF", hDeltaOutF, hRefDeltaOut, TOL, FLOAT_ATOL)); BOOST_TEST( checkIsClose("deltaOutH", hDeltaOutH, hRefDeltaOut, TOL, HALF_ATOL)); } } BOOST_AUTO_TEST_CASE( NonLinearitySoftMax, *utf::tolerance<float>(fpc::percent_tolerance<float>(0.1)) * utf::tolerance<double>(fpc::percent_tolerance<double>(0.1))) { auto device = createTestDevice(TEST_TARGET); auto &target = device.getTarget(); Graph graph(target); popnn::addCodelets(graph); popops::addCodelets(graph); poplin::addCodelets(graph); // support only 2D const unsigned batchSize = 2; const unsigned numChannels = 128; auto actF = graph.addVariable(FLOAT, {batchSize, numChannels}, "actF"); auto actH = graph.addVariable(HALF, {batchSize, numChannels}, "actH"); auto deltaF = graph.addVariable(FLOAT, {batchSize, numChannels}, "deltaF"); auto deltaH = graph.addVariable(HALF, {batchSize, numChannels}, "deltaH"); // arbitrary mappings mapTensorLinearly(graph, actF); mapTensorLinearly(graph, actH); mapTensorLinearly(graph, deltaF); mapTensorLinearly(graph, deltaH); std::vector<std::pair<std::string, char *>> tmap; Sequence uploadProg, downloadProg; 
auto rawHActF = allocateHostMemoryForTensor(actF, "actF", graph, uploadProg, downloadProg, tmap); auto rawHActH = allocateHostMemoryForTensor(actH, "actH", graph, uploadProg, downloadProg, tmap); auto rawHDeltaF = allocateHostMemoryForTensor(deltaF, "deltaF", graph, uploadProg, downloadProg, tmap); auto rawHDeltaH = allocateHostMemoryForTensor(deltaH, "deltaH", graph, uploadProg, downloadProg, tmap); boost::multi_array<double, 2> hActIn(boost::extents[batchSize][numChannels]), hDeltaIn(boost::extents[batchSize][numChannels]), hActOutF(boost::extents[batchSize][numChannels]), hActOutH(boost::extents[batchSize][numChannels]), hDeltaOutF(boost::extents[batchSize][numChannels]), hDeltaOutH(boost::extents[batchSize][numChannels]); for (unsigned b = 0; b < batchSize; ++b) { for (unsigned c = 0; c < numChannels; ++c) { double sample = (1.0 - 2 * (c & 1)) * (1 + b) * 0.01 * c; hActIn[b][c] = sample; hDeltaIn[b][c] = double(b * numChannels) - double(c * batchSize); } } for (const auto nl : {NonLinearityType::SOFTMAX, NonLinearityType::SOFTMAX_STABLE, NonLinearityType::SOFTMAX_SCALED}) { auto hActOut = hActIn; poplibs_test::nonLinearity(nl, hActOut); if (nl == NonLinearityType::SOFTMAX_SCALED) { for (unsigned i = 0; i < batchSize; i++) { for (unsigned j = 0; j < numChannels; j++) { hActOut[i][j] *= SOFTMAX_SCALING; } } } // build and run the target code auto fwdProg = Sequence(); float nonLinearityScalingF, nonLinearityScalingH; nonLinearityInPlace(graph, nl, actF, nonLinearityScalingF, fwdProg); nonLinearityInPlace(graph, nl, actH, nonLinearityScalingH, fwdProg); const float expectedScaling = nl == NonLinearityType::SOFTMAX_SCALED ? 
SOFTMAX_SCALING : 1.0f; BOOST_TEST(nonLinearityScalingF == expectedScaling); BOOST_TEST(nonLinearityScalingH == expectedScaling); copy(target, hActIn, FLOAT, rawHActF.get()); copy(target, hActIn, HALF, rawHActH.get()); Engine fwdEng(graph, Sequence(uploadProg, fwdProg, downloadProg)); attachStreams(fwdEng, tmap); device.bind([&](const Device &d) { fwdEng.loadAndRun(d); }); copy(target, FLOAT, rawHActF.get(), hActOutF); copy(target, HALF, rawHActH.get(), hActOutH); BOOST_TEST(checkIsClose("actOutF", hActOutF, hActOut, TOL, FLOAT_ATOL)); BOOST_TEST(checkIsClose("actOutH", hActOutH, hActOut, TOL, HALF_ATOL)); auto hRefDeltaOut = hDeltaIn; poplibs_test::bwdNonLinearity(nl, hActIn, hRefDeltaOut); auto bwdProg = Sequence(); auto deltaFF = nonLinearityInputGradient(graph, nl, actF, deltaF, bwdProg); auto deltaHH = nonLinearityInputGradient(graph, nl, actH, deltaH, bwdProg); bwdProg.add(Copy(deltaFF, deltaF)); bwdProg.add(Copy(deltaHH, deltaH)); copy(target, hActIn, FLOAT, rawHActF.get()); copy(target, hActIn, HALF, rawHActH.get()); copy(target, hDeltaIn, FLOAT, rawHDeltaF.get()); copy(target, hDeltaIn, HALF, rawHDeltaH.get()); Engine bwdEng(graph, Sequence(uploadProg, bwdProg, downloadProg)); attachStreams(bwdEng, tmap); device.bind([&](const Device &d) { bwdEng.loadAndRun(d); }); copy(target, FLOAT, rawHDeltaF.get(), hDeltaOutF); copy(target, HALF, rawHDeltaH.get(), hDeltaOutH); BOOST_TEST( checkIsClose("deltaOutF", hDeltaOutF, hRefDeltaOut, TOL, FLOAT_ATOL)); BOOST_TEST( checkIsClose("deltaOutH", hDeltaOutH, hRefDeltaOut, TOL, HALF_ATOL)); } } BOOST_AUTO_TEST_CASE( NonLinearitySoftMax1D, *utf::tolerance<float>(fpc::percent_tolerance<float>(0.1)) * utf::tolerance<double>(fpc::percent_tolerance<double>(0.1))) { auto device = createTestDevice(TEST_TARGET); auto &target = device.getTarget(); Graph graph(target); popnn::addCodelets(graph); popops::addCodelets(graph); poplin::addCodelets(graph); const unsigned numChannels = 128; auto actF = graph.addVariable(FLOAT, 
{numChannels}, "actF"); auto actH = graph.addVariable(HALF, {numChannels}, "actH"); auto deltaF = graph.addVariable(FLOAT, {numChannels}, "deltaF"); auto deltaH = graph.addVariable(HALF, {numChannels}, "deltaH"); // arbitrary mappings mapTensorLinearly(graph, actF); mapTensorLinearly(graph, actH); mapTensorLinearly(graph, deltaF); mapTensorLinearly(graph, deltaH); std::vector<std::pair<std::string, char *>> tmap; Sequence uploadProg, downloadProg; auto rawHActF = allocateHostMemoryForTensor(actF, "actF", graph, uploadProg, downloadProg, tmap); auto rawHActH = allocateHostMemoryForTensor(actH, "actH", graph, uploadProg, downloadProg, tmap); auto rawHDeltaF = allocateHostMemoryForTensor(deltaF, "deltaF", graph, uploadProg, downloadProg, tmap); auto rawHDeltaH = allocateHostMemoryForTensor(deltaH, "deltaH", graph, uploadProg, downloadProg, tmap); boost::multi_array<double, 2> hActIn(boost::extents[1][numChannels]), hDeltaIn(boost::extents[1][numChannels]), hActOutF(boost::extents[1][numChannels]), hActOutH(boost::extents[1][numChannels]), hDeltaOutF(boost::extents[1][numChannels]), hDeltaOutH(boost::extents[1][numChannels]); for (unsigned c = 0; c < numChannels; ++c) { double sample = (1.0 - 2 * (c & 1)) * 0.01 * c; hActIn[0][c] = sample; hDeltaIn[0][c] = double(numChannels - c); } for (const auto nl : {NonLinearityType::SOFTMAX, NonLinearityType::SOFTMAX_STABLE, NonLinearityType::SOFTMAX_SCALED}) { auto hActOut = hActIn; poplibs_test::nonLinearity(nl, hActOut); if (nl == NonLinearityType::SOFTMAX_SCALED) { for (unsigned i = 0; i < numChannels; i++) { hActOut[0][i] *= SOFTMAX_SCALING; } } // build and run the target code auto fwdProg = Sequence(); float nonLinearityScalingF, nonLinearityScalingH; nonLinearityInPlace(graph, nl, actF, nonLinearityScalingF, fwdProg); nonLinearityInPlace(graph, nl, actH, nonLinearityScalingH, fwdProg); const float expectedScaling = nl == NonLinearityType::SOFTMAX_SCALED ? 
SOFTMAX_SCALING : 1.0f; BOOST_TEST(nonLinearityScalingF == expectedScaling); BOOST_TEST(nonLinearityScalingH == expectedScaling); copy(target, hActIn, FLOAT, rawHActF.get()); copy(target, hActIn, HALF, rawHActH.get()); Engine fwdEng(graph, Sequence(uploadProg, fwdProg, downloadProg)); attachStreams(fwdEng, tmap); device.bind([&](const Device &d) { fwdEng.loadAndRun(d); }); copy(target, FLOAT, rawHActF.get(), hActOutF); copy(target, HALF, rawHActH.get(), hActOutH); BOOST_TEST(checkIsClose("actOutF", hActOutF, hActOut, TOL, FLOAT_ATOL)); BOOST_TEST(checkIsClose("actOutH", hActOutH, hActOut, TOL, HALF_ATOL)); auto hRefDeltaOut = hDeltaIn; poplibs_test::bwdNonLinearity(nl, hActIn, hRefDeltaOut); auto bwdProg = Sequence(); auto deltaFF = nonLinearityInputGradient(graph, nl, actF, deltaF, bwdProg); auto deltaHH = nonLinearityInputGradient(graph, nl, actH, deltaH, bwdProg); bwdProg.add(Copy(deltaFF, deltaF)); bwdProg.add(Copy(deltaHH, deltaH)); copy(target, hActIn, FLOAT, rawHActF.get()); copy(target, hActIn, HALF, rawHActH.get()); copy(target, hDeltaIn, FLOAT, rawHDeltaF.get()); copy(target, hDeltaIn, HALF, rawHDeltaH.get()); Engine bwdEng(graph, Sequence(uploadProg, bwdProg, downloadProg)); attachStreams(bwdEng, tmap); device.bind([&](const Device &d) { bwdEng.loadAndRun(d); }); copy(target, FLOAT, rawHDeltaF.get(), hDeltaOutF); copy(target, HALF, rawHDeltaH.get(), hDeltaOutH); BOOST_TEST( checkIsClose("deltaOutF", hDeltaOutF, hRefDeltaOut, TOL, FLOAT_ATOL)); BOOST_TEST( checkIsClose("deltaOutH", hDeltaOutH, hRefDeltaOut, TOL, HALF_ATOL)); } }
An adaptive receiver of joint data and channel estimation for meteor burst communications In view of the characteristics of the meteor burst channel, the variable rate data transmission should be adopted to improve the system average throughput, which results in channel tracking and equalization problems at the receiver. Although the joint data and channel estimation of maximum likelihood sequence detection performing the principle of per‐survivor processing (PSP) is considered as an optimal detection scheme, its great computational complexity is a major problem and can hardly keep pace with the rapid decay of the meteor channel. Based on the estimation of the system parameters, an adaptive state reduction of the PSP (ASRP) algorithm with only a few states in the trellis diagram is employed, while these states are chosen by the time‐varying threshold according to the exponential decay of meteor channels. It is shown that ASRP can make a good tradeoff between the performance and the computational complexity, and provides reliable data transmission for adaptive modulation and coding of the meteor burst communication system. Computer simulation results and performance analysis are also included to support our developments. Copyright © 2010 John Wiley & Sons, Ltd.
/**
 * Retrieve evals matching the given parameters; if no parameter is supplied,
 * retrieve all evals.
 * @param field name of the elasticsearch field.
 * @param data value of the field in elasticsearch.
 * @return evals matching with parameters as List of EvalDTO.
 * @throws IllegalArgumentException if exactly one of field/data is supplied.
 */
@GetMapping("/")
public List<EvalDTO> searchEval(@RequestParam(name = "field", required = false) final String field,
                                @RequestParam(name = "data", required = false) final String data) {
    // Neither parameter supplied: return everything.
    if (field == null && data == null) {
        return evserv.retrieveEval();
    }
    // Exactly one supplied: the query is ambiguous, reject it.
    // (BUG FIX: message previously read "at least one them".)
    if (field == null || data == null) {
        throw new IllegalArgumentException("Field and Data are compulsory if at least one of them is declared.");
    }
    return evserv.searchEval(field, data);
}
Apple CEO Tim Cook says his company will pay $848 million over 25 years to buy electricity from a large solar power plant to be built in Monterey County, California, by First Solar. The deal not only lets Apple tout its environmental conscience; it also probably makes sound business sense. Even a few years ago, solar power was generally much more expensive than power from the grid. As prices for solar have plummeted in recent years, however, solar has become cheap enough to compete with grid energy in some situations and some places (see “Hawaii’s Solar Push Strains the Grid”). Apple will purchase about 45 percent of the power from the 280-megawatt California Flats Power Project. Cook says this will cover the power needs for Apple’s new headquarters in Cupertino, California, and all the company’s existing stores and data centers in California. First Solar expects to start building the plant by mid-year and finish it in 2016. Typically in such agreements the power produced at a plant is fed into the grid, and whoever has bought the power gets to count it against their normal power consumption. The total cost for the new plant is not being disclosed; nor is how much Apple will pay per kilowatt-hour for electricity from it. But in 2010 electricity from such a plant would have cost around 24 cents per kilowatt-hour, and the U.S. Department of Energy estimates that the cost per watt for solar has dropped more than 50 percent since. The cost of electricity from the grid varies considerably in California, depending on factors such as peak demand and how much electricity a company requires. According to the U.S. Energy Information Administration, in 2012 the average commercial electricity rate in California was 13.4 cents per kilowatt-hour, although in some cases it was twice as much. So 12 cents per kilowatt-hour would be a good deal. There are a few other factors to consider. 
Building a solar farm in Monterey County is likely to be expensive, and First Solar will also need to make a profit. But given the falling cost of solar power, and the fact that conventional electricity rates are likely to go up, Apple seems to have made a smart decision.
// Method to create a directory based on the path private void mkdir(Path path) throws LauncherException { try { validatePath(path, true); FileSystem fs = FileSystem.get(path.toUri(), new Configuration()); if (!fs.exists(path)) { if (!fs.mkdirs(path)) { String mkdirFailed = "Creating directory at " + path + " failed."; System.out.println(mkdirFailed); throw new LauncherException(mkdirFailed); } else { System.out.println("Creating directory at path " + path + " was successful."); } } } catch (IOException ex) { throw new LauncherException(ex.getMessage(), ex); } }
<filename>apps/specaccel.552.pep/print_results.c
#include <stdio.h>
#include <math.h>
#include "type.h"

/*
 * Prints the standard NPB-style benchmark result summary to stdout.
 *
 * name        - benchmark name (e.g. "EP", "FT"); first two chars checked for "EP"
 * xclass      - problem class letter (S, W, A, B, C, ...)
 * n1, n2, n3  - problem size; n2 == n3 == 0 means a non-grid problem
 * niter       - iteration count
 * t           - elapsed time in seconds
 * mops        - measured Mop/s
 * optype      - description of the operation type
 * verified    - nonzero if verification succeeded
 * npbversion  - NPB version string
 * compiletime - build timestamp
 * cs1..cs7    - compiler/linker/flags strings reported in the build section
 *
 * Under SPEC (-DSPEC) the timing and build-environment sections are
 * suppressed; only size, verification status, and version are printed.
 */
void print_results(char *name, char xclass, int n1, int n2, int n3, int niter,
    double t, double mops, char *optype, int verified, char *npbversion,
    char *compiletime, char *cs1, char *cs2, char *cs3, char *cs4, char *cs5,
    char *cs6, char *cs7)
{
    /* Scratch buffer for the formatted EP problem size (15 chars + NUL). */
    char size[16];
    int j;
    printf( "\n\n %s Benchmark Completed.\n", name );
    printf( " Class = %12c\n", xclass );
    // If this is not a grid-based problem (EP, FT, CG), then
    // we only print n1, which contains some measure of the
    // problem size. In that case, n2 and n3 are both zero.
    // Otherwise, we print the grid size n1xn2xn3
    if ( ( n2 == 0 ) && ( n3 == 0 ) ) {
        if ( ( name[0] == 'E' ) && ( name[1] == 'P' ) ) {
            /* EP reports 2^n1 samples; format as a whole number and
             * strip a trailing decimal point left by the %.0lf output. */
            sprintf( size, "%15.0lf", pow(2.0, n1) );
            j = 14;
            if ( size[j] == '.' ) {
                size[j] = ' ';
                j--;
            }
            size[j+1] = '\0';
            printf( " Size = %15s\n", size );
        } else {
            printf( " Size = %12d\n", n1 );
        }
    } else {
        printf( " Size = %4dx%4dx%4d\n", n1, n2, n3 );
    }
#ifndef SPEC
    /* Timing and throughput are omitted in SPEC builds. */
    printf( " Iterations = %12d\n", niter );
    printf( " Time in seconds = %12.2lf\n", t );
    printf( " Mop/s total = %15.2lf\n", mops );
    printf( " Operation type = %24s\n", optype );
#endif
    if ( verified )
        printf( " Verification = %12s\n", "SUCCESSFUL" );
    else
        printf( " Verification = %12s\n", "UNSUCCESSFUL" );
    printf( " Version = %12s\n", npbversion );
#ifndef SPEC
    /* Build-environment section, also omitted in SPEC builds. */
    printf( " Compile date = %12s\n", compiletime );
    printf( "\n Compile options:\n"
            " CC = %s\n", cs1 );
    printf( " CLINK = %s\n", cs2 );
    printf( " C_LIB = %s\n", cs3 );
    printf( " C_INC = %s\n", cs4 );
    printf( " CFLAGS = %s\n", cs5 );
    printf( " CLINKFLAGS = %s\n", cs6 );
    printf( " RAND = %s\n", cs7 );
#endif
    printf( "\n--------------------------------------\n"
            " Please send all errors/feedbacks to:\n"
            " Center for Manycore Programming\n"
            " <EMAIL>\n"
            " http://aces.snu.ac.kr\n"
            "--------------------------------------\n\n");
}
/**
 * Unit tests for the default json serializer for Morpheus DataFrames.
 *
 * Each test builds a frame with a different row-key type, serializes it to
 * JSON, reads it back, and asserts the round trip preserved the frame.
 *
 * @author Xavier Witdouck
 */
public class JsonDefaultTests {

    /**
     * Creates a DataFrame with randomly typed and populated columns.
     *
     * @param rowKeys the row keys for the frame
     * @param colKeys the column keys for the frame
     * @return a frame over the given keys with random contents
     */
    @SuppressWarnings("unchecked")
    private static <R,C> DataFrame<R,C> random(Array<R> rowKeys, Array<C> colKeys) {
        return DataFrame.of(rowKeys, (Class<C>)colKeys.getValue(0).getClass(), columns -> {
            var index = new AtomicInteger(-1);
            var random = new Random();
            colKeys.forEach(c -> {
                // NOTE(review): the cycle is % 10 but only cases 0-7 add a column,
                // so two of every ten requested columns are silently skipped --
                // confirm this is intentional (the round-trip assertions still hold
                // because both sides see the same frame).
                switch (index.incrementAndGet() % 10) {
                    case 0: columns.add(c, Double.class, v -> random.nextDouble() * 1000); break;
                    case 1: columns.add(c, Integer.class, v -> random.nextInt()); break;
                    case 2: columns.add(c, Long.class, v -> random.nextLong()); break;
                    // was String.valueOf("text-" + ...): String.valueOf on a String
                    // is a no-op, so the redundant call is removed.
                    case 3: columns.add(c, String.class, v -> "text-" + random.nextInt()); break;
                    case 4: columns.add(c, LocalDate.class, v -> LocalDate.now().minusDays(v.rowOrdinal())); break;
                    case 5: columns.add(c, LocalTime.class, v -> LocalTime.now().minusSeconds(v.rowOrdinal())); break;
                    case 6: columns.add(c, LocalDateTime.class, v -> LocalDateTime.now().minusMinutes(v.rowOrdinal())); break;
                    case 7: columns.add(c, ZonedDateTime.class, v -> ZonedDateTime.now().minusMinutes(v.rowOrdinal())); break;
                }
            });
        });
    }

    /** Round-trips a frame with Integer row keys. */
    @Test()
    public void case1() {
        var rows = Range.of(0, 100).toArray();
        var cols = Range.of(0, 10).map(v -> "Column-" + v).toArray();
        var frame = random(rows, cols);
        serialize(frame);
    }

    /** Round-trips a frame with String row keys. */
    @Test()
    public void case2() {
        var rows = Range.of(0, 100).map(v -> "Row-" + v).toArray();
        var cols = Range.of(0, 10).map(v -> "Column-" + v).toArray();
        var frame = random(rows, cols);
        serialize(frame);
    }

    /** Round-trips a frame with LocalDate row keys. */
    @Test()
    public void case3() {
        var rows = Range.of(0, 100).map(v -> LocalDate.now().plusDays(v)).toArray();
        var cols = Range.of(0, 10).map(v -> "Column-" + v).toArray();
        var frame = random(rows, cols);
        serialize(frame);
    }

    /** Round-trips a frame with LocalDateTime row keys. */
    @Test()
    public void case4() {
        var rows = Range.of(0, 100).map(v -> LocalDateTime.now().plusMinutes(v)).toArray();
        var cols = Range.of(0, 10).map(v -> "Column-" + v).toArray();
        var frame = random(rows, cols);
        serialize(frame);
    }

    /** Round-trips a frame with ZonedDateTime row keys. */
    @Test()
    public void case5() {
        var rows = Range.of(0, 100).map(v -> ZonedDateTime.now().plusMinutes(v)).toArray();
        var cols = Range.of(0, 10).map(v -> "Column-" + v).toArray();
        var frame = random(rows, cols);
        serialize(frame);
    }

    /** Round-trips a frame with LocalDate row AND column keys. */
    @Test()
    public void case6() {
        var rows = Range.of(0, 100).map(v -> LocalDate.now().plusDays(v)).toArray();
        var cols = Range.of(0, 10).map(v -> LocalDate.now().plusDays(v)).toArray();
        var frame = random(rows, cols);
        serialize(frame);
    }

    /**
     * Serializes the frame to JSON, prints it, reads it back, and asserts
     * the reconstructed frame equals the original by index.
     *
     * @param frame the frame to round-trip
     */
    private <R,C> void serialize(DataFrame<R,C> frame) {
        var sink = new JsonSinkDefault<R,C>();
        var os = new ByteArrayOutputStream(1024 * 10);
        sink.write(frame, o -> o.resource(Resource.of(os)).pretty(true));
        var json = new String(os.toByteArray());
        IO.println(json);
        var source = new JsonSourceDefault<R,C>();
        var result = source.read(o -> o.resource(Resource.of(new ByteArrayInputStream(os.toByteArray()))));
        DataFrameAsserts.assertEqualsByIndex(frame, result);
    }
}
Share. Kick the baby no more... Kick the baby no more... Warning: Full spoilers from the episode to follow. In recent South Park episodes, there seems to be a pattern forming where the storylines are generally strong out of the gate, but quickly lose steam by the first commercial break. Such was the case for this week's "Taming Strange," which set up two promising ideas that never really went anywhere. The first, of course, was Ike going through puberty. This is in itself a pretty funny concept, one that normally wouldn't work on most shows (after all, 10-year-old Kyle is five years Ike's senior), but because it's South Park, we're willing to go along for the ride and see where it all goes. Unfortunately, Ike as a hormonal "teenager" wasn't quite as hilarious in practice. To me, what makes Ike so funny -- aside from his Canadian heritage, of course -- is his youthful voice. When you take that away and replace it with a mouthpiece more comparable to a teenager's, the circumstance loses its impact somewhat. Personally, I think this storyline might have been more enjoyable if Ike had retained his childlike vocalization. I understand that's not how puberty works, but this is Canadian puberty we're talking about here, and from a comedy standpoint, chewing tobacco and talking about "pounding strange" just doesn't really cut it for LOLs. In fact, it was the Canadian Board of Health segment that I found to be the most entertaining this week, as the department head explained how Canadian puberty works. ("For instance, when you fart, your d*** gets hard.") There was also something delightfully absurd about the department head's own revelation that farting and queefing weren't at all related to Canadian reproduction. I particularly enjoyed the recurring callbacks to "budday," "guy" and "fwiend." The Yo Gabba Gabba/Miley Cyrus tie-in was also only okay, and didn't serve much purpose in the larger scheme of things.
Clearly, this subplot was meant to correlate with Ike's own struggles in growing up -- much in the same way that Miley's image as a Disney child star has devolved into... well, whatever Miley's image is now. Later, the references to Miley's VMA performance and "Wrecking Ball" felt forced in making obvious jokes that we've now heard and seen countless times in much funnier memes on the Internet. As for the "Intellilink" storyline, which largely focused on Mr. Mackey's vehement need to integrate the school's new digital portal into the curriculum, there were some funny bits, including the constant misuse of functions: dimming the lights, rolling down window shades and playing arbitrary song files -- I also got a laugh out of Ike's drawing of Kyle with a "whooping crane" on his head, and Butters' gleeful snickering. However, the Obamacare commentary at the end, again, seemed like an afterthought that was tacked on to make it all seem relevant. (I'll admit, though, I did chuckle at the Tom Brady cutaway.) Overall, I thought the satire this week was pretty scattered, with the episode shoehorning in as many "hot topics" as they could into the central plot. While the two main parodies (Miley Cyrus and Obamacare) probably could have worked in tandem, neither arc was strong enough here to warrant a connection at the end (unlike, say, the World War Z/Trayvon Martin mashup in "World War Zimmerman"). In the end, these jokes were simply too one-note to satisfy a full 22 minutes. Honestly, they probably could have run about the same length as an SNL sketch and gotten the same effect.
/**
 * Synchronise time using a reference value.
 *
 * Adjusts time->offset so that the locally derived clock (hardware
 * milliseconds rebased by base_local, plus the current offset) agrees
 * with the supplied reference timestamp sync_time.
 */
void Airtight_Time_SetSynchronisationPoint(Airtight_Time *time, at_time_t sync_time)
{
    /* Local clock rebased to this node's epoch. */
    at_time_t local_now = Airtight_Time_ClockMS() - time->base_local;
    /* What this node currently believes the synchronised time is. */
    at_time_t adjusted_now = local_now + time->offset;
    /* Shift the offset by the disagreement with the reference. */
    time->offset = time->offset + (sync_time - adjusted_now);
}
//********SPIB1_OutString*****************
// Print a string of characters to the SPI channel.
// Each byte is sent in order via SPIB1_OutChar until the
// NUL terminator is reached (the terminator is not sent).
// Inputs: ptr pointer to NULL-terminated ASCII string
// Outputs: none
void SPIB1_OutString(char *ptr){
  for (; *ptr != '\0'; ++ptr) {
    SPIB1_OutChar(*ptr);
  }
}
/** * A Canary Tool to perform synthetic tests for Query Server */ public class PhoenixCanaryTool extends Configured implements Tool { private static String TEST_SCHEMA_NAME = "TEST"; private static String TEST_TABLE_NAME = "PQSTEST"; private static String FQ_TABLE_NAME = "TEST.PQSTEST"; private boolean USE_NAMESPACE = true; private Sink sink = new StdOutSink(); /** * Base class for a Canary Test */ private abstract static class CanaryTest { CanaryTestResult result = new CanaryTestResult(); Connection connection = null; private void onCreate(Connection connection) { result.setTimestamp(getCurrentTimestamp()); result.setStartTime(System.currentTimeMillis()); this.connection = connection; } abstract void onExecute() throws Exception; private void onExit() { result.setExecutionTime(System.currentTimeMillis() - result.getStartTime()); } CanaryTestResult runTest(Connection connection) { try { onCreate(connection); onExecute(); result.setSuccessful(true); result.setMessage("Test " + result.getTestName() + " successful"); } catch (Exception e) { result.setSuccessful(false); result.setMessage(Throwables.getStackTraceAsString(e)); } finally { onExit(); } return result; } } /** * Test which prepares environment before other tests run */ static class PrepareTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("prepare"); Statement statement = connection.createStatement(); DatabaseMetaData dbm = connection.getMetaData(); ResultSet tables = dbm.getTables(null, TEST_SCHEMA_NAME, TEST_TABLE_NAME, null); if (tables.next()) { // Drop test Table if exists statement.executeUpdate("DROP TABLE IF EXISTS " + FQ_TABLE_NAME); } // Drop test schema if exists if (TEST_SCHEMA_NAME != null) { statement = connection.createStatement(); statement.executeUpdate("DROP SCHEMA IF EXISTS " + TEST_SCHEMA_NAME); } } } /** * Create Schema Test */ static class CreateSchemaTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("createSchema"); 
Statement statement = connection.createStatement(); statement.executeUpdate("CREATE SCHEMA IF NOT EXISTS " + TEST_SCHEMA_NAME); } } /** * Create Table Test */ static class CreateTableTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("createTable"); Statement statement = connection.createStatement(); // Create Table statement.executeUpdate("CREATE TABLE IF NOT EXISTS" + FQ_TABLE_NAME + " (mykey " + "INTEGER " + "NOT " + "NULL PRIMARY KEY, " + "mycolumn VARCHAR)"); } } /** * Upsert Data into Table Test */ static class UpsertTableTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("upsertTable"); // Insert data Statement statement = connection.createStatement(); statement.executeUpdate("UPSERT INTO " + FQ_TABLE_NAME + " VALUES (1, " + "'Hello" + " World')"); connection.commit(); } } /** * Read data from Table Test */ static class ReadTableTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("readTable"); // Query for table PreparedStatement ps = connection.prepareStatement("SELECT * FROM " + FQ_TABLE_NAME); ResultSet rs = ps.executeQuery(); // Check correctness int totalRows = 0; while (rs.next()) { totalRows += 1; Integer myKey = rs.getInt(1); String myColumn = rs.getString(2); if (myKey != 1 || !myColumn.equals("Hello World")) { throw new Exception("Retrieved values do not match the inserted " + "values"); } } if (totalRows != 1) { throw new Exception(totalRows + " rows fetched instead of just one."); } ps.close(); rs.close(); } } /** * Delete test table Test */ static class DeleteTableTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("deleteTable"); Statement statement = connection.createStatement(); statement.executeUpdate("DROP TABLE IF EXISTS" + FQ_TABLE_NAME); // Check if table dropped DatabaseMetaData dbm = connection.getMetaData(); ResultSet tables = dbm.getTables(null, TEST_SCHEMA_NAME, TEST_TABLE_NAME, null); if (tables.next()) { throw new 
Exception("Test Table could not be dropped"); } } } /** * Delete test Schema Test */ static class DeleteSchemaTest extends CanaryTest { void onExecute() throws Exception { result.setTestName("deleteSchema"); Statement statement = connection.createStatement(); statement.executeUpdate("DROP SCHEMA IF EXISTS " + TEST_SCHEMA_NAME); } } /** * Sink interface used by the canary to output information */ public interface Sink { List<CanaryTestResult> getResults(); void updateResults(CanaryTestResult result); void publishResults() throws Exception; void clearResults(); } /** * Implementation of Std Out Sink */ public static class StdOutSink implements Sink { private List<CanaryTestResult> results = new ArrayList<>(); @Override public void updateResults(CanaryTestResult result) { results.add(result); } @Override public List<CanaryTestResult> getResults() { return results; } @Override public void publishResults() throws Exception { Gson gson = new GsonBuilder().setPrettyPrinting().create(); String resultJson = gson.toJson(results); System.out.println(resultJson); } @Override public void clearResults() { results.clear(); } } /** * Implementation of File Out Sink */ public static class FileOutSink implements Sink { private List<CanaryTestResult> results = new ArrayList<>(); File dir; String logfileName; String propFileName = "phoenix-canary-file-sink.properties"; public FileOutSink() throws Exception { Properties prop = new Properties(); InputStream input = ClassLoader.getSystemResourceAsStream(propFileName); if (input == null) { throw new Exception("Cannot load " + propFileName + " file for " + "FileOutSink."); } prop.load(input); logfileName = prop.getProperty("file.name"); dir = new File(prop.getProperty("file.location")); dir.mkdirs(); } @Override public void updateResults(CanaryTestResult result) { results.add(result); } @Override public List<CanaryTestResult> getResults() { return results; } @Override public void publishResults() throws Exception { Gson gson = new 
GsonBuilder().setPrettyPrinting().create(); String resultJson = gson.toJson(results); String fileName = logfileName + "-" + new SimpleDateFormat("yyyy.MM.dd.HH" + ".mm" + ".ss").format(new Date()) + ".log"; File file = new File(dir, fileName); Files.write(Bytes.toBytes(resultJson), file); } @Override public void clearResults() { results.clear(); } } private static final Logger LOG = LoggerFactory.getLogger(PhoenixCanaryTool.class); private static String getCurrentTimestamp() { return new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss.ms").format(new Date()); } private static Namespace parseArgs(String[] args) { ArgumentParser parser = ArgumentParsers.newFor("Phoenix Canary Test Tool").build() .description("Phoenix Canary Test Tool"); parser.addArgument("--hostname", "-hn").type(String.class).nargs("?").help("Hostname on " + "which Phoenix is running."); parser.addArgument("--port", "-p").type(String.class).nargs("?").help("Port on " + "which Phoenix is running."); parser.addArgument("--constring", "-cs").type(String.class).nargs("?").help("Pass an " + "explicit connection String to connect to Phoenix. " + "default: jdbc:phoenix:thin:serialization=PROTOBUF;url=[hostName:port]"); parser.addArgument("--timeout", "-t").type(String.class).nargs("?").setDefault("60").help ("Maximum time for which the app should run before returning error. default:" + "" + " 60 sec"); parser.addArgument("--testschema", "-ts").type(String.class).nargs("?").setDefault (TEST_SCHEMA_NAME).help("Custom name for the test table. " + "default: " + TEST_SCHEMA_NAME); parser.addArgument("--testtable", "-tt").type(String.class).nargs("?").setDefault (TEST_TABLE_NAME).help("Custom name for the test table." + " default: " + TEST_TABLE_NAME); parser.addArgument("--logsinkclass", "-lsc").type(String.class).nargs("?").setDefault ("PhoenixCanaryTool$StdOutSink").help ("Path to a Custom implementation for log sink class. 
default: stdout"); Namespace res = null; try { res = parser.parseKnownArgs(args, null); } catch (ArgumentParserException e) { parser.handleError(e); } return res; } private CanaryTestResult appInfo = new CanaryTestResult(); private Connection connection = null; @Override public int run(String[] args) throws Exception { try { Namespace cArgs = parseArgs(args); if (cArgs == null) { LOG.error("Argument parsing failed."); throw new RuntimeException("Argument parsing failed"); } final String hostName = cArgs.getString("hostname"); final String port = cArgs.getString("port"); final String timeout = cArgs.getString("timeout"); final String conString = cArgs.getString("constring"); final String testSchemaName = cArgs.getString("testschema"); final String testTableName = cArgs.getString("testtable"); final String logSinkClass = cArgs.getString("logsinkclass"); TEST_TABLE_NAME = testTableName; TEST_SCHEMA_NAME = testSchemaName; FQ_TABLE_NAME = testSchemaName + "." + testTableName; // Check if at least one from host+port or con string is provided. if ((hostName == null || port == null) && conString == null) { throw new RuntimeException("Provide at least one from host+port or constring"); } int timeoutVal = Integer.parseInt(timeout); // Dynamically load a class for sink sink = (Sink) ClassLoader.getSystemClassLoader().loadClass(logSinkClass).newInstance(); long startTime = System.currentTimeMillis(); String connectionURL = (conString != null) ? conString : "jdbc:phoenix:thin:serialization=PROTOBUF;url=" + hostName + ":" + port; appInfo.setTestName("appInfo"); appInfo.setMiscellaneous(connectionURL); Properties connProps = new Properties(); connProps.setProperty("phoenix.schema.mapSystemTablesToNamespace", "true"); connProps.setProperty("phoenix.schema.isNamespaceMappingEnabled", "true"); try { connection = DriverManager.getConnection(connectionURL, connProps); } catch (Exception e) { LOG.info("Namespace mapping cannot be set. 
Using default schema"); USE_NAMESPACE = false; connection = DriverManager.getConnection(connectionURL); TEST_SCHEMA_NAME = null; FQ_TABLE_NAME = TEST_TABLE_NAME; } SimpleTimeLimiter limiter = new SimpleTimeLimiter(); limiter.callWithTimeout(new Callable<Void>() { public Void call() { sink.clearResults(); // Execute tests LOG.info("Starting PrepareTest"); sink.updateResults(new PrepareTest().runTest(connection)); if (USE_NAMESPACE) { LOG.info("Starting CreateSchemaTest"); sink.updateResults(new CreateSchemaTest().runTest(connection)); } LOG.info("Starting CreateTableTest"); sink.updateResults(new CreateTableTest().runTest(connection)); LOG.info("Starting UpsertTableTest"); sink.updateResults(new UpsertTableTest().runTest(connection)); LOG.info("Starting ReadTableTest"); sink.updateResults(new ReadTableTest().runTest(connection)); LOG.info("Starting DeleteTableTest"); sink.updateResults(new DeleteTableTest().runTest(connection)); if (USE_NAMESPACE) { LOG.info("Starting DeleteSchemaTest"); sink.updateResults(new DeleteSchemaTest().runTest(connection)); } return null; } }, timeoutVal, TimeUnit.SECONDS, true); long estimatedTime = System.currentTimeMillis() - startTime; appInfo.setExecutionTime(estimatedTime); appInfo.setSuccessful(true); } catch (Exception e) { LOG.error(Throwables.getStackTraceAsString(e)); appInfo.setMessage(Throwables.getStackTraceAsString(e)); appInfo.setSuccessful(false); } finally { sink.updateResults(appInfo); sink.publishResults(); connection.close(); } return 0; } public static void main(final String[] args) { int result = 0; try { LOG.info("Starting Phoenix Canary Test tool..."); result = ToolRunner.run(new PhoenixCanaryTool(), args); } catch (Exception e) { LOG.error("Error in running Phoenix Canary Test tool. " + e); } LOG.info("Exiting Phoenix Canary Test tool..."); } }
Alright everyone let’s calm right down, there is no need to panic. Pump the brakes on the emotional knee jerk reactions. The Springboks are still a world class team, second only to the All Blacks and everything is still looking very much on track for them to make a deep run in the Rugby World Cup 2015, regardless of last Saturday. While it is true that the Springboks were nowhere near their best in Padova, credit must be given to the Italians for some excellent defense, enterprising attack and disruptive ruck and maul work. The Azzurri adapted better to referee Jerome Garces’ officiating (who was not as bad as some are making him out to be) and will be very pleased with their passionate showing in front of an appreciative home crowd. The reality is that they still lost by sixteen points. A quick word on ref Garces; although he made a few incorrect calls, he did at least try to allow the game to flow which should be encouraged. No one wants to see a game where the referee blows the whistle every ten seconds. The one area where the Springboks have battled on this tour is adapting to the Northern Hemisphere referee interpretations, but credit to them for getting it right in the second half. Garces is not my cup of beer, but he was mostly consistent on the day and what more can you ask for? Games against teams like Italy are tricky in that the Springboks are expected to dominate. When they invariably do, it is not fully appreciated due to a perceived lack of quality in the opposition. Unfortunately anything less than a huge win is slated and in this instance ammo for disgruntled fans and rugby pundits. I am a glass half full man when it comes to the Springboks and I prefer to look at the positives Coach Heyneke Meyer can take from the game. For instance, both Trevor Nyakane and Coenie Oosthuizen had solid outings against what was considered a powerful Italian front row. 
Fullback Johan Goosen settled after some early nerves, made two impressive line breaks and is definitely worth another look at as back up to Willie le Roux. Newbie Boks, Nizaam Carr and Julian Redlinghuys both had very good debuts off the bench with Carr having a blinder of a line break and pop pass that led to a try for Cobus Reinach. It is true the Springboks ran too laterally on attack at times and were inaccurate with their line outs, but defensively they were excellent and did not concede a try. Eben Etzebeth looked to be nearing the highs of last year and Handre Pollard’s cameo off the bench and superb long pass to Habana reminded us all of his immense talent. Lessons will have been learned about the contesting of the breakdowns and the ref’s interpretations of that. Jean de Villiers will have hopefully realised the importance of when to kick, (especially for the posts) when given the opportunity and the Springboks will know that they need to score more consistently when in the ‘red zone’. They had three clear opportunities to score in the first half and a lead of fifteen or so would have put the game to bed at half time. Converting opportunities into points in key moments at this level is what separates the champion sides from the good ones. Because of the Northern Hemisphere weather expected, passionate fan base and referee interpretations, expect to see much tighter games especially between the top eight teams in the world when they compete in the RWC2015. The Springboks will have benefited from this tour immensely and regardless of injuries should meet the All Blacks in the RWC2015 semi-finals next year. After that who knows but it is comforting to know that they have only lost once in the NH under Heyneke Meyer and will be incredibly tough to beat on any given day. Thank you for reading. Support LWOS by following us on Twitter – @LWOSWorld – and “liking” our Facebook page. 
Check out more Rugby news, view and preview at www.bowlphilosophy.com and follow me on twitter @bowlphilosophy Have you tuned into Last Word On Sports Radio? LWOS is pleased to bring you 24/7 sports radio to your PC, laptop, tablet or smartphone. What are you waiting for? Main Photo:
While millions of Americans spent Thursday glued to television coverage of former FBI director James Comey’s testimony, Donald Trump took time to bask in the adulation of Religious Right activists who gathered in D.C. for Road to Majority, the annual conference hosted by Ralph Reed’s Faith and Freedom Coalition. Amid these conservative Christians, Trump didn’t need to worry about hearing a discouraging word or being challenged about his habitual lying. “We love him because he is our friend,” said Reed. Trump returned the sentiment, saying, “You didn’t let me down and I will never, ever let you down, you know that.” Reed and Trump both cited the overwhelming support Trump received from white evangelical voters. Trump recalled that he had appeared at the conference last year asking for their support and prayers, and “boy did you deliver.” Reed praised Trump for focusing “like a laser beam” on winning evangelical support “and that’s why he’s the president of the United States today.” Trump touted his accomplishments: the confirmation of Supreme Court Justice Neil Gorsuch, filling the Cabinet with people who “share our values,” withdrawal from the Paris climate accord and his proposed “historic” tax cut. And, of course, he bragged about having signed an executive order “to protect religious liberty in America” and to “stop the Johnson Amendment from interfering with your First Amendment rights.” Perhaps in a nod to those Religious Right activists who were disappointed that his order did not include sweeping exemptions for LGBT discrimination in the name of religion, Trump assured the audience, “Believe me, we’re not finished yet.” Trump cited the Bible, reading from a verse in Isaiah, as well as more vaguely stating: [A]s the Bible tells us, we know that the truth will prevail, that God’s glorious wisdom will shine through, and that the good and decent people of this country will get the change they voted for and that they so richly deserve. 
Trump seemingly, but vaguely, endorsed Christian-nation activists’ goal of returning official prayer and religious instruction into the nation’s schools, saying schools “should not be a place that drive out faith and religion, but that should welcome faith and religion with wide-open beautiful arms.” Trump also endorsed Religious Right fearmongering about the religious freedom of conservative Christians being under attack in America, saying “It is time to put a stop to the attacks on religion.” He promised, “As long as I’m president, no one is going to stop you from practicing your faith or preaching what is in your heart.” And in a line recycled from his speech at Liberty University last month, Trump said, “In America we don’t worship government, we worship God.” Trump trashed Democrats as “obstructionists” and urged the activists to give him bigger Republican majorities in the House and Senate in the 2018 elections. Reed had kicked off the luncheon by bragging about conservative electoral victories since his coalition was formed in 2010, and mocking mainstream media predictions about the Religious Right’s demise. Among the speakers who preceded Trump were Texas Attorney General Ken Paxton and Senator Ted Cruz. Paxton bragged about how many times Texas had sued the federal government during his tenure. Among the cases he cited was a challenge to federal Department of Education rules on transgender students’ access to bathrooms; he said the state’s legal challenge to the Obama administration rules had delayed their implementation until the Trump administration dropped them. 
Paxton said that if Hillary Clinton had been elected and was able to name a Supreme Court justice to replace the late Antonin Scalia, “we were likely going to be in a post-constitutional America.” That didn’t happen, he said, thanks to the prayers and political work of Christian conservatives, “Praise God.” Introducing Cruz, Reed called him “one of the fiercest, one of the toughest, one of the most valiant defenders of our values, not only in the U.S. Senate today, but of anyone who has ever served in the U.S. Senate.” “It is June of 2017, and Hillary Clinton is not the president of the United States,” Cruz said, “and Neil Gorsuch is a Supreme Court justice, which demonstrates that elections matter.” But Cruz pushed his fellow GOPers, saying, “We have a Republican majority in the House. We have a Republican majority in the Senate. We have a Republican in the White House. How about we act like it?” Also speaking before Trump was Sen. David Perdue of Georgia, who declared, “We are in a war for the future of this republic.” He listed the New Deal, Great Society, and Obamacare and Dodd-Frank as consequences of Democratic supermajorities. “The great progressive experiment of the last 100 years, with bigger and bigger government, has failed, period.” Perdue praised Trump for doing what he said he’d do on the Supreme Court, regulation, and immigration and praised his trip to the Middle East. “Look, this president is nobody’s choir boy, right?” said Perdue to chuckles from the audience, “But he is a man of action.”
import sys

# Single-pass maximum-difference scan (classic "max profit" problem):
# for each value read from stdin, track the best value[j] - min(value[..j])
# seen so far. The first input line is a count and is discarded.
SENTINEL = 1e10

best_diff = -SENTINEL   # largest (current - lowest-so-far) observed
lowest = SENTINEL       # smallest value observed so far

input()  # first line holds the element count; the scan does not need it
for value in map(int, sys.stdin):
    if best_diff < value - lowest:
        best_diff = value - lowest
    if value < lowest:
        lowest = value

print(best_diff)
#!/usr/bin/python
# -*- coding: utf-8 -*-

from typing import List, Tuple

import numpy as np


def greedy_tsp(adj_matrix: List[List[int]], start=1) -> Tuple[List[int], int]:
    """Nearest-neighbour (greedy) heuristic for the travelling salesman problem.

    Cities are labelled 1..n (the default ``start=1`` suggests 1-based
    labels), and ``adj_matrix[i - 1][j - 1]`` is the distance from city i
    to city j. Starting from ``start``, the heuristic repeatedly moves to
    the nearest unvisited city, then returns to the start.

    NOTE(review): the original body was a bare ``pass`` stub; this fills it
    in. The 1-based city labelling is inferred from the ``start=1`` default
    -- confirm against callers.

    :param adj_matrix: square distance matrix, adj_matrix[i][j] >= 0
    :param start: label (1-based) of the city the tour begins and ends at
    :return: (tour, total) where tour is the closed cycle of city labels
             beginning and ending at ``start`` and total is its length;
             an empty matrix yields ([], 0)
    """
    n = len(adj_matrix)
    if n == 0:
        return [], 0

    unvisited = set(range(1, n + 1))
    unvisited.discard(start)

    tour = [start]
    total = 0
    current = start
    while unvisited:
        # Greedy step: jump to the closest city not yet visited.
        nearest = min(unvisited, key=lambda city: adj_matrix[current - 1][city - 1])
        total += adj_matrix[current - 1][nearest - 1]
        tour.append(nearest)
        unvisited.remove(nearest)
        current = nearest

    # Close the cycle by returning to the starting city.
    total += adj_matrix[current - 1][start - 1]
    tour.append(start)
    return tour, total
/** * Created by montselozanod on 4/4/15. */ public class RecyclerAdapter extends RecyclerView.Adapter<RecyclerAdapter.AnnouncementViewHolder> { List<Announcement> announces; RecyclerAdapter(){} RecyclerAdapter(List<Announcement> list_announces){ this.announces = list_announces; Log.d("Chabuz", announces.get(0).getMessage()); } @Override public int getItemCount() { return announces.size(); } @Override public AnnouncementViewHolder onCreateViewHolder(ViewGroup viewGroup, int i) { View v = LayoutInflater.from(viewGroup.getContext()).inflate(R.layout.card_view_announce, viewGroup, false); AnnouncementViewHolder pvh = new AnnouncementViewHolder(v); return pvh; } @Override public void onBindViewHolder(AnnouncementViewHolder aViewHolder, int i) { aViewHolder.message.setText(announces.get(i).getMessage()); aViewHolder.dates.setText(announces.get(i).getEnd()); aViewHolder.icon.setImageResource(announces.get(i).getImg()); } @Override public void onAttachedToRecyclerView(RecyclerView recyclerView) { super.onAttachedToRecyclerView(recyclerView); } public static class AnnouncementViewHolder extends RecyclerView.ViewHolder{ CardView announce; TextView message; TextView dates; ImageView icon; AnnouncementViewHolder(View itemView){ super(itemView); announce = (CardView)itemView.findViewById(R.id.announce); message = (TextView)itemView.findViewById(R.id.ann_message); dates = (TextView)itemView.findViewById(R.id.ann_dates); icon = (ImageView)itemView.findViewById(R.id.icon_photo); } } }
/// Convenience function that computes `a += b * c` positionwise, carrying
/// any digit that exceeds `threshold` into the next position (growing `a`
/// when the final position overflows).
pub fn add_mul(a: &mut Vec<isize>, b: &[isize], c: isize, threshold: isize) {
    // Make sure `a` has at least as many digits as `b`.
    if a.len() < b.len() {
        a.resize(b.len(), 0);
    }
    for idx in 0..b.len() {
        a[idx] += b[idx] * c;
        if a[idx] > threshold {
            let carry = a[idx] / threshold;
            a[idx] %= threshold;
            if idx + 1 < a.len() {
                a[idx + 1] += carry;
            } else {
                a.push(carry);
            }
        }
    }
}
// Fill the all the vector entries with provided value public void fill(HiveDecimal value) { noNulls = true; isRepeating = true; if (vector[0] == null) { vector[0] = new HiveDecimalWritable(value); } else { vector[0].set(value); } }
<reponame>zuwome/kongxia
//
//  ZZTiXianDetailNumberCell.h
//  zuwome
//
//  Created by 潘杨 on 2018/6/12.
//  Copyright © 2018 TimoreYu. All rights reserved.
//

#import "ZZTiXianBaseCell.h"

/** Cell showing the detailed withdrawal ("tixian") amount. */
@interface ZZTiXianDetailNumberCell : ZZTiXianBaseCell

/** Text field for entering the amount to withdraw. */
@property (nonatomic,strong)UITextField *tiXianTextField;

/** Maximum amount available for withdrawal. */
@property (nonatomic,strong)NSString *maxMoneyNumber;

/** Tap callback for the "withdraw everything" action. */
@property (nonatomic,copy) dispatch_block_t allTiXianBlock;

@end
<gh_stars>0
#include "ModelMesh.h"
#include "Camera.h"

// Builds a mesh from already-loaded geometry, uploads it to the GPU via
// setupMesh(), then seeds the transform with default scale / position /
// rotation-axis values.
ModelMesh::ModelMesh(vector<Vertex> vertices, vector<GLuint> indices, vector<MeshTexture> textures)
{
    this->vertices = vertices;
    this->indices = indices;
    this->textures = textures;

    // Now that we have all the required data, set the vertex buffers and its attribute pointers.
    this->setupMesh();

    // Defaults: shrink to 20%, sit at the origin, rotate about +Y by 0 degrees.
    scale = glm::vec3(0.2f, 0.2f, 0.2f);
    position = glm::vec3(0.0f, 0.0f, 0.0f);
    rotation = glm::vec3(0.0f, 1.0f, 0.0f);
    rotationAngle = 0.0f;
    this->shininess = 20.0f;
}

// Draws the mesh with the given shader program: binds its textures to
// sequential texture units, uploads MVP/model/lighting uniforms, issues the
// indexed draw call, then unbinds the textures again.
// NOTE(review): `_model` is used directly for the MVP; the member transform
// (position/rotation/scale) is NOT applied here -- CalculateModelMatrix()
// is commented out below.
void ModelMesh::Render(Camera * camera, GLuint program, lightInfo _lighting, glm::mat4 _model)
{
    glUseProgram(program);
    // Bind appropriate textures
    GLuint diffuseNr = 1;
    GLuint specularNr = 1;
    for (GLuint i = 0; i < this->textures.size(); i++)
    {
        glActiveTexture(GL_TEXTURE0 + i); // Active proper texture unit before binding
        // Retrieve texture number (the N in diffuse_textureN)
        stringstream ss;
        string number;
        string name = this->textures[i].type;
        if (name == "texture_diffuse")
            ss << diffuseNr++; // Transfer GLuint to stream
        else if (name == "texture_specular")
            ss << specularNr++; // Transfer GLuint to stream
        number = ss.str();
        // Now set the sampler to the correct texture unit
        glUniform1i(glGetUniformLocation(program, (name + number).c_str()), i);
        // And finally bind the texture
        glBindTexture(GL_TEXTURE_2D, this->textures[i].id);
    }

    // EDIT
    //CalculateModelMatrix();
    // MVP is only uploaded when a camera is available; the plain model
    // matrix and lighting uniforms are uploaded unconditionally.
    if (camera != nullptr)
    {
        mvp = camera->GetPVMatrix() * _model;
        glUniformMatrix4fv(glGetUniformLocation(program, "MVP"), 1, GL_FALSE, glm::value_ptr(mvp));
    }
    glUniformMatrix4fv(glGetUniformLocation(program, "modelMat"), 1, GL_FALSE, glm::value_ptr(_model));
    glUniform1f(glGetUniformLocation(program, "shininess"), shininess);
    glUniform3fv(glGetUniformLocation(program, "ambient"), 1, glm::value_ptr(_lighting.ambient));
    glUniform3fv(glGetUniformLocation(program, "lightColour"), 1, glm::value_ptr(_lighting.lightColour));
    glUniform3fv(glGetUniformLocation(program, "lightPos"), 1, glm::value_ptr(_lighting.lightPos));
    glUniform1f(glGetUniformLocation(program, "lightSpecStr"), _lighting.specularStr);
    // EDIT END

    // Draw mesh
    glBindVertexArray(this->VAO);
    glDrawElements(GL_TRIANGLES, this->indices.size(), GL_UNSIGNED_INT, 0);
    glBindVertexArray(0);

    // Always good practice to set everything back to defaults once configured.
    for (GLuint i = 0; i < this->textures.size(); i++)
    {
        glActiveTexture(GL_TEXTURE0 + i);
        glBindTexture(GL_TEXTURE_2D, 0);
    }
}

// Replaces the mesh's scale vector.
void ModelMesh::SetScale(glm::vec3 _scaleVec)
{
    scale = _scaleVec;
}

// Recomputes the cached model matrix as translate * rotate * scale.
// NOTE(review): currently unused by Render(), which takes the model matrix
// as a parameter instead.
void ModelMesh::CalculateModelMatrix()
{
    model = glm::translate(glm::mat4(), position) * glm::rotate(glm::mat4(), glm::radians(rotationAngle), rotation) * glm::scale(glm::mat4(), scale);
}

// Replaces the specular shininess exponent used by Render().
void ModelMesh::SetShininess(float newShininess)
{
    shininess = newShininess;
}

// NOTE(review): the three Increment* overloads below have empty bodies --
// they are no-ops as written; confirm whether they were meant to be
// implemented before relying on them.
void ModelMesh::IncrementPosition(float _incXBy, float _incYBy, float _incZBy)
{
}

void ModelMesh::IncrementRotation(float _degrees)
{
}

void ModelMesh::IncrementScale(float _incXBy, float _incYBy, float _incZBy)
{
}

// Sets the rotation angle (degrees; converted to radians in
// CalculateModelMatrix).
void ModelMesh::SetRotationAngle(float _angle)
{
    rotationAngle = _angle;
}

// NOTE(review): no-op -- body was left empty.
void ModelMesh::IncrementPositionVec(glm::vec3 _incrementVec)
{
}

// Replaces the mesh position component-wise.
void ModelMesh::SetPosition(float _XPos, float _YPos, float _ZPos)
{
    position.x = _XPos;
    position.y = _YPos;
    position.z = _ZPos;
}

// NOTE(review): duplicates SetRotationAngle() -- both assign rotationAngle.
void ModelMesh::SetRotation(float _angle)
{
    rotationAngle = _angle;
}
/* * Copyright (c) Nordic Semiconductor ASA * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, this * list of conditions and the following disclaimer in the documentation and/or * other materials provided with the distribution. * * 3. Neither the name of Nordic Semiconductor ASA nor the names of other * contributors to this software may be used to endorse or promote products * derived from this software without specific prior written permission. * * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * */ /** @file * @brief Common defines and macros for firmware developed by Nordic Semiconductor. 
 */

#ifndef NORDIC_COMMON_H__
#define NORDIC_COMMON_H__

/** The upper 8 bits of a 32 bit value */
//lint -emacro(572,MSB) // Suppress warning 572 "Excessive shift value"
#define MSB(a) (((a) & 0xFF000000) >> 24)
/** The lower 8 bits (of a 32 bit value) */
#define LSB(a) ((a) & 0x000000FF)

/** The upper 8 bits of a 16 bit value */
//lint -emacro(572,MSB_16) // Suppress warning 572 "Excessive shift value"
#define MSB_16(a) (((a) & 0xFF00) >> 8)
/** The lower 8 bits (of a 16 bit value) */
#define LSB_16(a) ((a) & 0x00FF)

/** Leaves the minimum of the two 32-bit arguments.
 *  NOTE: both arguments are evaluated twice -- do not pass expressions
 *  with side effects. */
/*lint -emacro(506, MIN) */ /* Suppress "Constant value Boolean */
#define MIN(a, b) ((a) < (b) ? (a) : (b))
/** Leaves the maximum of the two 32-bit arguments.
 *  NOTE: both arguments are evaluated twice -- do not pass expressions
 *  with side effects. */
/*lint -emacro(506, MAX) */ /* Suppress "Constant value Boolean */
#define MAX(a, b) ((a) < (b) ? (b) : (a))

/** Concatenates two parameters. Useful as a second level of indirection,
 * when a parameter can be macro itself. */
#define CONCAT_2(p1, p2) p1##p2
/** Concatenates three parameters. Useful as a second level of indirection,
 * when a parameter can be macro itself. */
#define CONCAT_3(p1, p2, p3) p1##p2##p3

/**@brief Set a bit in the uint32 word.
 *
 * @param[in] W  Word whose bit is being set.
 * @param[in] B  Bit number in the word to be set.
 */
#define SET_BIT(W,B)  ((W) |= (uint32_t)(1U << (B)))

/**@brief Clears a bit in the uint32 word.
 *
 * @param[in] W   Word whose bit is to be cleared.
 * @param[in] B   Bit number in the word to be cleared.
 */
#define CLR_BIT(W, B) ((W) &= (~((uint32_t)1 << (B))))

/**@brief Checks if a bit is set.
 *
 * @param[in] W   Word whose bit is to be checked.
 * @param[in] B   Bit number in the word to be checked.
 *
 * @retval 1 if bit is set.
 * @retval 0 if bit is not set.
 */
#define IS_SET(W,B) (((W) >> (B)) & 1)

/* Single-bit masks: BIT_n == (1 << n). */
#define BIT_0 0x01 /**< The value of bit 0 */
#define BIT_1 0x02 /**< The value of bit 1 */
#define BIT_2 0x04 /**< The value of bit 2 */
#define BIT_3 0x08 /**< The value of bit 3 */
#define BIT_4 0x10 /**< The value of bit 4 */
#define BIT_5 0x20 /**< The value of bit 5 */
#define BIT_6 0x40 /**< The value of bit 6 */
#define BIT_7 0x80 /**< The value of bit 7 */
#define BIT_8 0x0100 /**< The value of bit 8 */
#define BIT_9 0x0200 /**< The value of bit 9 */
#define BIT_10 0x0400 /**< The value of bit 10 */
#define BIT_11 0x0800 /**< The value of bit 11 */
#define BIT_12 0x1000 /**< The value of bit 12 */
#define BIT_13 0x2000 /**< The value of bit 13 */
#define BIT_14 0x4000 /**< The value of bit 14 */
#define BIT_15 0x8000 /**< The value of bit 15 */
#define BIT_16 0x00010000 /**< The value of bit 16 */
#define BIT_17 0x00020000 /**< The value of bit 17 */
#define BIT_18 0x00040000 /**< The value of bit 18 */
#define BIT_19 0x00080000 /**< The value of bit 19 */
#define BIT_20 0x00100000 /**< The value of bit 20 */
#define BIT_21 0x00200000 /**< The value of bit 21 */
#define BIT_22 0x00400000 /**< The value of bit 22 */
#define BIT_23 0x00800000 /**< The value of bit 23 */
#define BIT_24 0x01000000 /**< The value of bit 24 */
#define BIT_25 0x02000000 /**< The value of bit 25 */
#define BIT_26 0x04000000 /**< The value of bit 26 */
#define BIT_27 0x08000000 /**< The value of bit 27 */
#define BIT_28 0x10000000 /**< The value of bit 28 */
#define BIT_29 0x20000000 /**< The value of bit 29 */
#define BIT_30 0x40000000 /**< The value of bit 30 */
#define BIT_31 0x80000000 /**< The value of bit 31 */

/** Marks a variable as intentionally unused to silence compiler warnings. */
#define UNUSED_VARIABLE(X)  ((void)(X))
/** Marks a function parameter as intentionally unused. */
#define UNUSED_PARAMETER(X) UNUSED_VARIABLE(X)

#endif // NORDIC_COMMON_H__
// Copyright 2019 Kube Capacity Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package capacity

import (
	"testing"

	"github.com/stretchr/testify/assert"
	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	v1beta1 "k8s.io/metrics/pkg/apis/metrics/v1beta1"
)

// TestBuildListClusterMetricsNoOptions verifies the list output with every
// display option disabled: only request/limit figures appear, and no pod,
// container, utilization or pod-count data is emitted.
func TestBuildListClusterMetricsNoOptions(t *testing.T) {
	cm := getTestClusterMetric()
	lp := listPrinter{
		cm: &cm,
	}
	lcm := lp.buildListClusterMetrics()
	// Cluster-wide totals: requests/limits only.
	assert.EqualValues(t, &listClusterTotals{
		CPU: &listResourceOutput{
			Requests:    "650m",
			RequestsPct: "65%",
			Limits:      "810m",
			LimitsPct:   "81%",
		},
		Memory: &listResourceOutput{
			Requests:    "410Mi",
			RequestsPct: "10%",
			Limits:      "580Mi",
			LimitsPct:   "14%",
		},
	}, lcm.ClusterTotals)
	// The single node mirrors the cluster totals since the fixture has
	// exactly one node.
	assert.EqualValues(t, &listNodeMetric{
		Name: "example-node-1",
		CPU: &listResourceOutput{
			Requests:    "650m",
			RequestsPct: "65%",
			Limits:      "810m",
			LimitsPct:   "81%",
		},
		Memory: &listResourceOutput{
			Requests:    "410Mi",
			RequestsPct: "10%",
			Limits:      "580Mi",
			LimitsPct:   "14%",
		},
	}, lcm.Nodes[0])
}

// TestBuildListClusterMetricsAllOptions verifies the list output with
// utilization, pods, containers and pod counts all enabled.
func TestBuildListClusterMetricsAllOptions(t *testing.T) {
	cm := getTestClusterMetric()
	lp := listPrinter{
		cm:             &cm,
		showUtil:       true,
		showPods:       true,
		showContainers: true,
		showPodCount:   true,
	}
	lcm := lp.buildListClusterMetrics()
	// Cluster totals now also carry utilization and the pod count.
	assert.EqualValues(t, &listClusterTotals{
		CPU: &listResourceOutput{
			Requests:       "650m",
			RequestsPct:    "65%",
			Limits:         "810m",
			LimitsPct:      "81%",
			Utilization:    "63m",
			UtilizationPct: "6%",
		},
		Memory: &listResourceOutput{
			Requests:       "410Mi",
			RequestsPct:    "10%",
			Limits:         "580Mi",
			LimitsPct:      "14%",
			Utilization:    "439Mi",
			UtilizationPct: "10%",
		},
		PodCount: "1/110",
	}, lcm.ClusterTotals)
	// The node entry additionally nests the pod and its two containers.
	assert.EqualValues(t, &listNodeMetric{
		Name:     "example-node-1",
		PodCount: "1/110",
		CPU: &listResourceOutput{
			Requests:       "650m",
			RequestsPct:    "65%",
			Limits:         "810m",
			LimitsPct:      "81%",
			Utilization:    "63m",
			UtilizationPct: "6%",
		},
		Memory: &listResourceOutput{
			Requests:       "410Mi",
			RequestsPct:    "10%",
			Limits:         "580Mi",
			LimitsPct:      "14%",
			Utilization:    "439Mi",
			UtilizationPct: "10%",
		},
		Pods: []*listPod{
			{
				Name:      "example-pod",
				Namespace: "default",
				CPU: &listResourceOutput{
					Requests:       "650m",
					RequestsPct:    "65%",
					Limits:         "810m",
					LimitsPct:      "81%",
					Utilization:    "63m",
					UtilizationPct: "6%",
				},
				Memory: &listResourceOutput{
					Requests:       "410Mi",
					RequestsPct:    "10%",
					Limits:         "580Mi",
					LimitsPct:      "14%",
					Utilization:    "439Mi",
					UtilizationPct: "10%",
				},
				Containers: []listContainer{
					{
						Name: "example-container-1",
						CPU: &listResourceOutput{
							Requests:       "450m",
							RequestsPct:    "45%",
							Limits:         "560m",
							LimitsPct:      "56%",
							Utilization:    "40m",
							UtilizationPct: "4%",
						},
						Memory: &listResourceOutput{
							Requests:       "160Mi",
							RequestsPct:    "4%",
							Limits:         "280Mi",
							LimitsPct:      "7%",
							Utilization:    "288Mi",
							UtilizationPct: "7%",
						},
					},
					{
						Name: "example-container-2",
						CPU: &listResourceOutput{
							Requests:       "200m",
							RequestsPct:    "20%",
							Limits:         "250m",
							LimitsPct:      "25%",
							Utilization:    "23m",
							UtilizationPct: "2%",
						},
						Memory: &listResourceOutput{
							Requests:       "250Mi",
							RequestsPct:    "6%",
							Limits:         "300Mi",
							LimitsPct:      "7%",
							Utilization:    "151Mi",
							UtilizationPct: "3%",
						},
					},
				},
			},
		}}, lcm.Nodes[0])
}

// getTestClusterMetric assembles a fixture with one node (1000m CPU /
// 4000Mi memory / 110 pods allocatable), one pod and two containers,
// covering pod specs, pod metrics, node allocatable and node usage.
func getTestClusterMetric() clusterMetric {
	return buildClusterMetric(
		&corev1.PodList{
			Items: []corev1.Pod{
				{
					ObjectMeta: metav1.ObjectMeta{
						Name:      "example-pod",
						Namespace: "default",
					},
					Spec: corev1.PodSpec{
						NodeName: "example-node-1",
						Containers: []corev1.Container{
							{
								Name: "example-container-1",
								Resources: corev1.ResourceRequirements{
									Requests: corev1.ResourceList{
										"cpu":    resource.MustParse("450m"),
										"memory": resource.MustParse("160Mi"),
									},
									Limits: corev1.ResourceList{
										"cpu":    resource.MustParse("560m"),
										"memory": resource.MustParse("280Mi"),
									},
								},
							},
							{
								Name: "example-container-2",
								Resources: corev1.ResourceRequirements{
									Requests: corev1.ResourceList{
										"cpu":    resource.MustParse("200m"),
										"memory": resource.MustParse("250Mi"),
									},
									Limits: corev1.ResourceList{
										"cpu":    resource.MustParse("250m"),
										"memory": resource.MustParse("300Mi"),
									},
								},
							},
						},
					},
				},
			},
		},
		&v1beta1.PodMetricsList{
			Items: []v1beta1.PodMetrics{
				{
					ObjectMeta: metav1.ObjectMeta{
						Name:      "example-pod",
						Namespace: "default",
					},
					Containers: []v1beta1.ContainerMetrics{
						{
							Name: "example-container-1",
							Usage: corev1.ResourceList{
								"cpu":    resource.MustParse("40m"),
								"memory": resource.MustParse("288Mi"),
							},
						},
						{
							Name: "example-container-2",
							Usage: corev1.ResourceList{
								"cpu":    resource.MustParse("23m"),
								"memory": resource.MustParse("151Mi"),
							},
						},
					},
				},
			},
		},
		&corev1.NodeList{
			Items: []corev1.Node{
				{
					ObjectMeta: metav1.ObjectMeta{
						Name: "example-node-1",
					},
					Status: corev1.NodeStatus{
						Allocatable: corev1.ResourceList{
							"cpu":    resource.MustParse("1000m"),
							"memory": resource.MustParse("4000Mi"),
							"pods":   resource.MustParse("110"),
						},
					},
				},
			},
		},
		&v1beta1.NodeMetricsList{
			Items: []v1beta1.NodeMetrics{
				{
					ObjectMeta: metav1.ObjectMeta{
						Name: "example-node-1",
					},
					Usage: corev1.ResourceList{
						"cpu":    resource.MustParse("63m"),
						"memory": resource.MustParse("439Mi"),
					},
				},
			},
		},
	)
}
/**
 * psh's default EA behaviour uses every solution during breeding
 * by stepping through the population and mutating (etc.) each one;
 * tournaments are only used to select a mating partner.
 * This version also implements a more standard EA in which
 * tournament selection is used to determine all parents, or
 * alternatively crowding can be used.
 * <p>
 * The strategy is chosen by {@code reproductionType}: any value containing
 * "crowding" delegates to {@code ReproduceWithCrowding()}, "standard" runs
 * the tournament-selection EA below, and anything else falls back to the
 * superclass implementation.
 * @see org.spiderland.Psh.PushGP#Reproduce()
 */
@Override
protected void Reproduce() {
	if(reproductionType.contains("crowding")) {
		ReproduceWithCrowding();
	}
	else if(reproductionType.equals("standard")) {
		// Populations are double-buffered: breed from the current buffer
		// into the other one.
		int nextPopulation = _currentPopulation == 0 ? 1 : 0;
		// Normalising constant so the mutation percentage can be compared
		// against a single uniform draw in [0,1).
		float psum = _crossoverPercent + _mutationPercent;
		// Elitism: slot 0 of the next generation always receives a copy of
		// the best individual (optionally simplified first).
		GAIndividual elite = _populations[_currentPopulation][_bestIndividual].clone();
		if(_RNG.nextFloat() < _simplificationPercent/100f) {
			elite = ReproduceBySimplification(elite);
		}
		_populations[nextPopulation][0] = elite;
		for (int n = 1; n < _populations[_currentPopulation].length; n++) {
			float method = _RNG.nextFloat();
			GAIndividual next;
			if (method < _mutationPercent/psum) {
				// Mutation: clone a single tournament winner and mutate it.
				PushGPIndividual i = (PushGPIndividual) Select(_tournamentSize, n).clone();
				next = ReproduceByMutation(i);
			} else {
				// Crossover: both parents are chosen by tournament selection.
				PushGPIndividual a = (PushGPIndividual) Select(_tournamentSize, n);
				PushGPIndividual b = (PushGPIndividual) Select(_tournamentSize, n);
				next = ReproduceByCrossover(a, b);
			}
			// Optional post-breeding simplification pass on the offspring.
			if(_RNG.nextFloat() < _simplificationPercent/100f) {
				next = ReproduceBySimplification(next);
			}
			_populations[nextPopulation][n] = next;
		}
	}
	else {
		// Any other reproductionType: keep psh's default behaviour.
		super.Reproduce();
		return;
	}
}
def contains(element):
    """Return a predicate that reports whether ``element`` is a member of
    the predicate's argument.

    The predicate returns False instead of raising when the membership test
    itself fails (e.g. the argument is not a container).

    :param element: value to look for.
    :return: one-argument callable returning a bool.
    """
    def predicate(argument):
        try:
            return element in argument
        # Fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only ordinary errors.
        except Exception:
            return False
    return predicate
<reponame>Dru-Daniels/soko-wars2<gh_stars>0
// Sokoban level definition: a 2-D grid of numeric tile codes, one inner
// array per board row.
// NOTE(review): the meaning of each code is defined by the game's renderer,
// which is not visible here.  99 appears to outline the walls and 0 the
// space outside the board; confirm the remaining codes (7, 8, 9, 10, 38,
// 51, 52, 64, 77) against the game's tile map before relying on them.
const level = [
  [ 0,  0, 99, 99, 99,  0,  0,  0,  0,  0],
  [ 0,  0, 99, 64, 99,  0,  0,  0,  0,  0],
  [ 0,  0, 99,  0, 99, 99, 99, 99,  0,  0],
  [99, 99, 99,  9,  0, 10, 77, 99,  0,  0],
  [99, 51,  0,  8, 52, 99, 99, 99,  0,  0],
  [99, 99, 99, 99,  7, 99,  0,  0,  0,  0],
  [ 0,  0,  0, 99, 38, 99,  0,  0,  0,  0],
  [ 0,  0,  0, 99, 99, 99,  0,  0,  0,  0],
]

export default level
<gh_stars>1-10
from django.conf.urls import url

from shop import views

app_name = 'shop'

# URL routes for the shop app.
urlpatterns = [
    # Purchase a specific product (by primary key).
    url(r'^(?P<pk>\d+)/buy$', views.ShopPurchaseView.as_view(), name='buy'),
    # Product view for a single product (by primary key).
    url(r'^(?P<pk>\d+)$', views.ShopProdView.as_view(), name='prod'),
    # Same view without a pk (index page).
    # NOTE(review): this route reuses the name 'prod', so
    # reverse('shop:prod') is ambiguous -- confirm this is intentional.
    url(r'^$', views.ShopProdView.as_view(), name='prod'),
    # User inventory listing.
    url(r'^inven/$', views.ShopInvenView.as_view(), name='inven'),
    # Retrieve an item (by primary key) from the inventory.
    url(r'^retrieve/(?P<pk>\d+)$', views.ShopRetrieveView.as_view(), name='retrieve')
]
from functools import reduce
from math import gcd


def solve(x_ref, values):
    """Largest step size that can exactly reach every point from ``x_ref``.

    This is the GCD of all distances ``|v - x_ref|`` (0 when every point
    coincides with ``x_ref``).

    :param x_ref: the reference coordinate X.
    :param values: iterable of point coordinates (at least one).
    :return: the GCD of the distances.
    """
    # Fix: the original hand-rolled multi-way Euclid raised
    # ZeroDivisionError whenever some point equalled X (min distance 0);
    # math.gcd treats 0 correctly (gcd(0, a) == a).
    return reduce(gcd, (abs(v - x_ref) for v in values))


if __name__ == "__main__":
    # Input: first line "N X", second line the N point coordinates.
    N, X = (int(t) for t in input().split())
    print(solve(X, [int(t) for t in input().split()]))
Deafness and Psychiatric Illness Summary Review of the literature concerning the relationship between deafness and psychiatric disorder reveals differences in the pattern of illness depending on the severity of deafness and the age of onset. In particular, the prevalence of schizophrenia in the prelingually deaf is similar to that found in the normal population, whereas the hard of hearing are over-represented among samples of patients suffering from paranoid psychoses in later life. Possible modes of action of long-standing hearing loss in the aetiology of paranoid illnesses are considered: the psychological and social consequences of deafness, the possible contribution of sensory deprivation phenomena, and the interference of hearing loss in attention, perception and communication. Finally, possible future lines of research are suggested.
///////////////////////////////////////////////////////////////////////////////////////////// // Copyright 2017 Intel Corporation // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. ///////////////////////////////////////////////////////////////////////////////////////////// #include "InstantAccess_Tiling.h" #include <windows.h> #include <d3d11.h> #include <d3dcompiler.h> #include <cassert> #include "resource.h" #include <stdio.h> #include "emmintrin.h" // Each function uses the following helper functions to convert to and from tiled addresses. UINT swizzle_x(UINT x /*in bytes*/) { UINT x_low = (x & 0xF); UINT x_high = (x & 0xFFFFFFF0); return (x_low | (x_high << 5)); // 5 bits are coming from Y in the tile } // UnswizzleX also contains tile row information in high bits // must mask out using row pitch in calling function UINT UnswizzleX(UINT x) { UINT x_low = x & 0xF; UINT x_high = (0xFFFFFFF0 << 5) & x; return x_low | (x_high>>5); } UINT swizzle_y(UINT y /* in texels */) { UINT y_low = (y & 0x1f); return y_low << 4; // TileY is always 4bits } UINT UnswizzleY(UINT y) { UINT unswizzled = y ^ ((y & (1 << 9)) >> 3); return ((0x1f << 4) & unswizzled) >> 4; } bool IsPow2(UINT input) { return (input & (input - 1)) == 0; } // TileY contains a swizzling operation on the memory addressing path. 
// this is what this bit operation computes (swizzled[6] = tiled[6] ^ tiled[9]) UINT swizzleAddress(UINT tiledAddr) { return tiledAddr ^ ((tiledAddr & (1 << 9)) >> 3); } #define one_g (1 << 8) #define two_g (2 << 8) #define three_g (3 << 8) void WriteDRA_Copy(UINT mode, INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_DATA * pGPUSubResourceData, TextureInfo *pTexInfo, UINT mip, D3D11_MAPPED_SUBRESOURCE &texData) { const UINT TileH = 32; // height of tile in blocks // From loaded texture const UINT texWidthInBlock = pTexInfo->widthInBlocks; const UINT texHeightInBlock = pTexInfo->heightInBlocks; const UINT bytesPerBlock = pTexInfo->bytesPerBlock; // Width in bytes of the map (size of the first row of blocks) UINT mapPitch = pGPUSubResourceData->Pitch; // Offset to the mip const UINT xoffset = pGPUSubResourceData->XOffset; // in bytes const UINT yoffset = pGPUSubResourceData->YOffset; // in blocks // Mip height and width // this is incorrect for non-power-of-two sizes... // 12 texels, e.g. are 3 DXT blocks. 6 texels are 2. assert(IsPow2(texHeightInBlock) && IsPow2(texWidthInBlock)); const UINT mipHeightInBlock = (texHeightInBlock >> mip) > 0 ? (texHeightInBlock >> mip) : 1; const UINT mipWidthInBlock = (texWidthInBlock >> mip) > 0 ? (texWidthInBlock >> mip) : 1; const UINT mipWidthInBytes = mipWidthInBlock * bytesPerBlock; // Base address of Tiled Memory UINT_PTR destBase = (UINT_PTR)pGPUSubResourceData->pBaseAddress; // This is the begining of rygs method UINT offs_x0 = swizzle_x(xoffset); UINT offs_y = swizzle_y(yoffset); // incr_y corresponds to the byte size of a full row of tiles. 
UINT incr_y = swizzle_x(mapPitch); if (mode == MODE_LINEAR_ROWS) { if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { for (UINT y = 0; y < mipHeightInBlock; y++) { __m128i * pSrc = (__m128i *)((BYTE*)texData.pData + y*texData.RowPitch); for (UINT x = 0; x < mipWidthInBlock; x += 4) { UINT swizzled = swizzleAddress(swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4)); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzled); _mm_stream_si128(thisCL, *pSrc); thisCL++; pSrc++; } } } if(pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { for (UINT y = 0; y < mipHeightInBlock; y++) { __m128i * pSrc = (__m128i *)((BYTE*)texData.pData + y*texData.RowPitch); for (UINT x = 0; x < mipWidthInBlock; x += 4) { UINT offset = swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4); __m128i * thisCL = (__m128i *)((BYTE*)destBase + offset); _mm_stream_si128(thisCL, *pSrc); thisCL++; pSrc++; } } } } else if(mode == MODE_LINEAR_COLUMNS) { if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { for (UINT x = 0; x < mipWidthInBlock; x += 4) { for (UINT y = 0; y < mipHeightInBlock; y++) { __m128i * pSrc = (__m128i *)((BYTE*)texData.pData + y*texData.RowPitch + x * 4); UINT yadd = (y / 32) * 32; UINT swizzled = swizzleAddress(swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4)); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzled); _mm_stream_si128(thisCL, *pSrc); thisCL++; pSrc++; } } } if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { for (UINT x = 0; x < mipWidthInBlock; x += 4) { for (UINT y = 0; y < mipHeightInBlock; y++) { __m128i * pSrc = (__m128i *)((BYTE*)texData.pData + y*texData.RowPitch + x * 4); UINT yadd = (y / 32) * 32; UINT offset = 
swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4); __m128i * thisCL = (__m128i *)((BYTE*)destBase + offset); _mm_stream_si128(thisCL, *pSrc); thisCL++; pSrc++; } } } } else if(mode == MODE_TILED) { if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { __m128i* thisCL = (__m128i*) destBase; UINT offset = 0; for (UINT y = 0; y < mipHeightInBlock; y++) { UINT yadd = (y / 32) * 32; for (UINT x = 0; x < mipWidthInBlock; x += 4) { UINT usy = UnswizzleY(offset) + yadd; __m128i * pSrc = (__m128i*)((BYTE*)texData.pData + texData.RowPitch * usy + (UnswizzleX(offset) & (mipWidthInBytes - 1))); _mm_stream_si128(thisCL, *pSrc); thisCL++; pSrc++; offset += 16; } } } if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { __m128i* thisCL = (__m128i*) destBase; UINT offset = 0; for (UINT y = 0; y < mipHeightInBlock; y++) { UINT yadd = (y / 32) * 32; for (UINT x = 0; x < mipWidthInBlock; x += 4) { UINT usy = (((0x1f << 4) & offset) >> 4) + yadd; __m128i * pSrc = (__m128i*)((BYTE*)texData.pData + texData.RowPitch * usy + (UnswizzleX(offset) & (mipWidthInBytes - 1))); _mm_stream_si128(thisCL, *pSrc); thisCL++; pSrc++; offset += 16; } } } } else if (mode == MODE_LINEAR_INTRINSICS) { if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { // we use 2 different code paths depending on whether we can process a single CPU cacheline worth of data // (which, in TileY, corresponds to a 16Bx4rows of data - 2x4 DXT1 blocks, 1x4 DXT5 blocks, 4x4 RBBA8...) 
// at a time or if we have to rely on finer-grained, non-aligned // access (which only happens at the lowest mipmap levels) // If we do have enough data to process, the inner loop processes 4 source block rows at a time, // in chunks of 16B per row if (xoffset % 16 == 0 && yoffset % 4 == 0 && mipWidthInBytes % 16 == 0 && mipHeightInBlock % 4 == 0) { // swizzle_x/swizzle_y are leveraged to compute the increment needed when moving // into the 2d destination surface in X and Y direction. // we want x_mask to represent the increment for 16 bytes UINT x_mask = swizzle_x((UINT)-16); // Likewise for y direction, we want 4 rows at a time UINT y_mask = swizzle_y((UINT)-4); // offs_y (below) only encodes the y offset used for addressing _within the tile_. // offs_x0 combines 2 parts of the addressing: // 1. the complete x offset // 2. the part of the y offset that is used to know which tile row the current set of rows is part of. // This is what the next line computes (`yoffset / TileH' is the tile row index) // As a result, when offs_y wraps (i.e. the algorithm wraps into the next tile row), offs_x0 needs to be updated to // the next row of tiles (with incr_y again) offs_x0 += incr_y * (yoffset / TileH); BYTE * baseSrc = (BYTE*)texData.pData; for (UINT y = 0; y < mipHeightInBlock; y += 4) { // read 4 texel rows at time __m128i *src0 = (__m128i *) (baseSrc + y * mipWidthInBytes); __m128i *src1 = (__m128i *) (baseSrc + (y + 1) * mipWidthInBytes); __m128i *src2 = (__m128i *) (baseSrc + (y + 2) * mipWidthInBytes); __m128i *src3 = (__m128i *) (baseSrc + (y + 3) * mipWidthInBytes); UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 16) { // inner loop reads a single cacheline at a time. 
UINT tiledAddr = offs_y + offs_x; UINT swizzledAddr = swizzleAddress(tiledAddr); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzledAddr); // now stream the 64B of data to their final destination _mm_stream_si128(thisCL, *src0); thisCL++; src0++; _mm_stream_si128(thisCL, *src1); thisCL++; src1++; _mm_stream_si128(thisCL, *src2); thisCL++; src2++; _mm_stream_si128(thisCL, *src3); thisCL++; src3++; // move to next 4x4 in source order. // This uses a couple of tricks based on bit propagation and 2's complement. // read rygs method to understand it. offs_x = (offs_x - x_mask) & x_mask; } // same trick as for offs_x offs_y = (offs_y - y_mask) & y_mask; // wrap into next tile row if required if (!offs_y) offs_x0 += incr_y; } } else { // the 1x1 path follows exactly the same pattern as the 4x4 path, // but its inner loop only processes a single UINT, and as such is less cache/CPU friendly. // read the first implementation for additional details. UINT x_mask = swizzle_x((UINT)-4); UINT y_mask = swizzle_y(~0u); offs_x0 += incr_y * (yoffset / TileH); BYTE * baseSrc = (BYTE*)texData.pData; for (UINT y = 0; y < mipHeightInBlock; y++) { UINT *src = (UINT *)(baseSrc + y * mipWidthInBytes); UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 4) { UINT tiledAddr = offs_y + offs_x; UINT swizzledAddr = swizzleAddress(tiledAddr); *((UINT *)((BYTE*)destBase + swizzledAddr)) = *src++; offs_x = (offs_x - x_mask) & x_mask; } offs_y = (offs_y - y_mask) & y_mask; if (!offs_y) { offs_x0 += incr_y; } } } } if (pGPUSubResourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { // we use 2 different code paths depending on whether we can process a single CPU cacheline worth of data // (which, in TileY, corresponds to a 16Bx4rows of data - 2x4 DXT1 blocks, 1x4 DXT5 blocks, 4x4 RBBA8...) 
// at a time or if we have to rely on finer-grained, non-aligned // access (which only happens at the lowest mipmap levels) // If we do have enough data to process, the inner loop processes 4 source block rows at a time, // in chunks of 16B per row if (xoffset % 16 == 0 && yoffset % 4 == 0 && mipWidthInBytes % 16 == 0 && mipHeightInBlock % 4 == 0) { // swizzle_x/swizzle_y are leveraged to compute the increment needed when moving // into the 2d destination surface in X and Y direction. // we want x_mask to represent the increment for 16 bytes UINT x_mask = swizzle_x((UINT)-16); // Likewise for y direction, we want 4 rows at a time UINT y_mask = swizzle_y((UINT)-4); // offs_y (below) only encodes the y offset used for addressing _within the tile_. // offs_x0 combines 2 parts of the addressing: // 1. the complete x offset // 2. the part of the y offset that is used to know which tile row the current set of rows is part of. // This is what the next line computes (`yoffset / TileH' is the tile row index) // As a result, when offs_y wraps (i.e. the algorithm wraps into the next tile row), offs_x0 needs to be updated to // the next row of tiles (with incr_y again) offs_x0 += incr_y * (yoffset / TileH); BYTE * baseSrc = (BYTE*)texData.pData; for (UINT y = 0; y < mipHeightInBlock; y += 4) { // read 4 texel rows at time __m128i *src0 = (__m128i *) (baseSrc + y * mipWidthInBytes); __m128i *src1 = (__m128i *) (baseSrc + (y + 1) * mipWidthInBytes); __m128i *src2 = (__m128i *) (baseSrc + (y + 2) * mipWidthInBytes); __m128i *src3 = (__m128i *) (baseSrc + (y + 3) * mipWidthInBytes); UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 16) { // inner loop reads a single cacheline at a time. 
UINT tiledAddr = offs_y + offs_x; __m128i * thisCL = (__m128i *)((BYTE*)destBase + tiledAddr); // now stream the 64B of data to their final destination _mm_stream_si128(thisCL, *src0); thisCL++; src0++; _mm_stream_si128(thisCL, *src1); thisCL++; src1++; _mm_stream_si128(thisCL, *src2); thisCL++; src2++; _mm_stream_si128(thisCL, *src3); thisCL++; src3++; // move to next 4x4 in source order. // This uses a couple of tricks based on bit propagation and 2's complement. // read rygs method to understand it. offs_x = (offs_x - x_mask) & x_mask; } // same trick as for offs_x offs_y = (offs_y - y_mask) & y_mask; // wrap into next tile row if required if (!offs_y) offs_x0 += incr_y; } } else { // the 1x1 path follows exactly the same pattern as the 4x4 path, // but its inner loop only processes a single UINT, and as such is less cache/CPU friendly. // read the first implementation for additional details. UINT x_mask = swizzle_x((UINT)-4); UINT y_mask = swizzle_y(~0u); offs_x0 += incr_y * (yoffset / TileH); BYTE * baseSrc = (BYTE*)texData.pData; for (UINT y = 0; y < mipHeightInBlock; y++) { UINT *src = (UINT *)(baseSrc + y * mipWidthInBytes); UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 4) { UINT tiledAddr = offs_y + offs_x; *((UINT *)((BYTE*)destBase + tiledAddr)) = *src++; offs_x = (offs_x - x_mask) & x_mask; } offs_y = (offs_y - y_mask) & y_mask; if (!offs_y) { offs_x0 += incr_y; } } } } } } void WriteDRA_Solid(UINT mode, INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_DATA * pGPUSubresourceData, TextureInfo *pTexInfo, UINT mip, UINT color) { const UINT TileH = 32; // height of tile in blocks // From loaded texture const UINT texWidthInBlock = pTexInfo->widthInBlocks; const UINT texHeightInBlock = pTexInfo->heightInBlocks; const UINT bytesPerBlock = pTexInfo->bytesPerBlock; // Width in bytes of the map (size of the first row of blocks) UINT mapPitch = pGPUSubresourceData->Pitch; // Offset to the mip const UINT xoffset = pGPUSubresourceData->XOffset; // in 
bytes const UINT yoffset = pGPUSubresourceData->YOffset; // in blocks // Mip height and width // this is incorrect for non-power-of-two sizes... // 12 texels, e.g. are 3 DXT blocks. 6 texels are 2. assert(IsPow2(texHeightInBlock) && IsPow2(texWidthInBlock)); const UINT mipHeightInBlock = (texHeightInBlock >> mip) > 0 ? (texHeightInBlock >> mip) : 1; const UINT mipWidthInBlock = (texWidthInBlock >> mip) > 0 ? (texWidthInBlock >> mip) : 1; const UINT mipWidthInBytes = mipWidthInBlock * bytesPerBlock; // Base address of Tiled Memory UINT_PTR destBase = (UINT_PTR)pGPUSubresourceData->pBaseAddress; // This is the begining of rygs method UINT offs_x0 = swizzle_x(xoffset); UINT offs_y = swizzle_y(yoffset); // incr_y corresponds to the byte size of a full row of tiles. UINT incr_y = swizzle_x(mapPitch); if (mode == MODE_LINEAR_ROWS) { if (pGPUSubresourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { __declspec(align(16)) UINT baseSrc0[] = { color, color, color, color }; __m128i *src0 = (__m128i *) (&baseSrc0); for (UINT y = 0; y < mipHeightInBlock; y++) { for (UINT x = 0; x < mipWidthInBlock; x += 4) { UINT swizzled = swizzleAddress(swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4)); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzled); _mm_stream_si128(thisCL, *src0); } } } if (pGPUSubresourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { __declspec(align(16)) UINT baseSrc0[] = { color, color, color, color }; __m128i *src0 = (__m128i *) (&baseSrc0); for (UINT y = 0; y < mipHeightInBlock; y++) { for (UINT x = 0; x < mipWidthInBlock; x += 4) { UINT swizzled = swizzleAddress(swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4)); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzled); _mm_stream_si128(thisCL, *src0); } } } } else if(mode == MODE_LINEAR_COLUMNS) { if (pGPUSubresourceData->TileFormat == 
INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { __declspec(align(16)) UINT baseSrc0[] = { color, color, color, color }; __m128i *src0 = (__m128i *) (&baseSrc0); for (UINT x = 0; x < mipWidthInBlock; x += 4) { for (UINT y = 0; y < mipHeightInBlock; y++) { UINT swizzled = swizzleAddress(swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4)); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzled); _mm_stream_si128(thisCL, *src0); } } } if (pGPUSubresourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { __declspec(align(16)) UINT baseSrc0[] = { color, color, color, color }; __m128i *src0 = (__m128i *) (&baseSrc0); for (UINT x = 0; x < mipWidthInBlock; x += 4) { for (UINT y = 0; y < mipHeightInBlock; y++) { UINT swizzled = swizzleAddress(swizzle_y(yoffset + y) + incr_y * ((yoffset + y) / TileH) + swizzle_x(xoffset + x * 4)); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzled); _mm_stream_si128(thisCL, *src0); } } } } else if(mode == MODE_TILED) { __m128i * thisCL = (__m128i *)((BYTE*)destBase);// __declspec(align(16)) UINT baseSrc0[] = {color, color, color, color}; __m128i *src0 = (__m128i *) (&baseSrc0); for(UINT x = 0; x < mipWidthInBlock*mipHeightInBlock/4; x++) { _mm_stream_si128(thisCL++, *src0); } } else if(mode == MODE_LINEAR_INTRINSICS) { if (pGPUSubresourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y) { if (xoffset % 16 == 0 && yoffset % 4 == 0 && mipWidthInBytes % 16 == 0 && mipHeightInBlock % 4 == 0) { UINT x_mask = swizzle_x((UINT)-16); UINT y_mask = swizzle_y((UINT)-4); offs_x0 += incr_y * (yoffset / TileH); __declspec(align(16)) UINT baseSrc0[] = { color, color, color, color }; __m128i *src0 = (__m128i *) (&baseSrc0); for (UINT y = 0; y < mipHeightInBlock; y += 4) { UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 16) { // inner loop reads a single cacheline at a time. 
UINT tiledAddr = offs_y + offs_x; UINT swizzledAddr = swizzleAddress(tiledAddr); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzledAddr); _mm_stream_si128(thisCL++, *src0); _mm_stream_si128(thisCL++, *src0); _mm_stream_si128(thisCL++, *src0); _mm_stream_si128(thisCL++, *src0); offs_x = (offs_x - x_mask) & x_mask; } offs_y = (offs_y - y_mask) & y_mask; if (!offs_y) offs_x0 += incr_y; } } else { UINT x_mask = swizzle_x((UINT)-4); UINT y_mask = swizzle_y(~0u); offs_x0 += incr_y * (yoffset / TileH); for (UINT y = 0; y < mipHeightInBlock; y++) { UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 4) { UINT tiledAddr = offs_y + offs_x; UINT swizzledAddr = swizzleAddress(tiledAddr); *((UINT *)((BYTE*)destBase + swizzledAddr)) = color; offs_x = (offs_x - x_mask) & x_mask; } offs_y = (offs_y - y_mask) & y_mask; if (!offs_y) { offs_x0 += incr_y; } } } } if (pGPUSubresourceData->TileFormat == INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_TILE_TYPE_TILE_Y_NO_CSX_SWIZZLE) { if (xoffset % 16 == 0 && yoffset % 4 == 0 && mipWidthInBytes % 16 == 0 && mipHeightInBlock % 4 == 0) { UINT x_mask = swizzle_x((UINT)-16); UINT y_mask = swizzle_y((UINT)-4); offs_x0 += incr_y * (yoffset / TileH); __declspec(align(16)) UINT baseSrc0[] = { color, color, color, color }; __m128i *src0 = (__m128i *) (&baseSrc0); for (UINT y = 0; y < mipHeightInBlock; y += 4) { UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 16) { // inner loop reads a single cacheline at a time. 
UINT tiledAddr = offs_y + offs_x; UINT swizzledAddr = swizzleAddress(tiledAddr); __m128i * thisCL = (__m128i *)((BYTE*)destBase + swizzledAddr); _mm_stream_si128(thisCL++, *src0); _mm_stream_si128(thisCL++, *src0); _mm_stream_si128(thisCL++, *src0); _mm_stream_si128(thisCL++, *src0); offs_x = (offs_x - x_mask) & x_mask; } offs_y = (offs_y - y_mask) & y_mask; if (!offs_y) offs_x0 += incr_y; } } else { UINT x_mask = swizzle_x((UINT)-4); UINT y_mask = swizzle_y(~0u); offs_x0 += incr_y * (yoffset / TileH); for (UINT y = 0; y < mipHeightInBlock; y++) { UINT offs_x = offs_x0; for (UINT x = 0; x < mipWidthInBytes; x += 4) { UINT tiledAddr = offs_y + offs_x; UINT swizzledAddr = swizzleAddress(tiledAddr); *((UINT *)((BYTE*)destBase + swizzledAddr)) = color; offs_x = (offs_x - x_mask) & x_mask; } offs_y = (offs_y - y_mask) & y_mask; if (!offs_y) { offs_x0 += incr_y; } } } } } } void ReadDRA(UINT mode, INTC::RESOURCE_EXTENSION_DIRECT_ACCESS::MAP_DATA * pGPUSubresourceData, TextureInfo *pTexInfo, UINT mip, D3D11_MAPPED_SUBRESOURCE &texData) { const UINT TileH = 32; // height of tile in blocks // From loaded texture const UINT texWidthInBlock = pTexInfo->widthInBlocks; const UINT texHeightInBlock = pTexInfo->heightInBlocks; const UINT bytesPerBlock = pTexInfo->bytesPerBlock; // Width in bytes of the map (size of the first row of blocks) UINT mapPitch = pGPUSubresourceData->Pitch; // Offset to the mip const UINT xoffset = pGPUSubresourceData->XOffset; // in bytes const UINT yoffset = pGPUSubresourceData->YOffset; // in blocks // Mip height and width // this is incorrect for non-power-of-two sizes... // 12 texels, e.g. are 3 DXT blocks. 6 texels are 2. assert(IsPow2(texHeightInBlock) && IsPow2(texWidthInBlock)); const UINT mipHeightInBlock = (texHeightInBlock >> mip) > 0 ? (texHeightInBlock >> mip) : 1; const UINT mipWidthInBlock = (texWidthInBlock >> mip) > 0 ? 
(texWidthInBlock >> mip) : 1; // Base address of Tiled Memory __m128i* srcBase = (__m128i*)pGPUSubresourceData->pBaseAddress; __m128i* destBase = (__m128i*)texData.pData; // incr_y corresponds to the byte size of a full row of tiles. UINT incr_y = swizzle_x(mapPitch); if(mode == MODE_LINEAR_ROWS) { __m128i *thisCL = NULL; for(UINT y = 0; y < mipHeightInBlock; y++) { for(UINT x = 0; x < mipWidthInBlock; x+=4) { UINT swizzled = swizzleAddress(swizzle_y(yoffset+y) + incr_y * ((yoffset+y) / TileH) + (swizzle_x(xoffset+x*4))); thisCL = (__m128i *)((BYTE*)srcBase + swizzled); _mm_stream_si128(destBase++, *thisCL); } } } else if(mode == MODE_LINEAR_COLUMNS) { __m128i* thisCL = NULL; for(UINT x = 0; x < mipWidthInBlock; x+=4) { for(UINT y = 0; y < mipHeightInBlock; y++) { UINT swizzled = swizzleAddress(swizzle_y(yoffset+y) + incr_y * ((yoffset+y) / TileH) + (swizzle_x(xoffset+x*4))); thisCL = (__m128i *)((BYTE*)srcBase + swizzled); _mm_stream_si128(destBase++, *thisCL); } } } else if(mode == MODE_TILED) { destBase = (__m128i*)texData.pData; for(UINT x = 0; x < mipWidthInBlock*mipHeightInBlock; x+=4) { _mm_stream_si128(destBase, *srcBase); destBase++; srcBase++; } } }
def geodesic_system(self, c: torch.Tensor, dc: torch.Tensor) -> torch.Tensor:
    """Evaluate the right-hand side of the geodesic ODE, c'' = f(c, c').

    Args:
        c:  (N, d) tensor of curve points.
        dc: (N, d) tensor of curve derivatives at those points.

    Returns:
        (N, d) tensor of second derivatives, computed from the metric and
        its directional derivative along the curve.

    Note: the original text contained garbled shape annotations ("x(d)",
    "x1") fused into the code; they are restored here as comments, and the
    four near-identical autograd branches are merged (create_graph=True
    implies retain_graph=True, so the merged kwargs are equivalent).
    """
    N, d = c.shape
    requires_grad = c.requires_grad or dc.requires_grad
    z = c.clone().requires_grad_()
    dz = dc.clone().requires_grad_()

    # L[n] = <dz_n, M(z_n) dz_n>; the metric itself is re-evaluated below.
    L, M = self.inner(z, dz, dz, return_metric=True)

    # dL/dc: gradient of the energy density w.r.t. the curve points.  # N x d
    if requires_grad:
        dLdc = torch.cat([grad(L[n], z, create_graph=True)[0][n].unsqueeze(0) for n in range(N)])
    else:
        dLdc = torch.cat(
            [grad(L[n], z, retain_graph=(n < N - 1))[0][n].unsqueeze(0) for n in range(N)]
        )

    # Directional derivative of the metric along the curve, exact via autograd:
    # dMdt ~ (metric(z + h*dz) - metric(z)) / h
    M = self.metric(z)
    # A 2-D metric stores only the diagonal: shape (N, d) instead of (N, d, d).
    diagonal_metric = M.dim() == 2
    # Keep the graph only when the caller needs higher-order gradients.
    create = requires_grad
    if diagonal_metric:
        dMdt = torch.tensor(
            [
                [torch.sum(grad(M[n, i], z, create_graph=create, retain_graph=True)[0] * dz)
                 for i in range(d)]
                for n in range(N)
            ]
        )  # N x d
        # TODO: figure out how to not store the graph
    else:
        dMdt = torch.tensor(
            [
                [torch.sum(grad(M[n, i, j], z, create_graph=create, retain_graph=True)[0] * dz)
                 for i in range(d)
                 for j in range(d)]
                for n in range(N)
            ]
        ).view(N, d, d)  # N x d x d
        # TODO: figure out how to not store the graph

    with torch.set_grad_enabled(requires_grad):
        if diagonal_metric:
            ddc = (0.5 * dLdc - dMdt * dz) / M  # N x d
        else:
            # Solve the batched linear system M @ ddc = (0.5 * dLdc - dMdt @ dz).
            Mddc = 0.5 * dLdc - dMdt.bmm(dz.unsqueeze(-1)).squeeze(-1)
            ddc, _ = torch.solve(Mddc.unsqueeze(-1), M)
            ddc = ddc.squeeze(-1)  # N x d
    return ddc
Story highlights Sen. Al Franken said he plans to give speeches before he leaves Congress The Minnesota Democrat has been accused of touching women inappropriately Washington (CNN) Minnesota Democratic Sen. Al Franken, who announced plans earlier this month to resign his seat, will leave the Senate on January 2, his office said. "When I leave the Senate in a few weeks," Franken said during a speech on the Senate floor, "I will continue trying to be an educated citizen and an advocate and an activist." Until Wednesday afternoon, Franken had not given a specific date for when he will resign. The senator said he has learned a lot over the last eight and a half years in his position and gained new perspective on issues and how decisions are made in Washington. Franken added he will continue to give a series of speeches before he leaves. Read More
package com.example.model.users;

/**
 * Common base class for model entities in this package: holds the numeric
 * identifier shared by all concrete entity types.
 */
public abstract class BaseEntity {

    // Identifier; remains null until assigned (presumably by the persistence
    // layer -- confirm against the ORM/mapping configuration).
    private Integer id;

    /**
     * Returns this entity's identifier.
     *
     * @return the id, or {@code null} if none has been assigned yet
     */
    public Integer getId() {
        return id;
    }
}
'''
Created on Aug 14, 2015

@author: hari
'''
from builtins import object
from collections import deque


class Queue(object):
    """FIFO queue backed by :class:`collections.deque`.

    The previous implementation used a plain list, where ``del lst[0]``
    shifts every remaining element and makes each dequeue O(n); deque
    gives O(1) appends and pops at either end.
    """

    def __init__(self):
        self._items = deque()

    def enqueue(self, item):
        """Append ``item`` to the back of the queue."""
        self._items.append(item)

    def dequeue(self):
        """Remove and return the front item, or None if the queue is empty."""
        if not self._items:
            return None
        return self._items.popleft()

    def __len__(self):
        return len(self._items)

    @property
    def size(self):
        """Number of items currently queued."""
        return len(self._items)

    def __iter__(self):
        """Destructive iteration: yields (and removes) items in FIFO order."""
        while self._items:
            yield self.dequeue()

    @property
    def front(self):
        """The front item without removing it, or None if the queue is empty."""
        if not self._items:
            return None
        return self._items[0]

    def __str__(self):
        # list(...) preserves the historical "[...]" rendering of the contents.
        return '%s size %s Q %s' % (self.__class__.__name__, len(self), list(self._items))
The Twin Cities Lisp Users Group meeting for April was last Monday. The main topic was Web Frameworks, but there were also two shorter talks. Weblocks Presentation Patrick Stein gave this presentation at the TC Lispers meeting in April 2010. Weblocks on Vimeo. Allegro Serve and Web Actions Presentation Robert Goldman gave this presentation at the TC Lispers meeting in April 2010. Apology: Unfortunately, ScreenFlow bombed out on me when I went to stop recording. It subsequently saw that it had a partial project there but was unable to recover it. As such, there is no video available for this presentation. Feh. — Patrick Hunchentoot Presentation Paul Krueger gave this presentation at the TC Lispers meeting in April 2010. Hunchentoot on Vimeo. Cocoa Lisp Controller Presentation Paul Krueger gave this presentation at the TC Lispers meeting in April 2010. Cocoa Lisp Controller on Vimeo. CL-Growl Presentation Patrick Stein gave this presentation at the TC Lispers meeting in April 2010. CL-Growl on Vimeo.
// method to draw the bird to the screen public void drawBird() { if(mouseX < width && mouseY < 500) { bird.render(this, mouseX, mouseY, sky.getTime()); } else if(mouseY > 500) { bird.render(this, mouseX, sky.getTime()); } }
Media playback is unsupported on your device Media caption Large crowds had gathered at the store Police have been called to Tesco Extra supermarkets in Scotland after scuffles broke out among shoppers queuing for Black Friday bargains. Large crowds gathered at the Silverburn store, near Pollok in Glasgow, and Kingsway West store in Dundee, ahead of the sale, which began at midnight. Worried staff asked police to attend as customers jostled for some of the heavily-discounted goods on sale. Both stores were closed for a short time. There were no arrests. Sarah Coubrough wrote on Twitter: "Silverburn shut cos shoppers going mental with the sales. Think I'll stick to the online shopping today." Online footage Emma Somers posted: "Police called to the Tesco at Silverburn cause folk are fighting over the bargains for Black Friday." Police said that those involved in the scuffle had gone by the time they arrived, and that there were no arrests. Footage from the store shows shoppers pushing and shoving each other as they attempt to grab items from trolleys. Similar scenes took place at the Tesco Extra in Dundee, with footage of the scuffles being posted online. A statement from Police Scotland's Tayside division said: "For information the Tesco Extra, Kingsway West, Dundee has been closed at this time due to excessive numbers attending to purchase sale items. "This decision was made by Tesco staff to ensure the safety of all their customers." Officers attended other Tesco stores in England and Wales amid reports of crowd surges. Black Friday, traditionally a post-Thanksgiving discount shopping day in the United States, has become one of the busiest shopping days of the year. A Tesco spokesman said: "Over 600 Tesco stores have Black Friday offers available in store. In the interest of customer safety a small number of these stores contacted police last night to help control crowds safely and stores are now trading normally". 
Are you shopping for a bargain in the Black Friday sales? Did you attend any of the sales overnight? Email [email protected] with your experience.
<reponame>lucasfloriani/go-boilerplate
package daos

import (
	dbpkg "go-boilerplate/db"
	"go-boilerplate/models"

	"github.com/gin-gonic/gin"
)

// ArtistDAO persists artist data in database using gorm
// functions, contains methods for each CRUD actions.
//
// The struct is stateless: every method obtains its DB handle from the
// request context via dbpkg.Instance(c).
type ArtistDAO struct{}

// NewArtistDAO creates a new ArtistDAO
func NewArtistDAO() *ArtistDAO {
	return &ArtistDAO{}
}

// Get reads the artist with the specified ID from the database.
func (dao *ArtistDAO) Get(c *gin.Context, id uint) (*models.Artist, error) {
	artist := models.Artist{}
	db := dbpkg.Instance(c)
	// First(dest, id) looks the record up by primary key.
	err := db.First(&artist, id).Error
	return &artist, err
}

// Create saves a new artist record in the database.
// The Artist.Id field will be populated with an automatically generated ID upon successful saving.
func (dao *ArtistDAO) Create(c *gin.Context, artist *models.Artist) error {
	db := dbpkg.Instance(c)
	// NOTE(review): artist is already a pointer, so &artist passes a
	// **models.Artist to gorm; it appears to be tolerated, but passing
	// artist directly would be clearer -- confirm against the gorm version in use.
	return db.Save(&artist).Error
}

// Update saves the changes to an artist in the database.
func (dao *ArtistDAO) Update(c *gin.Context, id uint, artist *models.Artist) error {
	// Existence check first, so a missing id surfaces gorm's not-found
	// error instead of Save inserting a new row.
	if _, err := dao.Get(c, id); err != nil {
		return err
	}
	db := dbpkg.Instance(c)
	err := db.Save(&artist).Error
	return err
}

// Delete deletes an artist with the specified ID from the database.
func (dao *ArtistDAO) Delete(c *gin.Context, id uint) error {
	// Fetch first so the delete targets a record known to exist.
	artist, err := dao.Get(c, id)
	if err != nil {
		return err
	}
	db := dbpkg.Instance(c)
	return db.Delete(&artist).Error
}

// Count returns the number of the artist records in the database.
func (dao *ArtistDAO) Count(c *gin.Context) (count int, err error) {
	db := dbpkg.Instance(c)
	err = db.Model(&models.Artist{}).Count(&count).Error
	// named return values: count and err are set above
	return
}

// Query retrieves the artist records with the specified offset and limit from the database.
func (dao *ArtistDAO) Query(c *gin.Context, offset, limit int) ([]models.Artist, error) {
	artists := []models.Artist{}
	db := dbpkg.Instance(c)
	// Results are paginated and returned in ascending id order.
	return artists, db.Offset(offset).Limit(limit).Order("id asc").Find(&artists).Error
}
def authenticate(self, kf_endpoint: str, runtime_config_name: str) -> Optional[str]:
    """Probe the Kubeflow endpoint and fail if it redirects to an auth flow.

    A non-empty redirect history on the GET indicates the endpoint is
    protected and the runtime configuration must specify an authentication
    type.  Returns None when the endpoint responds without redirecting.
    """
    response = requests.get(kf_endpoint, allow_redirects=True)
    if response.history:
        message = (f'Authentication is required for Kubeflow at {kf_endpoint}. '
                   f'Update the authentication type setting in runtime configuration '
                   f"'{runtime_config_name}' and try again.")
        raise AuthenticationError(message, provider=self._type)
    return None
/*
 * Copyright 2012-2018 Chronicle Map Contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.openhft.chronicle.map.fromdocs;

import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.hash.serialization.BytesReader;
import net.openhft.chronicle.hash.serialization.StatefulCopyable;
import net.openhft.chronicle.wire.WireIn;
import net.openhft.chronicle.wire.WireOut;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;

/**
 * Reads CharSequence values stored with a configurable charset.  The
 * on-disk layout this reader consumes is: a stop-bit-encoded character
 * count (used to pre-size the StringBuilder), an int byte count, then the
 * encoded bytes, which are streamed through a {@link CharsetDecoder} in
 * fixed-size buffer chunks.
 */
public final class CharSequenceCustomEncodingBytesReader
        implements BytesReader<CharSequence>,
        StatefulCopyable<CharSequenceCustomEncodingBytesReader> {
    // config fields, non-final because read in readMarshallable()
    private Charset charset;
    private int inputBufferSize;

    // cache fields
    private transient CharsetDecoder charsetDecoder;
    private transient ByteBuffer inputBuffer;
    private transient CharBuffer outputBuffer;

    public CharSequenceCustomEncodingBytesReader(Charset charset, int inputBufferSize) {
        this.charset = charset;
        this.inputBufferSize = inputBufferSize;
        initTransients();
    }

    // Rebuilds the decoder and buffers from the config fields; called from
    // both the constructor and readMarshallable().
    private void initTransients() {
        charsetDecoder = charset.newDecoder();
        inputBuffer = ByteBuffer.allocate(inputBufferSize);
        // size the char buffer for the decoder's average expansion ratio;
        // overflow during decode is handled by flushing into the builder
        int outputBufferSize = (int) (inputBufferSize * charsetDecoder.averageCharsPerByte());
        outputBuffer = CharBuffer.allocate(outputBufferSize);
    }

    @NotNull
    @Override
    public CharSequence read(Bytes in, @Nullable CharSequence using) {
        // stop-bit-encoded character count of the stored sequence
        long csLengthAsLong = in.readStopBit();
        if (csLengthAsLong > Integer.MAX_VALUE) {
            throw new IORuntimeException("cs len shouldn't be more than " + Integer.MAX_VALUE +
                    ", " + csLengthAsLong + " read");
        }
        int csLength = (int) csLengthAsLong;
        // reuse the caller-provided builder when possible
        StringBuilder sb;
        if (using instanceof StringBuilder) {
            sb = (StringBuilder) using;
            sb.setLength(0);
            sb.ensureCapacity(csLength);
        } else {
            sb = new StringBuilder(csLength);
        }

        // number of encoded bytes that follow
        int remainingBytes = in.readInt();
        charsetDecoder.reset();
        inputBuffer.clear();
        outputBuffer.clear();
        boolean endOfInput = false;
        // this loop inspired by the CharsetDecoder.decode(ByteBuffer) implementation
        while (true) {
            if (!endOfInput) {
                // refill inputBuffer with at most its remaining capacity,
                // capped by the bytes left in the stored value
                int inputChunkSize = Math.min(inputBuffer.remaining(), remainingBytes);
                inputBuffer.limit(inputBuffer.position() + inputChunkSize);
                in.read(inputBuffer);
                inputBuffer.flip();
                remainingBytes -= inputChunkSize;
                endOfInput = remainingBytes == 0;
            }
            CoderResult cr = inputBuffer.hasRemaining() ?
                    charsetDecoder.decode(inputBuffer, outputBuffer, endOfInput) :
                    CoderResult.UNDERFLOW;

            if (cr.isUnderflow() && endOfInput)
                cr = charsetDecoder.flush(outputBuffer);

            if (cr.isUnderflow()) {
                if (endOfInput) {
                    break;
                } else {
                    // keep undecoded tail bytes (e.g. a split multi-byte char)
                    // at the buffer start before the next refill
                    inputBuffer.compact();
                    continue;
                }
            }

            if (cr.isOverflow()) {
                // output buffer full: drain decoded chars into the builder
                outputBuffer.flip();
                sb.append(outputBuffer);
                outputBuffer.clear();
                continue;
            }

            // malformed input or unmappable character
            try {
                cr.throwException();
            } catch (CharacterCodingException e) {
                throw new IORuntimeException(e);
            }
        }
        // drain whatever the final decode/flush produced
        outputBuffer.flip();
        sb.append(outputBuffer);
        return sb;
    }

    // Deserializes the config fields and rebuilds the transient decoder state.
    @Override
    public void readMarshallable(@NotNull WireIn wireIn) throws IORuntimeException {
        charset = (Charset) wireIn.read(() -> "charset").object();
        inputBufferSize = wireIn.read(() -> "inputBufferSize").int32();
        initTransients();
    }

    @Override
    public void writeMarshallable(@NotNull WireOut wireOut) {
        wireOut.write(() -> "charset").object(charset);
        wireOut.write(() -> "inputBufferSize").int32(inputBufferSize);
    }

    // Each copy gets its own decoder and buffers (they are stateful).
    @Override
    public CharSequenceCustomEncodingBytesReader copy() {
        return new CharSequenceCustomEncodingBytesReader(charset, inputBufferSize);
    }
}
Effects of genetic knock-down of organic anion transporter genes on secretion of fluorescent organic ions by Malpighian tubules of Drosophila melanogaster. An earlier study has shown that RNAi knock-down of a single organic anion transporter (OAT) gene in the principal cells of Drosophila Malpighian tubules is associated with reductions in the expression of multiple, functionally related genes. In this study, we measured the rates of secretion of four fluorescent ions by tubules isolated from flies expressing targeted RNAi knock-down of specific OAT genes. Droplets secreted by isolated tubules set up in the Ramsay assay were collected in optically flat capillary tubes and the concentrations of fluorescent ions were determined by confocal laser scanning microscopy. Reductions in the expression of organic anion (OA) transporting polypeptide 58Dc (OATP; CG3380) were associated with reduced secretion of the OAs fluorescein and Texas Red. Reduction in the expression of Drosophila multidrug resistance associated protein (dMRP; CG6214) was correlated with reduced secretion of the P-glycoprotein substrate daunorubicin. Secretion of the organic cation quinacrine was unaffected by reduced expression of OATP, dMRP, or a multidrug efflux transporter (MET; CG30344). The results highlight the difficulties of assigning a rate-limiting role in transport of a specific OA to a single membrane transporter.
// Re-export the Kusama context hook and provider from a single entry point.
export { useKusama, KusamaContextProvider } from './KusamaContext'
<gh_stars>0
package alien4cloud.orchestrators.services;

import alien4cloud.component.repository.exception.CSARVersionAlreadyExistsException;
import alien4cloud.dao.IGenericSearchDAO;
import alien4cloud.dao.model.GetMultipleDataResult;
import alien4cloud.deployment.DeploymentService;
import alien4cloud.exception.AlreadyExistException;
import alien4cloud.model.deployment.Deployment;
import alien4cloud.model.orchestrators.Orchestrator;
import alien4cloud.model.orchestrators.OrchestratorConfiguration;
import alien4cloud.model.orchestrators.OrchestratorState;
import alien4cloud.orchestrators.locations.services.LocationService;
import alien4cloud.orchestrators.plugin.ILocationAutoConfigurer;
import alien4cloud.orchestrators.plugin.IOrchestratorPlugin;
import alien4cloud.orchestrators.plugin.IOrchestratorPluginFactory;
import alien4cloud.orchestrators.plugin.model.PluginArchive;
import alien4cloud.paas.OrchestratorPluginService;
import alien4cloud.paas.exception.PluginConfigurationException;
import alien4cloud.tosca.ArchiveIndexer;
import alien4cloud.tosca.parser.ParsingError;
import alien4cloud.utils.MapUtil;
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.elasticsearch.mapping.QueryHelper;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import javax.inject.Inject;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

/**
 * Service to manage state of an orchestrator
 *
 * State transitions visible here: DISABLED -> CONNECTING -> CONNECTED on
 * enable/load, and back to DISABLED on disable or on a failed load.
 */
@Slf4j
@Component
public class OrchestratorStateService {
    @Inject
    private QueryHelper queryHelper;
    @Resource(name = "alien-es-dao")
    private IGenericSearchDAO alienDAO;
    @Inject
    private OrchestratorConfigurationService orchestratorConfigurationService;
    @Inject
    private OrchestratorPluginService orchestratorPluginService;
    @Inject
    private DeploymentService deploymentService;
    @Inject
    private OrchestratorService orchestratorService;
    @Inject
    private LocationService locationService;
    @Inject
    private ArchiveIndexer archiveIndexer;

    /**
     * Initialize all orchestrator that have a non-disabled state.
     * Note: Each orchestrator initialization is down in it's own thread so it doesn't impact application startup or other orchestrator connection.
     *
     * @return a list of futures for those who want to wait for task to be done.
     */
    public List<Future<?>> initialize() {
        ExecutorService executorService = Executors.newCachedThreadPool();
        List<Future<?>> futures = new ArrayList<Future<?>>();
        // get all the orchestrators that are not disabled
        List<Orchestrator> enabledOrchestrators = orchestratorService.getAllEnabledOrchestrators();
        if (enabledOrchestrators == null) {
            return futures;
        }
        for (final Orchestrator orchestrator : enabledOrchestrators) {
            // error in initialization and timeouts should not impact startup time of Alien 4 cloud and other PaaS Providers.
            Future<?> future = executorService.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        load(orchestrator);
                    } catch (AlreadyExistException e) {
                        log.info("Orchestrator was already loaded at initialization for {}.", orchestrator.getId());
                    } catch (Throwable t) {
                        // we have to catch everything as we don't know what a plugin can do here and cannot interrupt startup.
                        // Any orchestrator that failed to load will be considered as DISABLED as the registration didn't occurred
                        log.error("Unexpected error in plugin", t);
                        orchestrator.setState(OrchestratorState.DISABLED);
                        alienDAO.save(orchestrator);
                    }
                }
            });
            futures.add(future);
        }
        return futures;
    }

    /**
     * Enable an orchestrator.
     *
     * @param orchestrator
     *            The orchestrator to enable.
     */
    public synchronized void enable(Orchestrator orchestrator) throws PluginConfigurationException {
        if (orchestrator.getState().equals(OrchestratorState.DISABLED)) {
            load(orchestrator);
        } else {
            log.debug("Request to enable ignored: orchestrator {} (id: {}) is already enabled", orchestrator.getName(), orchestrator.getId());
            // NOTE(review): the "{}" placeholder below is NOT interpolated (this is an
            // exception constructor, not a logger call) -- the message prints literally.
            // Consider String.format; also "instanciated" -> "instantiated".
            throw new AlreadyExistException("Orchestrator {} is already instanciated.");
        }
    }

    /**
     * Load and connect the given orchestrator.
     *
     * @param orchestrator
     *            the orchestrator to load and connect.
     */
    private void load(Orchestrator orchestrator) throws PluginConfigurationException {
        log.info("Loading and connecting orchestrator {} (id: {})", orchestrator.getName(), orchestrator.getId());
        // check that the orchestrator is not already loaded.
        if (orchestratorPluginService.get(orchestrator.getId()) != null) {
            throw new AlreadyExistException("Plugin is already loaded.");
        }
        // switch the state to connecting
        orchestrator.setState(OrchestratorState.CONNECTING);
        alienDAO.save(orchestrator);

        // TODO move below in a thread to perform plugin loading and connection asynchronously
        IOrchestratorPluginFactory orchestratorFactory = orchestratorService.getPluginFactory(orchestrator);
        IOrchestratorPlugin<Object> orchestratorInstance = orchestratorFactory.newInstance();

        // index the archive in alien catalog
        try {
            for (PluginArchive pluginArchive : orchestratorInstance.pluginArchives()) {
                archiveIndexer.importArchive(pluginArchive.getArchive(), pluginArchive.getArchiveFilePath(), Lists.<ParsingError> newArrayList());
            }
        } catch (CSARVersionAlreadyExistsException e) {
            // a previously-imported archive version is not an error here
            log.info("Skipping location archive import as the released version already exists in the repository.");
        }

        // Set the configuration for the provider
        OrchestratorConfiguration orchestratorConfiguration = orchestratorConfigurationService.getConfigurationOrFail(orchestrator.getId());
        try {
            Object configuration = orchestratorConfigurationService.configurationAsValidObject(orchestrator.getId(),
                    orchestratorConfiguration.getConfiguration());
            orchestratorInstance.setConfiguration(configuration);
        } catch (IOException e) {
            throw new PluginConfigurationException("Failed convert configuration json in object.", e);
        }

        // connect the orchestrator
        orchestratorInstance.init(deploymentService.getCloudActiveDeploymentContexts(orchestrator.getId()));
        // register the orchestrator instance to be polled for updates
        orchestratorPluginService.register(orchestrator.getId(), orchestratorInstance);
        orchestrator.setState(OrchestratorState.CONNECTED);
        alienDAO.save(orchestrator);
        if (orchestratorInstance instanceof ILocationAutoConfigurer) {
            // trigger locations auto-configurations
            locationService.autoConfigure(orchestrator, (ILocationAutoConfigurer) orchestratorInstance);
        }
    }

    /**
     * Disable an orchestrator.
     *
     * @param orchestrator
     *            The orchestrator to disable.
     * @param force
     *            If true the orchestrator is disabled even if some deployments are currently running.
     */
    public synchronized boolean disable(Orchestrator orchestrator, boolean force) {
        if (!force) {
            // query for a single active (endDate == null) deployment on this orchestrator
            QueryHelper.SearchQueryHelperBuilder searchQueryHelperBuilder = queryHelper.buildSearchQuery(alienDAO.getIndexForType(Deployment.class))
                    .types(Deployment.class).filters(MapUtil.newHashMap(new String[] { "orchestratorId", "endDate" },
                            new String[][] { new String[] { orchestrator.getId() }, new String[] { null } }))
                    .fieldSort("_timestamp", true);
            // If there is at least one active deployment.
            GetMultipleDataResult<Object> result = alienDAO.search(searchQueryHelperBuilder, 0, 1);

            // TODO place a lock to avoid deployments during disablement of the orchestrator.
            if (result.getData().length > 0) {
                // refuse to disable while deployments are active (unless forced)
                return false;
            }
        }
        try {
            // un-register the orchestrator.
            IOrchestratorPlugin orchestratorInstance = (IOrchestratorPlugin) orchestratorPluginService.unregister(orchestrator.getId());
            if (orchestratorInstance != null) {
                IOrchestratorPluginFactory orchestratorFactory = orchestratorService.getPluginFactory(orchestrator);
                orchestratorFactory.destroy(orchestratorInstance);
            }
        } catch (Exception e) {
            log.info("Unable to destroy orchestrator, it may not be created yet", e);
        } finally {
            // Mark the orchestrator as disabled
            orchestrator.setState(OrchestratorState.DISABLED);
            alienDAO.save(orchestrator);
        }
        return true;
    }
}
def auto_delete(self, state : int, epoch : int):
    """Remove stale checkpoint files for ``state`` up to (excluding) ``epoch``.

    Every fifth checkpoint (epoch index divisible by 5) is kept as a
    long-term snapshot; every other checkpoint file that exists on disk
    is deleted. Checkpoints at or after ``epoch`` are never touched.
    """
    stale_epochs = (i for i in range(1, epoch) if i % 5 != 0)
    for old_epoch in stale_epochs:
        ckp_path = self.get_ckp_path(state, old_epoch)
        if os.path.isfile(ckp_path):
            os.remove(ckp_path)
Communicating in organizations, Part IV: E-mails and one-on-one meetings. E-mail messages and face-to-face meetings are both important methods of communication, and each has its own advantages and disadvantages. Conducting informal one-on-one meetings well is essential to the success of a leader or manager. However, the follower can also do many things to optimize the success of a one-on-one discussion with his or her superior.
/* AUTO-GENERATED FILE. DO NOT MODIFY. * * This class was automatically generated by the * aapt tool from the resource data it found. It * should not be modified by hand. */ package b4a.example.galleryfinalres; public final class R { public static final class anim { public static final int gf_flip_horizontal_in=0x7f040000; public static final int gf_flip_horizontal_out=0x7f040001; } public static final class attr { /** <p>Must be a dimension value, which is a floating point number appended with a unit such as "<code>14.5sp</code>". Available units are: px (pixels), dp (density-independent pixels), sp (scaled pixels based on preferred font size), in (inches), mm (millimeters). <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. */ public static final int dividerWidth=0x7f010004; /** <p>Must be a color value, in the form of "<code>#<i>rgb</i></code>", "<code>#<i>argb</i></code>", "<code>#<i>rrggbb</i></code>", or "<code>#<i>aarrggbb</i></code>". <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. */ public static final int fabColorNormal=0x7f010001; /** <p>Must be a color value, in the form of "<code>#<i>rgb</i></code>", "<code>#<i>argb</i></code>", "<code>#<i>rrggbb</i></code>", or "<code>#<i>aarrggbb</i></code>". <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. 
*/ public static final int fabColorPressed=0x7f010000; /** <p>Must be a reference to another resource, in the form "<code>@[+][<i>package</i>:]<i>type</i>:<i>name</i></code>" or to a theme attribute in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>". */ public static final int fabIcon=0x7f010002; /** <p>Must be a string value, using '\\;' to escape characters such as '\\n' or '\\uxxxx' for a unicode character. <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. */ public static final int fabTitle=0x7f010003; } public static final class dimen { public static final int fab_icon_size=0x7f050002; public static final int fab_shadow_offset=0x7f050003; public static final int fab_shadow_radius=0x7f050004; public static final int fab_size_normal=0x7f050001; public static final int fab_stroke_width=0x7f050005; public static final int gf_title_bar_height=0x7f050000; } public static final class drawable { public static final int bg_gf_crop_texture=0x7f020000; public static final int gf_ic_preview=0x7f020001; public static final int ic_delete_photo=0x7f020002; public static final int ic_folder_check=0x7f020003; public static final int ic_gf_back=0x7f020004; public static final int ic_gf_camera=0x7f020005; public static final int ic_gf_clear=0x7f020006; public static final int ic_gf_crop=0x7f020007; public static final int ic_gf_crop_tile=0x7f020008; public static final int ic_gf_default_photo=0x7f020009; public static final int ic_gf_done=0x7f02000a; public static final int ic_gf_preview=0x7f02000b; public static final int ic_gf_rotate=0x7f02000c; public static final int ic_gf_triangle_arrow=0x7f02000d; public static final int icon=0x7f02000e; public static final int pic_load_more=0x7f02000f; } public static final class id { public static final int fab_crop=0x7f06000b; 
public static final int fab_label=0x7f060000; public static final int fab_ok=0x7f060016; public static final int gv_photo_list=0x7f060015; public static final int iv_back=0x7f060002; public static final int iv_check=0x7f060020; public static final int iv_clear=0x7f060013; public static final int iv_cover=0x7f06001b; public static final int iv_crop=0x7f060005; public static final int iv_crop_photo=0x7f060009; public static final int iv_delete=0x7f06001a; public static final int iv_folder_arrow=0x7f060012; public static final int iv_folder_check=0x7f06001e; public static final int iv_photo=0x7f060019; public static final int iv_preview=0x7f060004; public static final int iv_rotate=0x7f060006; public static final int iv_source_photo=0x7f060008; public static final int iv_take_photo=0x7f060007; public static final int iv_thumb=0x7f06001f; public static final int ll_folder_panel=0x7f060017; public static final int ll_gallery=0x7f06000c; public static final int ll_title=0x7f060010; public static final int lv_folder_list=0x7f060018; public static final int lv_gallery=0x7f06000d; public static final int titlebar=0x7f060001; public static final int tv_choose_count=0x7f060014; public static final int tv_empty_view=0x7f06000a; public static final int tv_folder_name=0x7f06001c; public static final int tv_indicator=0x7f06000e; public static final int tv_photo_count=0x7f06001d; public static final int tv_sub_title=0x7f060011; public static final int tv_title=0x7f060003; public static final int vp_pager=0x7f06000f; } public static final class layout { public static final int gf_activity_photo_edit=0x7f030000; public static final int gf_activity_photo_preview=0x7f030001; public static final int gf_activity_photo_select=0x7f030002; public static final int gf_adapter_edit_list=0x7f030003; public static final int gf_adapter_folder_list_item=0x7f030004; public static final int gf_adapter_photo_list_item=0x7f030005; public static final int gf_adapter_preview_viewpgaer_item=0x7f030006; 
} public static final class string { public static final int all_photo=0x7f070001; public static final int crop_fail=0x7f070007; public static final int crop_suc=0x7f070006; public static final int edit_letoff_photo_format=0x7f070010; public static final int empty_sdcard=0x7f07000b; public static final int folder_photo_size=0x7f070003; public static final int gallery=0x7f070000; public static final int maxsize_zero_tip=0x7f070015; public static final int no_photo=0x7f070008; public static final int open_gallery_fail=0x7f07000c; public static final int permissions_denied_tips=0x7f070014; public static final int permissions_tips_gallery=0x7f070013; public static final int photo_crop=0x7f070005; public static final int photo_edit=0x7f070004; public static final int photo_list_empty=0x7f070011; public static final int please_reopen_gf=0x7f07000f; public static final int preview=0x7f070012; public static final int saving=0x7f07000e; public static final int select_max_tips=0x7f07000a; public static final int selected=0x7f070002; public static final int take_photo_fail=0x7f07000d; public static final int waiting=0x7f070009; } public static final class styleable { /** <attr name="colorTheme" format="reference|color"/> <attr name="colorThemeDark" format="reference|color"/> <p>Includes the following attributes:</p> <table> <colgroup align="left" /> <colgroup align="left" /> <tr><th>Attribute</th><th>Description</th></tr> <tr><td><code>{@link #GFFloatingActionButton_fabColorNormal b4a.example.galleryfinalres:fabColorNormal}</code></td><td></td></tr> <tr><td><code>{@link #GFFloatingActionButton_fabColorPressed b4a.example.galleryfinalres:fabColorPressed}</code></td><td></td></tr> <tr><td><code>{@link #GFFloatingActionButton_fabIcon b4a.example.galleryfinalres:fabIcon}</code></td><td></td></tr> <tr><td><code>{@link #GFFloatingActionButton_fabTitle b4a.example.galleryfinalres:fabTitle}</code></td><td></td></tr> </table> @see #GFFloatingActionButton_fabColorNormal @see 
#GFFloatingActionButton_fabColorPressed @see #GFFloatingActionButton_fabIcon @see #GFFloatingActionButton_fabTitle */ public static final int[] GFFloatingActionButton = { 0x7f010000, 0x7f010001, 0x7f010002, 0x7f010003 }; /** <p>This symbol is the offset where the {@link b4a.example.galleryfinalres.R.attr#fabColorNormal} attribute's value can be found in the {@link #GFFloatingActionButton} array. <p>Must be a color value, in the form of "<code>#<i>rgb</i></code>", "<code>#<i>argb</i></code>", "<code>#<i>rrggbb</i></code>", or "<code>#<i>aarrggbb</i></code>". <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. @attr name b4a.example.galleryfinalres:fabColorNormal */ public static final int GFFloatingActionButton_fabColorNormal = 1; /** <p>This symbol is the offset where the {@link b4a.example.galleryfinalres.R.attr#fabColorPressed} attribute's value can be found in the {@link #GFFloatingActionButton} array. <p>Must be a color value, in the form of "<code>#<i>rgb</i></code>", "<code>#<i>argb</i></code>", "<code>#<i>rrggbb</i></code>", or "<code>#<i>aarrggbb</i></code>". <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. @attr name b4a.example.galleryfinalres:fabColorPressed */ public static final int GFFloatingActionButton_fabColorPressed = 0; /** <p>This symbol is the offset where the {@link b4a.example.galleryfinalres.R.attr#fabIcon} attribute's value can be found in the {@link #GFFloatingActionButton} array. 
<p>Must be a reference to another resource, in the form "<code>@[+][<i>package</i>:]<i>type</i>:<i>name</i></code>" or to a theme attribute in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>". @attr name b4a.example.galleryfinalres:fabIcon */ public static final int GFFloatingActionButton_fabIcon = 2; /** <p>This symbol is the offset where the {@link b4a.example.galleryfinalres.R.attr#fabTitle} attribute's value can be found in the {@link #GFFloatingActionButton} array. <p>Must be a string value, using '\\;' to escape characters such as '\\n' or '\\uxxxx' for a unicode character. <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. @attr name b4a.example.galleryfinalres:fabTitle */ public static final int GFFloatingActionButton_fabTitle = 3; /** Attributes that can be used with a HorizontalListView. 
<p>Includes the following attributes:</p> <table> <colgroup align="left" /> <colgroup align="left" /> <tr><th>Attribute</th><th>Description</th></tr> <tr><td><code>{@link #HorizontalListView_android_divider android:divider}</code></td><td></td></tr> <tr><td><code>{@link #HorizontalListView_android_fadingEdgeLength android:fadingEdgeLength}</code></td><td></td></tr> <tr><td><code>{@link #HorizontalListView_android_requiresFadingEdge android:requiresFadingEdge}</code></td><td></td></tr> <tr><td><code>{@link #HorizontalListView_dividerWidth b4a.example.galleryfinalres:dividerWidth}</code></td><td></td></tr> </table> @see #HorizontalListView_android_divider @see #HorizontalListView_android_fadingEdgeLength @see #HorizontalListView_android_requiresFadingEdge @see #HorizontalListView_dividerWidth */ public static final int[] HorizontalListView = { 0x010100e0, 0x01010129, 0x010103a5, 0x7f010004 }; /** <p>This symbol is the offset where the {@link android.R.attr#divider} attribute's value can be found in the {@link #HorizontalListView} array. @attr name android:divider */ public static final int HorizontalListView_android_divider = 1; /** <p>This symbol is the offset where the {@link android.R.attr#fadingEdgeLength} attribute's value can be found in the {@link #HorizontalListView} array. @attr name android:fadingEdgeLength */ public static final int HorizontalListView_android_fadingEdgeLength = 0; /** <p>This symbol is the offset where the {@link android.R.attr#requiresFadingEdge} attribute's value can be found in the {@link #HorizontalListView} array. @attr name android:requiresFadingEdge */ public static final int HorizontalListView_android_requiresFadingEdge = 2; /** <p>This symbol is the offset where the {@link b4a.example.galleryfinalres.R.attr#dividerWidth} attribute's value can be found in the {@link #HorizontalListView} array. <p>Must be a dimension value, which is a floating point number appended with a unit such as "<code>14.5sp</code>". 
Available units are: px (pixels), dp (density-independent pixels), sp (scaled pixels based on preferred font size), in (inches), mm (millimeters). <p>This may also be a reference to a resource (in the form "<code>@[<i>package</i>:]<i>type</i>:<i>name</i></code>") or theme attribute (in the form "<code>?[<i>package</i>:][<i>type</i>:]<i>name</i></code>") containing a value of this type. @attr name b4a.example.galleryfinalres:dividerWidth */ public static final int HorizontalListView_dividerWidth = 3; }; }
def assert_same_strategy(s1, s2):
    """Fail the running test unless ``s1`` and ``s2`` are equivalent strategies.

    Two strategies are considered the same when they are instances of the
    same class and carry identical instance attribute dictionaries.
    """
    __tracebackhide__ = True  # hide this helper frame from pytest tracebacks
    cls_a = s1.__class__
    cls_b = s2.__class__
    if cls_a != cls_b:
        message = (
            "The two strategies have different class\n"
            "First one is: {}\n Second one is: {}\n"
        ).format(cls_a, cls_b)
        pytest.fail(message)
    assert s1.__dict__ == s2.__dict__, "The attributes of the strategies differ"
<filename>controllers/base_controller.go
package controllers

import (
	"fmt"
	"net/http"

	"github.com/jinzhu/gorm"
	"github.com/vonji/vonji-api/api"
	"github.com/vonji/vonji-api/utils"
)

// APIBaseController bundles behavior shared by all concrete API controllers:
// ID validation, access to the database handle, and the HTTP response writer.
type APIBaseController struct {
	ResponseWriter http.ResponseWriter
}

// CheckID validates a resource identifier taken from a request.
// It returns a *utils.HttpError with status 404 when id is zero (the uint
// zero value signals an absent or unparsable ID) and nil otherwise.
// NOTE(review): the message interpolates the zero id ("No request with ID 0
// found") and uses 404 rather than 400 — confirm this is the intended contract.
func (ctrl APIBaseController) CheckID(id uint) *utils.HttpError {
	if id == 0 {
		return &utils.HttpError{ fmt.Sprintf("No request with ID %d found", id), http.StatusNotFound, "" }
	}
	return nil
}

// GetDB returns the gorm database handle held by the global API context.
func (ctrl APIBaseController) GetDB() *gorm.DB {
	return api.GetContext().DB
}

// GetResponseWriter exposes the controller's http.ResponseWriter.
func (ctrl APIBaseController) GetResponseWriter() http.ResponseWriter {
	return ctrl.ResponseWriter
}
<gh_stars>10-100
# -*- coding: utf-8 -*-
# Author: <NAME> <<EMAIL>>
# Copyright: Stateoftheart AI PBC 2020.
'''Keras https://keras.io/ wrapper module'''
from sotaai.cv import utils
import tensorflow.keras as keras
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Dense, GlobalAveragePooling2D
import numpy as np

# Static metadata describing this wrapper's upstream source.
SOURCE_METADATA = {
    'name': 'keras',
    'original_name': 'Keras',
    'url': 'https://keras.io/'
}

# Dataset names loadable through keras.datasets, keyed by task.
DATASETS = {'classification': ['mnist', 'cifar10', 'cifar100', 'fashion_mnist']}

# @author HO
# As of now, only missing EfficientNetBX
#
# Model class names resolvable via getattr(keras.applications, name).
MODELS = {
    'classification': [
        'InceptionResNetV2', 'InceptionV3', 'ResNet101V2', 'ResNet152V2',
        'ResNet50V2', 'VGG16', 'VGG19', 'Xception', 'ResNet50', 'ResNet101',
        'ResNet152', 'DenseNet121', 'DenseNet169', 'DenseNet201',
        'NASNetMobile', 'NASNetLarge', 'MobileNet', 'MobileNetV2'
    ]
}


def load_model(
    model_name,
    pretrained=False,
    alpha=1.0,
    depth_multiplier=1,
    dropout=0.001,
    input_tensor=None,
    input_shape=None,
    # TODO(Hugo)
    # Once standardized input is defined (configs), this param should be put by
    # the end-user
    # As per Keras docs, it is important to set include_top to
    # false to be able to modify model input/output
    include_top=False,
    pooling=None,
    classes=1000,
    classifier_activation='softmax'):
  '''Load a model with specific configuration.

  Args:
    model_name (string): name of the model/algorithm.
    include_top: whether to include the fully-connected layer at the top of
      the network.
    weights: one of None (random initialization), 'imagenet' (pre-training on
      ImageNet), or the path to the weights file to be loaded.
    input_tensor: optional Keras tensor (i.e. output of layers.Input()) to use
      as image input for the model.
    input_shape: optional shape tuple, only to be specified if include_top is
      False (otherwise the input shape has to be (299, 299, 3). It should have
      exactly 3 inputs channels, and width and height should be no smaller
      than 71. E.g. (150, 150, 3) would be one valid value.
    pooling: Optional pooling mode for feature extraction when include_top is
      False. None means that the output of the model will be the 4D tensor
      output of the last convolutional block. avg means that global average
      pooling will be applied to the output of the last convolutional block,
      and thus the output of the model will be a 2D tensor. max means that
      global max pooling will be applied.
    alpha: Controls the width of the network. This is known as the width
      multiplier in the MobileNet paper. - If alpha < 1.0, proportionally
      decreases the number of filters in each layer. - If alpha > 1.0,
      proportionally increases the number of filters in each layer. - If
      alpha = 1, default number of filters from the paper are used at each
      layer. Default to 1.0.
    depth_multiplier: Depth multiplier for depthwise convolution. This is
      called the resolution multiplier in the MobileNet paper. Default to 1.0.
    dropout: Dropout rate. Default to 0.001.
    classes: optional number of classes to classify images into, only to be
      specified if include_top is True, and if no weights argument is
      specified.
    classifier_activation: A str or callable. The activation function to use
      on the 'top' layer. Ignored unless include_top=True. Set
      classifier_activation=None to return the logits of the 'top' layer.

  Returns:
    tensorflow.python.keras model
  '''
  # pretrained toggles between ImageNet weights and random initialization.
  if pretrained:
    weights = 'imagenet'
  else:
    weights = None

  # Load the models.\model_name\ class
  trainer = getattr(keras.applications, model_name)

  # Load the model and return
  # Each keras.applications constructor accepts a slightly different keyword
  # set, so the call is dispatched on the model name:
  # - the first group rejects classifier_activation
  # - MobileNet additionally takes alpha/depth_multiplier/dropout
  # - MobileNetV2 takes alpha only
  if model_name in [
      'ResNet50', 'ResNet101', 'ResNet152', 'DenseNet121', 'DenseNet169',
      'DenseNet201', 'NASNetMobile', 'NASNetLarge'
  ]:
    model = trainer(weights=weights,
                    input_tensor=input_tensor,
                    input_shape=input_shape,
                    include_top=include_top,
                    pooling=pooling,
                    classes=classes)
  elif model_name == 'MobileNet':
    model = trainer(weights=weights,
                    alpha=alpha,
                    depth_multiplier=depth_multiplier,
                    dropout=dropout,
                    input_tensor=input_tensor,
                    input_shape=input_shape,
                    include_top=include_top,
                    pooling=pooling,
                    classes=classes)
  elif model_name == 'MobileNetV2':
    model = trainer(weights=weights,
                    alpha=alpha,
                    input_tensor=input_tensor,
                    input_shape=input_shape,
                    include_top=include_top,
                    pooling=pooling,
                    classes=classes)
  else:
    model = trainer(weights=weights,
                    input_tensor=input_tensor,
                    input_shape=input_shape,
                    include_top=include_top,
                    pooling=pooling,
                    classes=classes,
                    classifier_activation=classifier_activation)

  return model


def load_dataset(dataset_name, download=True):
  '''Load a given dataset with all its splits

  Args:
    dataset_name (string): name of dataset
    download: temporal flag to skip download and only create the dataset
      instance with no data (used for JSONs creation)

  Returns:
    Dict with keys {'train':(x_train, y_train), 'test':(x_test,y_test),
    Each entry is a numpy array
  '''
  if download:
    dataset = getattr(keras.datasets, dataset_name)
    dataset = dataset.load_data()
    dataset_dict = {'train': dataset[0], 'test': dataset[1]}
  else:
    # Metadata-only stub; note 'source' is reported as 'tensorflow' here —
    # TODO confirm whether it should be 'keras' for consistency with
    # SOURCE_METADATA.
    return {'train': {'name': dataset_name, 'source': 'tensorflow'}}
  return dataset_dict


def model_to_dataset(cv_model, cv_dataset):
  '''If compatible, adjust model and dataset so that they can be executed
  against each other

  Args:
    cv_model: an abstracted cv model whose source is Keras
    cv_dataset: an abstracted cv dataset

  Returns:
    cv_model: the abstracted cv model adjusted to be executed against
      cv_dataset
    cv_dataset: the abstracted cv dataset adjust to be executed against
      cv_model
  '''
  print('Making compatible {} with {}...'.format(cv_model.name,
                                                 cv_dataset.name))

  # Case 1:
  # All Keras models require 3 channels, thus we have to reshape the dataset
  # if less than 3 channels
  are_channels_compatible = len(cv_dataset.shape) == len(
      cv_model.original_input_shape
  ) and cv_dataset.shape[-1] == cv_model.original_input_shape[-1]

  if not are_channels_compatible:
    # A 2-entry shape is assumed to be (height, width) with no channel axis;
    # otherwise the trailing channel count is replaced with 3.
    if len(cv_dataset.shape) == 2:
      fixed_channels_shape = cv_dataset.shape + (3,)
    else:
      fixed_channels_shape = cv_dataset.shape[:2] + (3,)
    print(' => Dataset Channels from {} to {}'.format(cv_dataset.shape,
                                                      fixed_channels_shape))
    cv_dataset.shape = fixed_channels_shape

  # Case 2:
  # As per Keras documentation, some models require a minimum width and height
  # for the input shape. For those models, we make sure the dataset meet those
  # minimums
  min_input_shape = None
  if cv_model.name in utils.IMAGE_MINS:
    min_input_shape = (utils.IMAGE_MINS[cv_model.name],
                       utils.IMAGE_MINS[cv_model.name])

  # TODO(Hugo)
  # When datasets have a None width/height is not possible to globally know
  # whether it matches the min_input_shape since this has to be done per
  # image. We have to take this into account in the image_preprocessing_callback
  has_min_shape = False
  if cv_dataset.shape[:2] != (None, None):
    has_min_shape = min_input_shape and cv_dataset.shape[:2] < min_input_shape

  if has_min_shape:
    original_dataset_shape = cv_dataset.shape
    cv_dataset.shape = min_input_shape + (3,)
    print(' => Dataset minimum shape from {} to {}'.format(
        original_dataset_shape, cv_dataset.shape))

  # Case 3:
  # If dataset and model input are not compatible, we have to (1) reshape
  # the dataset shape a bit more or (2) change the model input layer
  is_input_compatible = utils.compare_shapes(cv_model.original_input_shape,
                                             cv_dataset.shape)
  if not is_input_compatible:
    print(' => Model Input from {} to {}'.format(cv_model.original_input_shape,
                                                 cv_dataset.shape))
    # Rebuild the raw model with an input layer matching the dataset shape.
    input_tensor = Input(shape=cv_dataset.shape)
    raw_model = load_model(
        cv_model.name,
        input_tensor=input_tensor,
        # As per Keras docs, it is important to set include_top to
        # false to be able to modify model input/output
        include_top=False)
    cv_model.update_raw_model(raw_model)

  # Case 4:
  # If output is not compatible with dataset classes, we have to change the
  # model output layer
  is_output_compatible = utils.compare_shapes(cv_model.original_output_shape,
                                              cv_dataset.classes_shape)
  if not is_output_compatible:
    print(' => Model Output from {} to {}'.format(
        cv_model.original_output_shape, cv_dataset.classes_shape))
    # TODO(Hugo)
    # Further review this.
    # As read in some Keras blogs for Transfer Learning, there are 3 possible
    # ways to change Keras output model to a different number of classes:
    # - Use classes parameter, however this only work when include_top=true
    # which requires a fixed input shape which is not usually the case.
    # Therefore, this way was discarded.
    # - Use Keras function API to add a new Flatten layer after the
    # last pooling layer in the raw model, then define a new classifier model
    # with a Dense fully connected layer and an output layer that will predict
    # the probability for dataset classes. This one did not work, it has issues
    # when model input shape is dynamic e.g. (None,None,3)
    # - Add an Average Pooling Layer at the end, then define a classifier with
    # a Dense fully connected layer and an output layer that will predict the
    # probability for the dataset classes. This is the one Keras uses in
    # their models when include_top=true and classes are given. This is the one
    # that worked well and the method used as of now to change model output,
    # however still not sure if it is the best way.
    avg_pooling_layer = GlobalAveragePooling2D(name='avg_pool')(
        cv_model.raw.layers[-1].output)
    output = Dense(cv_dataset.classes_shape[0],
                   activation='softmax')(avg_pooling_layer)
    raw_model = Model(inputs=cv_model.raw.inputs, outputs=output)
    cv_model.update_raw_model(raw_model)

  # Some of the cases above are managed at dataset iterator level, that
  # is why a callback is passed in. The iterator will reshape the dataset items
  # using this callback and thus taking into account the cases above as
  # required by the model.
  def image_preprocessing_callback(image):
    # Per-image fixups deferred from Cases 1 and 2: resize up to the model's
    # minimum, then expand grayscale images to 3 identical channels.
    if has_min_shape:
      image = utils.resize_image(image, min_input_shape)
    if not are_channels_compatible:
      if len(image.shape) == 2:
        image = image.reshape(image.shape + (1,))
      image = np.repeat(image, 3, -1)
    return image

  cv_dataset.set_image_preprocessing(image_preprocessing_callback)

  # Finally, the compatibilized models and dataset are returned
  return cv_model, cv_dataset


class DatasetIterator():
  '''Keras dataset iterator class'''

  def __init__(self, raw) -> None:
    # raw: a (images, labels) tuple as returned by keras.datasets loaders.
    self._raw = raw
    self._iterator = self._create_iterator()
    # Optional per-image hook installed via set_image_preprocessing.
    self._image_preprocessing_callback = None

  def __next__(self):
    '''Get the next item from the dataset in a standardized format.

    Returns:
      A dict with two mandatory keys: the 'image' key which will hold the
      image as a numpy array, and the 'label' key which will hold the label
      as a numpy array as well. The dict might contain other keys depending
      on the nature of the dataset.
    '''
    image = next(self._iterator['image'])
    label = next(self._iterator['label'])
    # Apply model-compatibility fixups (resizing, channel expansion) if set.
    if self._image_preprocessing_callback:
      image = self._image_preprocessing_callback(image)
    return {'image': image, 'label': label}

  def _create_iterator(self):
    '''Create an iterator out of the raw dataset split object. This is the
    Keras iterator being wrapped in our own iterator.

    Returns:
      An object containing iterators for the dataset images and labels
    '''
    # NOTE(review): returns None when _raw is not a tuple — __next__ would
    # then fail on subscripting; confirm all callers pass tuples.
    if isinstance(self._raw, tuple):
      return {'image': iter(self._raw[0]), 'label': iter(self._raw[1])}

  def set_image_preprocessing(self, image_preprocessing_callback):
    # Install the per-image preprocessing hook used by __next__.
    self._image_preprocessing_callback = image_preprocessing_callback
<reponame>datalliance88/tencentcloud-sdk-cpp /* * Copyright (c) 2017-2019 THL A29 Limited, a Tencent company. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef TENCENTCLOUD_TCI_V20190318_TCICLIENT_H_ #define TENCENTCLOUD_TCI_V20190318_TCICLIENT_H_ #include <functional> #include <future> #include <tencentcloud/core/AbstractClient.h> #include <tencentcloud/core/Credential.h> #include <tencentcloud/core/profile/ClientProfile.h> #include <tencentcloud/core/AsyncCallerContext.h> #include <tencentcloud/tci/v20190318/model/AIAssistantRequest.h> #include <tencentcloud/tci/v20190318/model/AIAssistantResponse.h> #include <tencentcloud/tci/v20190318/model/CancelTaskRequest.h> #include <tencentcloud/tci/v20190318/model/CancelTaskResponse.h> #include <tencentcloud/tci/v20190318/model/CheckFacePhotoRequest.h> #include <tencentcloud/tci/v20190318/model/CheckFacePhotoResponse.h> #include <tencentcloud/tci/v20190318/model/CreateFaceRequest.h> #include <tencentcloud/tci/v20190318/model/CreateFaceResponse.h> #include <tencentcloud/tci/v20190318/model/CreateLibraryRequest.h> #include <tencentcloud/tci/v20190318/model/CreateLibraryResponse.h> #include <tencentcloud/tci/v20190318/model/CreatePersonRequest.h> #include <tencentcloud/tci/v20190318/model/CreatePersonResponse.h> #include <tencentcloud/tci/v20190318/model/CreateVocabRequest.h> #include <tencentcloud/tci/v20190318/model/CreateVocabResponse.h> #include <tencentcloud/tci/v20190318/model/CreateVocabLibRequest.h> 
#include <tencentcloud/tci/v20190318/model/CreateVocabLibResponse.h> #include <tencentcloud/tci/v20190318/model/DeleteFaceRequest.h> #include <tencentcloud/tci/v20190318/model/DeleteFaceResponse.h> #include <tencentcloud/tci/v20190318/model/DeleteLibraryRequest.h> #include <tencentcloud/tci/v20190318/model/DeleteLibraryResponse.h> #include <tencentcloud/tci/v20190318/model/DeletePersonRequest.h> #include <tencentcloud/tci/v20190318/model/DeletePersonResponse.h> #include <tencentcloud/tci/v20190318/model/DeleteVocabRequest.h> #include <tencentcloud/tci/v20190318/model/DeleteVocabResponse.h> #include <tencentcloud/tci/v20190318/model/DeleteVocabLibRequest.h> #include <tencentcloud/tci/v20190318/model/DeleteVocabLibResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeAITaskResultRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeAITaskResultResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeAttendanceResultRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeAttendanceResultResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeAudioTaskRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeAudioTaskResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeConversationTaskRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeConversationTaskResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeHighlightResultRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeHighlightResultResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeImageTaskRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeImageTaskResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeImageTaskStatisticRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeImageTaskStatisticResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeLibrariesRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeLibrariesResponse.h> #include 
<tencentcloud/tci/v20190318/model/DescribePersonRequest.h> #include <tencentcloud/tci/v20190318/model/DescribePersonResponse.h> #include <tencentcloud/tci/v20190318/model/DescribePersonsRequest.h> #include <tencentcloud/tci/v20190318/model/DescribePersonsResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeVocabRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeVocabResponse.h> #include <tencentcloud/tci/v20190318/model/DescribeVocabLibRequest.h> #include <tencentcloud/tci/v20190318/model/DescribeVocabLibResponse.h> #include <tencentcloud/tci/v20190318/model/ModifyLibraryRequest.h> #include <tencentcloud/tci/v20190318/model/ModifyLibraryResponse.h> #include <tencentcloud/tci/v20190318/model/ModifyPersonRequest.h> #include <tencentcloud/tci/v20190318/model/ModifyPersonResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitAudioTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitAudioTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitCheckAttendanceTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitCheckAttendanceTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitConversationTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitConversationTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitDoubleVideoHighlightsRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitDoubleVideoHighlightsResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitFullBodyClassTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitFullBodyClassTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitHighlightsRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitHighlightsResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitImageTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitImageTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitOneByOneClassTaskRequest.h> #include 
<tencentcloud/tci/v20190318/model/SubmitOneByOneClassTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitOpenClassTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitOpenClassTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitPartialBodyClassTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitPartialBodyClassTaskResponse.h> #include <tencentcloud/tci/v20190318/model/SubmitTraditionalClassTaskRequest.h> #include <tencentcloud/tci/v20190318/model/SubmitTraditionalClassTaskResponse.h> #include <tencentcloud/tci/v20190318/model/TransmitAudioStreamRequest.h> #include <tencentcloud/tci/v20190318/model/TransmitAudioStreamResponse.h> namespace TencentCloud { namespace Tci { namespace V20190318 { class TciClient : public AbstractClient { public: TciClient(const Credential &credential, const std::string &region); TciClient(const Credential &credential, const std::string &region, const ClientProfile &profile); typedef Outcome<Error, Model::AIAssistantResponse> AIAssistantOutcome; typedef std::future<AIAssistantOutcome> AIAssistantOutcomeCallable; typedef std::function<void(const TciClient*, const Model::AIAssistantRequest&, AIAssistantOutcome, const std::shared_ptr<const AsyncCallerContext>&)> AIAssistantAsyncHandler; typedef Outcome<Error, Model::CancelTaskResponse> CancelTaskOutcome; typedef std::future<CancelTaskOutcome> CancelTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CancelTaskRequest&, CancelTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CancelTaskAsyncHandler; typedef Outcome<Error, Model::CheckFacePhotoResponse> CheckFacePhotoOutcome; typedef std::future<CheckFacePhotoOutcome> CheckFacePhotoOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CheckFacePhotoRequest&, CheckFacePhotoOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CheckFacePhotoAsyncHandler; typedef Outcome<Error, Model::CreateFaceResponse> CreateFaceOutcome; typedef 
std::future<CreateFaceOutcome> CreateFaceOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CreateFaceRequest&, CreateFaceOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CreateFaceAsyncHandler; typedef Outcome<Error, Model::CreateLibraryResponse> CreateLibraryOutcome; typedef std::future<CreateLibraryOutcome> CreateLibraryOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CreateLibraryRequest&, CreateLibraryOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CreateLibraryAsyncHandler; typedef Outcome<Error, Model::CreatePersonResponse> CreatePersonOutcome; typedef std::future<CreatePersonOutcome> CreatePersonOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CreatePersonRequest&, CreatePersonOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CreatePersonAsyncHandler; typedef Outcome<Error, Model::CreateVocabResponse> CreateVocabOutcome; typedef std::future<CreateVocabOutcome> CreateVocabOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CreateVocabRequest&, CreateVocabOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CreateVocabAsyncHandler; typedef Outcome<Error, Model::CreateVocabLibResponse> CreateVocabLibOutcome; typedef std::future<CreateVocabLibOutcome> CreateVocabLibOutcomeCallable; typedef std::function<void(const TciClient*, const Model::CreateVocabLibRequest&, CreateVocabLibOutcome, const std::shared_ptr<const AsyncCallerContext>&)> CreateVocabLibAsyncHandler; typedef Outcome<Error, Model::DeleteFaceResponse> DeleteFaceOutcome; typedef std::future<DeleteFaceOutcome> DeleteFaceOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DeleteFaceRequest&, DeleteFaceOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DeleteFaceAsyncHandler; typedef Outcome<Error, Model::DeleteLibraryResponse> DeleteLibraryOutcome; typedef std::future<DeleteLibraryOutcome> DeleteLibraryOutcomeCallable; typedef 
std::function<void(const TciClient*, const Model::DeleteLibraryRequest&, DeleteLibraryOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DeleteLibraryAsyncHandler; typedef Outcome<Error, Model::DeletePersonResponse> DeletePersonOutcome; typedef std::future<DeletePersonOutcome> DeletePersonOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DeletePersonRequest&, DeletePersonOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DeletePersonAsyncHandler; typedef Outcome<Error, Model::DeleteVocabResponse> DeleteVocabOutcome; typedef std::future<DeleteVocabOutcome> DeleteVocabOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DeleteVocabRequest&, DeleteVocabOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DeleteVocabAsyncHandler; typedef Outcome<Error, Model::DeleteVocabLibResponse> DeleteVocabLibOutcome; typedef std::future<DeleteVocabLibOutcome> DeleteVocabLibOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DeleteVocabLibRequest&, DeleteVocabLibOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DeleteVocabLibAsyncHandler; typedef Outcome<Error, Model::DescribeAITaskResultResponse> DescribeAITaskResultOutcome; typedef std::future<DescribeAITaskResultOutcome> DescribeAITaskResultOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeAITaskResultRequest&, DescribeAITaskResultOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeAITaskResultAsyncHandler; typedef Outcome<Error, Model::DescribeAttendanceResultResponse> DescribeAttendanceResultOutcome; typedef std::future<DescribeAttendanceResultOutcome> DescribeAttendanceResultOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeAttendanceResultRequest&, DescribeAttendanceResultOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeAttendanceResultAsyncHandler; typedef Outcome<Error, Model::DescribeAudioTaskResponse> 
DescribeAudioTaskOutcome; typedef std::future<DescribeAudioTaskOutcome> DescribeAudioTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeAudioTaskRequest&, DescribeAudioTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeAudioTaskAsyncHandler; typedef Outcome<Error, Model::DescribeConversationTaskResponse> DescribeConversationTaskOutcome; typedef std::future<DescribeConversationTaskOutcome> DescribeConversationTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeConversationTaskRequest&, DescribeConversationTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeConversationTaskAsyncHandler; typedef Outcome<Error, Model::DescribeHighlightResultResponse> DescribeHighlightResultOutcome; typedef std::future<DescribeHighlightResultOutcome> DescribeHighlightResultOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeHighlightResultRequest&, DescribeHighlightResultOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeHighlightResultAsyncHandler; typedef Outcome<Error, Model::DescribeImageTaskResponse> DescribeImageTaskOutcome; typedef std::future<DescribeImageTaskOutcome> DescribeImageTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeImageTaskRequest&, DescribeImageTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeImageTaskAsyncHandler; typedef Outcome<Error, Model::DescribeImageTaskStatisticResponse> DescribeImageTaskStatisticOutcome; typedef std::future<DescribeImageTaskStatisticOutcome> DescribeImageTaskStatisticOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeImageTaskStatisticRequest&, DescribeImageTaskStatisticOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeImageTaskStatisticAsyncHandler; typedef Outcome<Error, Model::DescribeLibrariesResponse> DescribeLibrariesOutcome; typedef 
std::future<DescribeLibrariesOutcome> DescribeLibrariesOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeLibrariesRequest&, DescribeLibrariesOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeLibrariesAsyncHandler; typedef Outcome<Error, Model::DescribePersonResponse> DescribePersonOutcome; typedef std::future<DescribePersonOutcome> DescribePersonOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribePersonRequest&, DescribePersonOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribePersonAsyncHandler; typedef Outcome<Error, Model::DescribePersonsResponse> DescribePersonsOutcome; typedef std::future<DescribePersonsOutcome> DescribePersonsOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribePersonsRequest&, DescribePersonsOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribePersonsAsyncHandler; typedef Outcome<Error, Model::DescribeVocabResponse> DescribeVocabOutcome; typedef std::future<DescribeVocabOutcome> DescribeVocabOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeVocabRequest&, DescribeVocabOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeVocabAsyncHandler; typedef Outcome<Error, Model::DescribeVocabLibResponse> DescribeVocabLibOutcome; typedef std::future<DescribeVocabLibOutcome> DescribeVocabLibOutcomeCallable; typedef std::function<void(const TciClient*, const Model::DescribeVocabLibRequest&, DescribeVocabLibOutcome, const std::shared_ptr<const AsyncCallerContext>&)> DescribeVocabLibAsyncHandler; typedef Outcome<Error, Model::ModifyLibraryResponse> ModifyLibraryOutcome; typedef std::future<ModifyLibraryOutcome> ModifyLibraryOutcomeCallable; typedef std::function<void(const TciClient*, const Model::ModifyLibraryRequest&, ModifyLibraryOutcome, const std::shared_ptr<const AsyncCallerContext>&)> ModifyLibraryAsyncHandler; typedef Outcome<Error, Model::ModifyPersonResponse> 
ModifyPersonOutcome; typedef std::future<ModifyPersonOutcome> ModifyPersonOutcomeCallable; typedef std::function<void(const TciClient*, const Model::ModifyPersonRequest&, ModifyPersonOutcome, const std::shared_ptr<const AsyncCallerContext>&)> ModifyPersonAsyncHandler; typedef Outcome<Error, Model::SubmitAudioTaskResponse> SubmitAudioTaskOutcome; typedef std::future<SubmitAudioTaskOutcome> SubmitAudioTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitAudioTaskRequest&, SubmitAudioTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitAudioTaskAsyncHandler; typedef Outcome<Error, Model::SubmitCheckAttendanceTaskResponse> SubmitCheckAttendanceTaskOutcome; typedef std::future<SubmitCheckAttendanceTaskOutcome> SubmitCheckAttendanceTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitCheckAttendanceTaskRequest&, SubmitCheckAttendanceTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitCheckAttendanceTaskAsyncHandler; typedef Outcome<Error, Model::SubmitConversationTaskResponse> SubmitConversationTaskOutcome; typedef std::future<SubmitConversationTaskOutcome> SubmitConversationTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitConversationTaskRequest&, SubmitConversationTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitConversationTaskAsyncHandler; typedef Outcome<Error, Model::SubmitDoubleVideoHighlightsResponse> SubmitDoubleVideoHighlightsOutcome; typedef std::future<SubmitDoubleVideoHighlightsOutcome> SubmitDoubleVideoHighlightsOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitDoubleVideoHighlightsRequest&, SubmitDoubleVideoHighlightsOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitDoubleVideoHighlightsAsyncHandler; typedef Outcome<Error, Model::SubmitFullBodyClassTaskResponse> SubmitFullBodyClassTaskOutcome; typedef std::future<SubmitFullBodyClassTaskOutcome> 
SubmitFullBodyClassTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitFullBodyClassTaskRequest&, SubmitFullBodyClassTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitFullBodyClassTaskAsyncHandler; typedef Outcome<Error, Model::SubmitHighlightsResponse> SubmitHighlightsOutcome; typedef std::future<SubmitHighlightsOutcome> SubmitHighlightsOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitHighlightsRequest&, SubmitHighlightsOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitHighlightsAsyncHandler; typedef Outcome<Error, Model::SubmitImageTaskResponse> SubmitImageTaskOutcome; typedef std::future<SubmitImageTaskOutcome> SubmitImageTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitImageTaskRequest&, SubmitImageTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitImageTaskAsyncHandler; typedef Outcome<Error, Model::SubmitOneByOneClassTaskResponse> SubmitOneByOneClassTaskOutcome; typedef std::future<SubmitOneByOneClassTaskOutcome> SubmitOneByOneClassTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitOneByOneClassTaskRequest&, SubmitOneByOneClassTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitOneByOneClassTaskAsyncHandler; typedef Outcome<Error, Model::SubmitOpenClassTaskResponse> SubmitOpenClassTaskOutcome; typedef std::future<SubmitOpenClassTaskOutcome> SubmitOpenClassTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitOpenClassTaskRequest&, SubmitOpenClassTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitOpenClassTaskAsyncHandler; typedef Outcome<Error, Model::SubmitPartialBodyClassTaskResponse> SubmitPartialBodyClassTaskOutcome; typedef std::future<SubmitPartialBodyClassTaskOutcome> SubmitPartialBodyClassTaskOutcomeCallable; typedef std::function<void(const TciClient*, const 
Model::SubmitPartialBodyClassTaskRequest&, SubmitPartialBodyClassTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitPartialBodyClassTaskAsyncHandler; typedef Outcome<Error, Model::SubmitTraditionalClassTaskResponse> SubmitTraditionalClassTaskOutcome; typedef std::future<SubmitTraditionalClassTaskOutcome> SubmitTraditionalClassTaskOutcomeCallable; typedef std::function<void(const TciClient*, const Model::SubmitTraditionalClassTaskRequest&, SubmitTraditionalClassTaskOutcome, const std::shared_ptr<const AsyncCallerContext>&)> SubmitTraditionalClassTaskAsyncHandler; typedef Outcome<Error, Model::TransmitAudioStreamResponse> TransmitAudioStreamOutcome; typedef std::future<TransmitAudioStreamOutcome> TransmitAudioStreamOutcomeCallable; typedef std::function<void(const TciClient*, const Model::TransmitAudioStreamRequest&, TransmitAudioStreamOutcome, const std::shared_ptr<const AsyncCallerContext>&)> TransmitAudioStreamAsyncHandler; /** *提供 AI 助教基础版本功能接口 * @param req AIAssistantRequest * @return AIAssistantOutcome */ AIAssistantOutcome AIAssistant(const Model::AIAssistantRequest &request); void AIAssistantAsync(const Model::AIAssistantRequest& request, const AIAssistantAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); AIAssistantOutcomeCallable AIAssistantCallable(const Model::AIAssistantRequest& request); /** *用于取消已经提交的任务,目前只支持图像任务。 * @param req CancelTaskRequest * @return CancelTaskOutcome */ CancelTaskOutcome CancelTask(const Model::CancelTaskRequest &request); void CancelTaskAsync(const Model::CancelTaskRequest& request, const CancelTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CancelTaskOutcomeCallable CancelTaskCallable(const Model::CancelTaskRequest& request); /** *检查人脸图片是否合法 * @param req CheckFacePhotoRequest * @return CheckFacePhotoOutcome */ CheckFacePhotoOutcome CheckFacePhoto(const Model::CheckFacePhotoRequest &request); void CheckFacePhotoAsync(const 
Model::CheckFacePhotoRequest& request, const CheckFacePhotoAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CheckFacePhotoOutcomeCallable CheckFacePhotoCallable(const Model::CheckFacePhotoRequest& request); /** *创建人脸 * @param req CreateFaceRequest * @return CreateFaceOutcome */ CreateFaceOutcome CreateFace(const Model::CreateFaceRequest &request); void CreateFaceAsync(const Model::CreateFaceRequest& request, const CreateFaceAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CreateFaceOutcomeCallable CreateFaceCallable(const Model::CreateFaceRequest& request); /** *创建人员库 * @param req CreateLibraryRequest * @return CreateLibraryOutcome */ CreateLibraryOutcome CreateLibrary(const Model::CreateLibraryRequest &request); void CreateLibraryAsync(const Model::CreateLibraryRequest& request, const CreateLibraryAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CreateLibraryOutcomeCallable CreateLibraryCallable(const Model::CreateLibraryRequest& request); /** *创建人员 * @param req CreatePersonRequest * @return CreatePersonOutcome */ CreatePersonOutcome CreatePerson(const Model::CreatePersonRequest &request); void CreatePersonAsync(const Model::CreatePersonRequest& request, const CreatePersonAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CreatePersonOutcomeCallable CreatePersonCallable(const Model::CreatePersonRequest& request); /** *创建词汇 * @param req CreateVocabRequest * @return CreateVocabOutcome */ CreateVocabOutcome CreateVocab(const Model::CreateVocabRequest &request); void CreateVocabAsync(const Model::CreateVocabRequest& request, const CreateVocabAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CreateVocabOutcomeCallable CreateVocabCallable(const Model::CreateVocabRequest& request); /** *建立词汇库 * @param req CreateVocabLibRequest * @return CreateVocabLibOutcome */ 
CreateVocabLibOutcome CreateVocabLib(const Model::CreateVocabLibRequest &request); void CreateVocabLibAsync(const Model::CreateVocabLibRequest& request, const CreateVocabLibAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); CreateVocabLibOutcomeCallable CreateVocabLibCallable(const Model::CreateVocabLibRequest& request); /** *删除人脸 * @param req DeleteFaceRequest * @return DeleteFaceOutcome */ DeleteFaceOutcome DeleteFace(const Model::DeleteFaceRequest &request); void DeleteFaceAsync(const Model::DeleteFaceRequest& request, const DeleteFaceAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DeleteFaceOutcomeCallable DeleteFaceCallable(const Model::DeleteFaceRequest& request); /** *删除人员库 * @param req DeleteLibraryRequest * @return DeleteLibraryOutcome */ DeleteLibraryOutcome DeleteLibrary(const Model::DeleteLibraryRequest &request); void DeleteLibraryAsync(const Model::DeleteLibraryRequest& request, const DeleteLibraryAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DeleteLibraryOutcomeCallable DeleteLibraryCallable(const Model::DeleteLibraryRequest& request); /** *删除人员 * @param req DeletePersonRequest * @return DeletePersonOutcome */ DeletePersonOutcome DeletePerson(const Model::DeletePersonRequest &request); void DeletePersonAsync(const Model::DeletePersonRequest& request, const DeletePersonAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DeletePersonOutcomeCallable DeletePersonCallable(const Model::DeletePersonRequest& request); /** *删除词汇 * @param req DeleteVocabRequest * @return DeleteVocabOutcome */ DeleteVocabOutcome DeleteVocab(const Model::DeleteVocabRequest &request); void DeleteVocabAsync(const Model::DeleteVocabRequest& request, const DeleteVocabAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DeleteVocabOutcomeCallable DeleteVocabCallable(const 
Model::DeleteVocabRequest& request); /** *删除词汇库 * @param req DeleteVocabLibRequest * @return DeleteVocabLibOutcome */ DeleteVocabLibOutcome DeleteVocabLib(const Model::DeleteVocabLibRequest &request); void DeleteVocabLibAsync(const Model::DeleteVocabLibRequest& request, const DeleteVocabLibAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DeleteVocabLibOutcomeCallable DeleteVocabLibCallable(const Model::DeleteVocabLibRequest& request); /** *获取标准化接口任务结果 * @param req DescribeAITaskResultRequest * @return DescribeAITaskResultOutcome */ DescribeAITaskResultOutcome DescribeAITaskResult(const Model::DescribeAITaskResultRequest &request); void DescribeAITaskResultAsync(const Model::DescribeAITaskResultRequest& request, const DescribeAITaskResultAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeAITaskResultOutcomeCallable DescribeAITaskResultCallable(const Model::DescribeAITaskResultRequest& request); /** *人脸考勤查询结果 * @param req DescribeAttendanceResultRequest * @return DescribeAttendanceResultOutcome */ DescribeAttendanceResultOutcome DescribeAttendanceResult(const Model::DescribeAttendanceResultRequest &request); void DescribeAttendanceResultAsync(const Model::DescribeAttendanceResultRequest& request, const DescribeAttendanceResultAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeAttendanceResultOutcomeCallable DescribeAttendanceResultCallable(const Model::DescribeAttendanceResultRequest& request); /** *音频评估任务信息查询接口,异步查询客户提交的请求的结果。 * @param req DescribeAudioTaskRequest * @return DescribeAudioTaskOutcome */ DescribeAudioTaskOutcome DescribeAudioTask(const Model::DescribeAudioTaskRequest &request); void DescribeAudioTaskAsync(const Model::DescribeAudioTaskRequest& request, const DescribeAudioTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeAudioTaskOutcomeCallable 
DescribeAudioTaskCallable(const Model::DescribeAudioTaskRequest& request); /** *音频对话任务评估任务信息查询接口,异步查询客户提交的请求的结果。 * @param req DescribeConversationTaskRequest * @return DescribeConversationTaskOutcome */ DescribeConversationTaskOutcome DescribeConversationTask(const Model::DescribeConversationTaskRequest &request); void DescribeConversationTaskAsync(const Model::DescribeConversationTaskRequest& request, const DescribeConversationTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeConversationTaskOutcomeCallable DescribeConversationTaskCallable(const Model::DescribeConversationTaskRequest& request); /** *视频精彩集锦结果查询接口,异步查询客户提交的请求的结果。 * @param req DescribeHighlightResultRequest * @return DescribeHighlightResultOutcome */ DescribeHighlightResultOutcome DescribeHighlightResult(const Model::DescribeHighlightResultRequest &request); void DescribeHighlightResultAsync(const Model::DescribeHighlightResultRequest& request, const DescribeHighlightResultAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeHighlightResultOutcomeCallable DescribeHighlightResultCallable(const Model::DescribeHighlightResultRequest& request); /** *拉取任务详情 * @param req DescribeImageTaskRequest * @return DescribeImageTaskOutcome */ DescribeImageTaskOutcome DescribeImageTask(const Model::DescribeImageTaskRequest &request); void DescribeImageTaskAsync(const Model::DescribeImageTaskRequest& request, const DescribeImageTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeImageTaskOutcomeCallable DescribeImageTaskCallable(const Model::DescribeImageTaskRequest& request); /** *获取图像任务统计信息 * @param req DescribeImageTaskStatisticRequest * @return DescribeImageTaskStatisticOutcome */ DescribeImageTaskStatisticOutcome DescribeImageTaskStatistic(const Model::DescribeImageTaskStatisticRequest &request); void DescribeImageTaskStatisticAsync(const 
Model::DescribeImageTaskStatisticRequest& request, const DescribeImageTaskStatisticAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeImageTaskStatisticOutcomeCallable DescribeImageTaskStatisticCallable(const Model::DescribeImageTaskStatisticRequest& request); /** *获取人员库列表 * @param req DescribeLibrariesRequest * @return DescribeLibrariesOutcome */ DescribeLibrariesOutcome DescribeLibraries(const Model::DescribeLibrariesRequest &request); void DescribeLibrariesAsync(const Model::DescribeLibrariesRequest& request, const DescribeLibrariesAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeLibrariesOutcomeCallable DescribeLibrariesCallable(const Model::DescribeLibrariesRequest& request); /** *获取人员详情 * @param req DescribePersonRequest * @return DescribePersonOutcome */ DescribePersonOutcome DescribePerson(const Model::DescribePersonRequest &request); void DescribePersonAsync(const Model::DescribePersonRequest& request, const DescribePersonAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribePersonOutcomeCallable DescribePersonCallable(const Model::DescribePersonRequest& request); /** *拉取人员列表 * @param req DescribePersonsRequest * @return DescribePersonsOutcome */ DescribePersonsOutcome DescribePersons(const Model::DescribePersonsRequest &request); void DescribePersonsAsync(const Model::DescribePersonsRequest& request, const DescribePersonsAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribePersonsOutcomeCallable DescribePersonsCallable(const Model::DescribePersonsRequest& request); /** *查询词汇 * @param req DescribeVocabRequest * @return DescribeVocabOutcome */ DescribeVocabOutcome DescribeVocab(const Model::DescribeVocabRequest &request); void DescribeVocabAsync(const Model::DescribeVocabRequest& request, const DescribeVocabAsyncHandler& handler, const std::shared_ptr<const 
AsyncCallerContext>& context = nullptr); DescribeVocabOutcomeCallable DescribeVocabCallable(const Model::DescribeVocabRequest& request); /** *查询词汇库 * @param req DescribeVocabLibRequest * @return DescribeVocabLibOutcome */ DescribeVocabLibOutcome DescribeVocabLib(const Model::DescribeVocabLibRequest &request); void DescribeVocabLibAsync(const Model::DescribeVocabLibRequest& request, const DescribeVocabLibAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); DescribeVocabLibOutcomeCallable DescribeVocabLibCallable(const Model::DescribeVocabLibRequest& request); /** *修改人员库信息 * @param req ModifyLibraryRequest * @return ModifyLibraryOutcome */ ModifyLibraryOutcome ModifyLibrary(const Model::ModifyLibraryRequest &request); void ModifyLibraryAsync(const Model::ModifyLibraryRequest& request, const ModifyLibraryAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); ModifyLibraryOutcomeCallable ModifyLibraryCallable(const Model::ModifyLibraryRequest& request); /** *修改人员信息 * @param req ModifyPersonRequest * @return ModifyPersonOutcome */ ModifyPersonOutcome ModifyPerson(const Model::ModifyPersonRequest &request); void ModifyPersonAsync(const Model::ModifyPersonRequest& request, const ModifyPersonAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); ModifyPersonOutcomeCallable ModifyPersonCallable(const Model::ModifyPersonRequest& request); /** *音频任务提交接口 * @param req SubmitAudioTaskRequest * @return SubmitAudioTaskOutcome */ SubmitAudioTaskOutcome SubmitAudioTask(const Model::SubmitAudioTaskRequest &request); void SubmitAudioTaskAsync(const Model::SubmitAudioTaskRequest& request, const SubmitAudioTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitAudioTaskOutcomeCallable SubmitAudioTaskCallable(const Model::SubmitAudioTaskRequest& request); /** 
*提交人员考勤任务,支持包括点播和直播资源;支持通过DescribeAttendanceResult查询结果,也支持通过NoticeUrl设置考勤回调结果,回调结果结构如下: ##### 回调事件结构 | 参数名称 | 类型 | 描述 | | ---- | --- | ------ | | jobid | Integer | 任务ID | | person_info | array of PersonInfo | 识别到的人员列表 | #####子结构PersonInfo | 参数名称 | 类型 | 描述 | | ---- | --- | ------ | | traceid | String | 可用于区分同一路视频流下的不同陌生人 | | personid | String | 识别到的人员ID,如果是陌生人则返回空串 | | libid | String | 识别到的人员所在的库ID,如果是陌生人则返回空串 | | timestamp | uint64 | 识别到人脸的绝对时间戳,单位ms | | image_url | string | 识别到人脸的事件抓图的下载地址,不长期保存,需要请及时下载 | * @param req SubmitCheckAttendanceTaskRequest * @return SubmitCheckAttendanceTaskOutcome */ SubmitCheckAttendanceTaskOutcome SubmitCheckAttendanceTask(const Model::SubmitCheckAttendanceTaskRequest &request); void SubmitCheckAttendanceTaskAsync(const Model::SubmitCheckAttendanceTaskRequest& request, const SubmitCheckAttendanceTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitCheckAttendanceTaskOutcomeCallable SubmitCheckAttendanceTaskCallable(const Model::SubmitCheckAttendanceTaskRequest& request); /** *对话任务分析接口 * @param req SubmitConversationTaskRequest * @return SubmitConversationTaskOutcome */ SubmitConversationTaskOutcome SubmitConversationTask(const Model::SubmitConversationTaskRequest &request); void SubmitConversationTaskAsync(const Model::SubmitConversationTaskRequest& request, const SubmitConversationTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitConversationTaskOutcomeCallable SubmitConversationTaskCallable(const Model::SubmitConversationTaskRequest& request); /** *发起双路视频生成精彩集锦接口。该接口可以通过客户传入的学生音视频及老师视频两路Url,自动生成一堂课程的精彩集锦。需要通过DescribeHighlightResult 接口获取生成结果。 * @param req SubmitDoubleVideoHighlightsRequest * @return SubmitDoubleVideoHighlightsOutcome */ SubmitDoubleVideoHighlightsOutcome SubmitDoubleVideoHighlights(const Model::SubmitDoubleVideoHighlightsRequest &request); void SubmitDoubleVideoHighlightsAsync(const 
Model::SubmitDoubleVideoHighlightsRequest& request, const SubmitDoubleVideoHighlightsAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitDoubleVideoHighlightsOutcomeCallable SubmitDoubleVideoHighlightsCallable(const Model::SubmitDoubleVideoHighlightsRequest& request); /** ***传统课堂授课任务**:在此场景中,老师为站立授课,有白板或投影供老师展示课程内容,摄像头可以拍摄到老师的半身或者全身。拍摄视频为一路全局画面,且背景不动,要求画面稳定清晰。通过此接口可分析老师授课的行为及语音,以支持AI评教。 **提供的功能接口有:**老师人脸识别、老师表情识别、老师肢体动作识别、语音识别。 可分析的指标维度包括:身份识别、正脸、侧脸、人脸坐标、人脸尺寸、高兴、中性、高兴、中性、惊讶、厌恶、恐惧、愤怒、蔑视、悲伤、正面讲解、写板书、指黑板、语音转文字、发音时长、非发音时长、音量、语速、指定关键词的使用等 **对场景的要求为:**真实场景老师1人出现在画面中,全局画面且背景不动;人脸上下角度在20度以内,左右角度在15度以内,歪头角度在15度以内;光照均匀,无遮挡,人脸清晰可见;像素最好在 100X100 像素以上,但是图像整体质量不能超过1080p。 **结果查询方式:**图像任务直接返回结果,点播及直播任务通过DescribeAITaskResult查询结果。 * @param req SubmitFullBodyClassTaskRequest * @return SubmitFullBodyClassTaskOutcome */ SubmitFullBodyClassTaskOutcome SubmitFullBodyClassTask(const Model::SubmitFullBodyClassTaskRequest &request); void SubmitFullBodyClassTaskAsync(const Model::SubmitFullBodyClassTaskRequest& request, const SubmitFullBodyClassTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitFullBodyClassTaskOutcomeCallable SubmitFullBodyClassTaskCallable(const Model::SubmitFullBodyClassTaskRequest& request); /** *发起视频生成精彩集锦接口。该接口可以通过客户传入的课程音频数据及相关策略(如微笑抽取,专注抽取等),自动生成一堂课程的精彩集锦。需要通过QueryHighlightResult接口获取生成结果。 * @param req SubmitHighlightsRequest * @return SubmitHighlightsOutcome */ SubmitHighlightsOutcome SubmitHighlights(const Model::SubmitHighlightsRequest &request); void SubmitHighlightsAsync(const Model::SubmitHighlightsRequest& request, const SubmitHighlightsAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitHighlightsOutcomeCallable SubmitHighlightsCallable(const Model::SubmitHighlightsRequest& request); /** *提交图像分析任务 * @param req SubmitImageTaskRequest * @return SubmitImageTaskOutcome */ SubmitImageTaskOutcome SubmitImageTask(const 
Model::SubmitImageTaskRequest &request); void SubmitImageTaskAsync(const Model::SubmitImageTaskRequest& request, const SubmitImageTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitImageTaskOutcomeCallable SubmitImageTaskCallable(const Model::SubmitImageTaskRequest& request); /** ***提交在线1对1课堂任务** 对于在线1对1课堂,老师通过视频向学生授课,并且学生人数为1人。通过上传学生端的图像信息,可以获取学生的听课情况分析。 具体指一路全局画面且背景不动,有1位学生的头像或上半身的画面,要求画面稳定清晰。 **提供的功能接口有:**学生人脸识别、学生表情识别、语音识别。可分析的指标维度包括:学生身份识别、正脸、侧脸、抬头、低头、人脸坐标、人脸尺寸、高兴、中性、高兴、中性、惊讶、厌恶、恐惧、愤怒、蔑视、悲伤、语音转文字、发音时长、非发音时长、音量、语速等。 **对场景的要求为:**真实常规1v1授课场景,学生2人以下,全局画面且背景不动;人脸上下角度在20度以内,左右角度在15度以内,歪头角度在15度以内;光照均匀,无遮挡,人脸清晰可见;像素最好在 100X100 像素以上,但是图像整体质量不能超过1080p。 **结果查询方式:**图像任务直接返回结果,点播及直播任务通过DescribeAITaskResult查询结果。 * @param req SubmitOneByOneClassTaskRequest * @return SubmitOneByOneClassTaskOutcome */ SubmitOneByOneClassTaskOutcome SubmitOneByOneClassTask(const Model::SubmitOneByOneClassTaskRequest &request); void SubmitOneByOneClassTaskAsync(const Model::SubmitOneByOneClassTaskRequest& request, const SubmitOneByOneClassTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitOneByOneClassTaskOutcomeCallable SubmitOneByOneClassTaskCallable(const Model::SubmitOneByOneClassTaskRequest& request); /** ***提交线下小班(无课桌)课任务** 线下小班课是指有学生无课桌的课堂,满座15人以下,全局画面且背景不动,能清晰看到。 **提供的功能接口有:**学生人脸识别、学生表情识别、学生肢体动作识别。 可分析的指标维度包括:身份识别、正脸、侧脸、抬头、低头、高兴、中性、高兴、中性、惊讶、厌恶、恐惧、愤怒、蔑视、悲伤、站立、举手、坐着等。 **对场景的要求为:**真实常规教室,满座15人以下,全局画面且背景不动;人脸上下角度在20度以内,左右角度在15度以内,歪头角度在15度以内;光照均匀,无遮挡,人脸清晰可见;像素最好在 100X100 像素以上但是图像整体质量不能超过1080p。 **结果查询方式:**图像任务直接返回结果,点播及直播任务通过DescribeAITaskResult查询结果。 * @param req SubmitOpenClassTaskRequest * @return SubmitOpenClassTaskOutcome */ SubmitOpenClassTaskOutcome SubmitOpenClassTask(const Model::SubmitOpenClassTaskRequest &request); void SubmitOpenClassTaskAsync(const Model::SubmitOpenClassTaskRequest& request, const SubmitOpenClassTaskAsyncHandler& handler, const std::shared_ptr<const 
AsyncCallerContext>& context = nullptr); SubmitOpenClassTaskOutcomeCallable SubmitOpenClassTaskCallable(const Model::SubmitOpenClassTaskRequest& request); /** ***在线小班课任务**:此场景是在线授课场景,老师一般为坐着授课,摄像头可以拍摄到老师的头部及上半身。拍摄视频为一路全局画面,且背景不动,要求画面稳定清晰。通过此接口可分析老师授课的行为及语音,以支持AI评教。 **提供的功能接口有:**老师人脸识别、老师表情识别、老师手势识别、光线识别、语音识别。 可分析的指标维度包括:身份识别、正脸、侧脸、人脸坐标、人脸尺寸、高兴、中性、高兴、中性、惊讶、厌恶、恐惧、愤怒、蔑视、悲伤、点赞手势、听你说手势、听我说手势、拿教具行为、语音转文字、发音时长、非发音时长、音量、语速、指定关键词的使用等 **对场景的要求为:**在线常规授课场景,全局画面且背景不动;人脸上下角度在20度以内,左右角度在15度以内,歪头角度在15度以内;光照均匀,无遮挡,人脸清晰可见;像素最好在 100X100 像素以上,但是图像整体质量不能超过1080p。 **结果查询方式:**图像任务直接返回结果,点播及直播任务通过DescribeAITaskResult查询结果。 * @param req SubmitPartialBodyClassTaskRequest * @return SubmitPartialBodyClassTaskOutcome */ SubmitPartialBodyClassTaskOutcome SubmitPartialBodyClassTask(const Model::SubmitPartialBodyClassTaskRequest &request); void SubmitPartialBodyClassTaskAsync(const Model::SubmitPartialBodyClassTaskRequest& request, const SubmitPartialBodyClassTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); SubmitPartialBodyClassTaskOutcomeCallable SubmitPartialBodyClassTaskCallable(const Model::SubmitPartialBodyClassTaskRequest& request); /** ***提交线下传统面授大班课(含课桌)任务。** 传统教室课堂是指有学生课堂有课桌的课堂,满座20-50人,全局画面且背景不动。 **提供的功能接口有:**学生人脸识别、学生表情识别、学生肢体动作识别。可分析的指标维度包括:学生身份识别、正脸、侧脸、抬头、低头、高兴、中性、高兴、中性、惊讶、厌恶、恐惧、愤怒、蔑视、悲伤、举手、站立、坐着、趴桌子、玩手机等 **对场景的要求为:**传统的学生上课教室,满座20-50人,全局画面且背景不动;人脸上下角度在20度以内,左右角度在15度以内,歪头角度在15度以内;光照均匀,无遮挡,人脸清晰可见;像素最好在 100X100 像素以上,但是图像整体质量不能超过1080p。 **结果查询方式:**图像任务直接返回结果,点播及直播任务通过DescribeAITaskResult查询结果。 * @param req SubmitTraditionalClassTaskRequest * @return SubmitTraditionalClassTaskOutcome */ SubmitTraditionalClassTaskOutcome SubmitTraditionalClassTask(const Model::SubmitTraditionalClassTaskRequest &request); void SubmitTraditionalClassTaskAsync(const Model::SubmitTraditionalClassTaskRequest& request, const SubmitTraditionalClassTaskAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); 
SubmitTraditionalClassTaskOutcomeCallable SubmitTraditionalClassTaskCallable(const Model::SubmitTraditionalClassTaskRequest& request); /** *分析音频信息 * @param req TransmitAudioStreamRequest * @return TransmitAudioStreamOutcome */ TransmitAudioStreamOutcome TransmitAudioStream(const Model::TransmitAudioStreamRequest &request); void TransmitAudioStreamAsync(const Model::TransmitAudioStreamRequest& request, const TransmitAudioStreamAsyncHandler& handler, const std::shared_ptr<const AsyncCallerContext>& context = nullptr); TransmitAudioStreamOutcomeCallable TransmitAudioStreamCallable(const Model::TransmitAudioStreamRequest& request); }; } } } #endif // !TENCENTCLOUD_TCI_V20190318_TCICLIENT_H_
// Finds all triples (a, b, c) of three-digit numbers with a : b : c = 1 : 2 : 3
// that together use each of the digits 1..9 exactly once, and writes each
// triple as "a b c" (one per line) to ans.txt.
//
// Replaces the original nine nested loops (9^9 ~ 387M candidate tuples, each
// filtered by a 36-term pairwise-distinct check) with enumeration of the
// 9! = 362,880 digit permutations via std::next_permutation. Starting from the
// sorted array and using do/while visits every permutation in lexicographic
// order, which is exactly the order the original nested loops produced, so the
// file contents are identical.
#include <algorithm>
#include <fstream>

int main()
{
    std::ofstream out("ans.txt", std::ios::out);
    int d[9] = {1, 2, 3, 4, 5, 6, 7, 8, 9};
    do {
        const int a = d[0] * 100 + d[1] * 10 + d[2];
        const int b = d[3] * 100 + d[4] * 10 + d[5];
        const int c = d[6] * 100 + d[7] * 10 + d[8];
        // Original test: a*6 == b*3 && b*3 == c*2, i.e. b == 2a and c == 3a.
        if (b == 2 * a && c == 3 * a)
            out << a << " " << b << " " << c << std::endl;
    } while (std::next_permutation(d, d + 9));
    out.close();
    return 0;
}
def step_sentry(self, runstop):
    """React to a change in the runstop state by toggling torque on all motors.

    When the runstop becomes engaged, torque is disabled on every motor in
    ``self.motors``; when it is released, torque is re-enabled. Nothing happens
    while the state is unchanged, so this is safe to call on every control step.

    Args:
        runstop: Current runstop state (truthy = stopped). Compared against the
            state remembered from the previous call (``self.runstop_last``).
    """
    # Compare by value, not identity: `is` is only reliable for the True/False
    # singletons and would silently misbehave for equivalent 0/1 flag values.
    if runstop != self.runstop_last:
        if runstop:
            # Runstop engaged: make every motor go limp.
            for motor in self.motors.values():
                motor.disable_torque()
        else:
            # Runstop released: restore torque on every motor.
            for motor in self.motors.values():
                motor.enable_torque()
        self.runstop_last = runstop
/*
 * Drop a reference to a cookie.
 *
 * Decrements the cookie's refcount; if it reaches zero the cookie is unhashed
 * and freed, and the reference it held on its parent is dropped in turn, so a
 * final put can tear down a whole chain of ancestors in one call.
 */
void fscache_cookie_put(struct fscache_cookie *cookie,
			enum fscache_cookie_trace where)
{
	struct fscache_cookie *parent;
	int ref;

	_enter("%x", cookie->debug_id);

	do {
		/* Snapshot the id before the decrement: once the count hits
		 * zero the cookie is freed below and must not be read again,
		 * but the tracepoint still needs the id. */
		unsigned int cookie_debug_id = cookie->debug_id;
		bool zero = __refcount_dec_and_test(&cookie->ref, &ref);

		trace_fscache_cookie(cookie_debug_id, ref - 1, where);
		if (!zero)
			return;

		/* Last reference gone: remove from the hash, free, and move
		 * up to drop the parent reference (loop ends when a cookie
		 * with no parent is reached). */
		parent = cookie->parent;
		fscache_unhash_cookie(cookie);
		fscache_free_cookie(cookie);
		cookie = parent;
		where = fscache_cookie_put_parent;
	} while (cookie);

	_leave("");
}
package cmd

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"sigs.k8s.io/structured-merge-diff/v4/fieldpath"
	"sigs.k8s.io/structured-merge-diff/v4/value"
)

// M is shorthand for an untyped JSON-like object used in test fixtures.
type M = map[string]interface{}

// TestFieldListMatchObject verifies that fieldListMatchObject reports whether
// every (name, value) pair in the field list is present with the same value in
// the object (extra keys in the object are ignored, per the "match" case).
func TestFieldListMatchObject(t *testing.T) {
	testCases := []struct {
		name        string
		fieldList   value.FieldList
		object      M
		shouldMatch bool
	}{
		{
			name: "match",
			fieldList: value.FieldList{
				value.Field{Name: "containerPort", Value: value.NewValueInterface(80)},
				value.Field{Name: "protocol", Value: value.NewValueInterface("TCP")},
			},
			object: M{
				"containerPort": 80,
				"protocol":      "TCP",
				"name":          "foo",
			},
			shouldMatch: true,
		},
		{
			name: "mismatch",
			fieldList: value.FieldList{
				value.Field{Name: "containerPort", Value: value.NewValueInterface(80)},
				value.Field{Name: "protocol", Value: value.NewValueInterface("TCP")},
			},
			object: M{
				"containerPort": 8080,
				"protocol":      "TCP",
				"name":          "foo",
			},
			shouldMatch: false,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			got := fieldListMatchObject(tc.fieldList, tc.object)
			if got != tc.shouldMatch {
				t.Errorf("Unexpected result: got=%v, expected=%v", got, tc.shouldMatch)
			}
		})
	}
}

// TestMarshaller_MarshalMetaObject builds a Pod with three managed-fields
// entries (m1 owns metadata, m2/m3 split ownership of the container ports) and
// checks MarshalMetaObject's manager-annotated YAML output against a golden
// string.
func TestMarshaller_MarshalMetaObject(t *testing.T) {
	now := metav1.NewTime(time.Unix(1606150365, 0).UTC())
	// s1: fields owned by manager m1 — finalizer, labels, owner references.
	// NOTE(review): the last two paths lack the "metadata" prefix used by the
	// sibling ownerReferences paths — looks inconsistent; confirm intended.
	s1 := fieldpath.NewSet(
		fieldpath.MakePathOrDie("metadata", "finalizers"),
		fieldpath.MakePathOrDie("metadata", "finalizers",
			value.NewValueInterface("service.kubernetes.io/load-balancer-cleanup"),
		),
		fieldpath.MakePathOrDie("metadata", "labels"),
		fieldpath.MakePathOrDie("metadata", "labels", "app"),
		fieldpath.MakePathOrDie("metadata", "ownerReferences"),
		fieldpath.MakePathOrDie("metadata", "ownerReferences",
			fieldpath.KeyByFields("uid", "72594682-7b8d-4d52-bb84-8cab3cd2e16f")),
		fieldpath.MakePathOrDie("ownerReferences",
			fieldpath.KeyByFields("uid", "72594682-7b8d-4d52-bb84-8cab3cd2e16f"),
			"kind",
		),
		fieldpath.MakePathOrDie("ownerReferences",
			fieldpath.KeyByFields("uid", "72594682-7b8d-4d52-bb84-8cab3cd2e16f"),
			"apiVersion",
		),
	)
	// s2: fields owned by manager m2 — its own finalizer plus container c1 and
	// its TCP port.
	s2 := fieldpath.NewSet(
		fieldpath.MakePathOrDie("metadata", "finalizers",
			value.NewValueInterface("service.kubernetes.io/foo"),
		),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1")),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "image"),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports"),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports",
			fieldpath.KeyByFields("containerPort", 53, "protocol", "TCP")),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports",
			fieldpath.KeyByFields("containerPort", 53, "protocol", "TCP"), "containerPort"),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports",
			fieldpath.KeyByFields("containerPort", 53, "protocol", "TCP"), "protocol"),
	)
	// s3: fields owned by manager m3 — only the UDP port of container c1.
	s3 := fieldpath.NewSet(
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports",
			fieldpath.KeyByFields("containerPort", 53, "protocol", "UDP")),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports",
			fieldpath.KeyByFields("containerPort", 53, "protocol", "UDP"), "containerPort"),
		fieldpath.MakePathOrDie("spec", "containers", fieldpath.KeyByFields("name", "c1"), "ports",
			fieldpath.KeyByFields("containerPort", 53, "protocol", "UDP"), "protocol"),
	)
	f1, _ := s1.ToJSON()
	f2, _ := s2.ToJSON()
	f3, _ := s3.ToJSON()
	pod := &corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			ManagedFields: []metav1.ManagedFieldsEntry{
				{
					Manager:   "m1",
					Operation: metav1.ManagedFieldsOperationUpdate,
					Time:      &now,
					FieldsV1:  &metav1.FieldsV1{Raw: f1},
				},
				{
					Manager:   "m2",
					Operation: metav1.ManagedFieldsOperationUpdate,
					Time:      &now,
					FieldsV1:  &metav1.FieldsV1{Raw: f2},
				},
				{
					Manager:   "m3",
					Operation: metav1.ManagedFieldsOperationUpdate,
					Time:      &now,
					FieldsV1:  &metav1.FieldsV1{Raw: f3},
				},
			},
			Labels: map[string]string{
				"app":     "bar",
				"version": "v1",
			},
			Finalizers: []string{
				"service.kubernetes.io/load-balancer-cleanup",
				"service.kubernetes.io/foo",
			},
			OwnerReferences: []metav1.OwnerReference{
				{
					UID:  "72594682-7b8d-4d52-bb84-8cab3cd2e16f",
					Kind: "ReplicaSet",
					Name: "bar-xxxx",
				},
			},
		},
		Spec: corev1.PodSpec{
			Containers: []corev1.Container{
				{
					Name:  "c1",
					Image: "image:latest",
					Ports: []corev1.ContainerPort{
						{
							Name:          "tcp",
							ContainerPort: 53,
							Protocol:      corev1.ProtocolTCP,
						},
						{
							Name:          "udp",
							ContainerPort: 53,
							Protocol:      corev1.ProtocolUDP,
						},
					},
				},
			},
		},
	}
	// Golden output: each YAML line is prefixed with the manager that owns it.
	const expected = ` metadata: creationTimestamp: null m1 (Update 2020-11-23 16:52:45 +0000) finalizers: m1 (Update 2020-11-23 16:52:45 +0000) - service.kubernetes.io/load-balancer-cleanup m2 (Update 2020-11-23 16:52:45 +0000) - service.kubernetes.io/foo m1 (Update 2020-11-23 16:52:45 +0000) labels: m1 (Update 2020-11-23 16:52:45 +0000) app: bar m1 (Update 2020-11-23 16:52:45 +0000) version: v1 m1 (Update 2020-11-23 16:52:45 +0000) ownerReferences: m1 (Update 2020-11-23 16:52:45 +0000) - apiVersion: "" m1 (Update 2020-11-23 16:52:45 +0000) kind: ReplicaSet m1 (Update 2020-11-23 16:52:45 +0000) name: bar-xxxx m1 (Update 2020-11-23 16:52:45 +0000) uid: 72594682-7b8d-4d52-bb84-8cab3cd2e16f spec: containers: m2 (Update 2020-11-23 16:52:45 +0000) - image: image:latest m2 (Update 2020-11-23 16:52:45 +0000) name: c1 m2 (Update 2020-11-23 16:52:45 +0000) ports: m2 (Update 2020-11-23 16:52:45 +0000) - containerPort: 53 m2 (Update 2020-11-23 16:52:45 +0000) name: tcp m2 (Update 2020-11-23 16:52:45 +0000) protocol: TCP m3 (Update 2020-11-23 16:52:45 +0000) - containerPort: 53 m3 (Update 2020-11-23 16:52:45 +0000) name: udp m3 (Update 2020-11-23 16:52:45 +0000) protocol: UDP m2 (Update 2020-11-23 16:52:45 +0000) resources: {} status: {} `
	data, err := MarshalMetaObject(pod, TimeFormatFull)
	if err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, expected, string(data))
}

// TestBuildTree feeds two managers' field sets into Marshaller.buildTree and
// compares the resulting ownership tree node-by-node against a hand-built
// expected tree (two leaf finalizer values under metadata.finalizers).
func TestBuildTree(t *testing.T) {
	s1 := fieldpath.NewSet(
		fieldpath.MakePathOrDie("metadata", "finalizers"),
		fieldpath.MakePathOrDie("metadata", "finalizers",
			value.NewValueInterface("service.kubernetes.io/load-balancer-cleanup"),
		),
	)
	s2 := fieldpath.NewSet(
		fieldpath.MakePathOrDie("metadata", "finalizers",
			value.NewValueInterface("service.kubernetes.io/foo"),
		),
	)
	f1, _ := s1.ToJSON()
	f2, _ := s2.ToJSON()
	m := Marshaller{
		now:        time.Now(),
		timeFormat: TimeFormatNone,
	}
	r := require.New(t)
	root, err := m.buildTree([]metav1.ManagedFieldsEntry{
		{
			Manager:   "m1",
			Operation: metav1.ManagedFieldsOperationApply,
			FieldsV1: &metav1.FieldsV1{
				Raw: f1,
			},
		},
		{
			Manager:   "m2",
			Operation: metav1.ManagedFieldsOperationUpdate,
			FieldsV1: &metav1.FieldsV1{
				Raw: f2,
			},
		},
	}, 0, 0, 0)
	r.NoError(err)
	// Expected tree: root -> "metadata" -> "finalizers" -> two leaf values,
	// one per manager; Parent pointers link each node back up the chain.
	leaf1 := &Node{
		Info: &ManagerInfo{
			Manager:   "m1",
			Operation: string(metav1.ManagedFieldsOperationApply),
		},
	}
	leaf2 := &Node{
		Info: &ManagerInfo{
			Manager:   "m2",
			Operation: string(metav1.ManagedFieldsOperationUpdate),
		},
	}
	node1 := &Node{
		Values: map[string]*ValueWithNode{
			`"service.kubernetes.io/load-balancer-cleanup"`: {
				Value: value.NewValueInterface("service.kubernetes.io/load-balancer-cleanup"),
				Node:  leaf1,
			},
			`"service.kubernetes.io/foo"`: {
				Value: value.NewValueInterface("service.kubernetes.io/foo"),
				Node:  leaf2,
			},
		},
		Info: &ManagerInfo{
			Manager:   "m1",
			Operation: string(metav1.ManagedFieldsOperationApply),
		},
	}
	leaf1.Parent, leaf2.Parent = node1, node1
	node2 := &Node{
		Fields: map[string]*Node{
			"finalizers": node1,
		},
	}
	node1.Parent = node2
	expected := &Node{
		Fields: map[string]*Node{
			"metadata": node2,
		},
	}
	node2.Parent = expected
	r.Equal(expected, root)
}
/** * @author <a href="mailto:[email protected]">Marek Posolda</a> */ public class RealmAdapter extends AbstractAdapter implements RealmModel { private static final Logger logger = Logger.getLogger(RealmAdapter.class); private final RealmEntity realm; protected volatile transient PublicKey publicKey; protected volatile transient PrivateKey privateKey; private volatile transient PasswordPolicy passwordPolicy; public RealmAdapter(RealmEntity realmEntity, MongoStoreInvocationContext invocationContext) { super(invocationContext); this.realm = realmEntity; } @Override public String getId() { return realm.getId(); } @Override public String getName() { return realm.getName(); } @Override public void setName(String name) { realm.setName(name); updateRealm(); } @Override public boolean isEnabled() { return realm.isEnabled(); } @Override public void setEnabled(boolean enabled) { realm.setEnabled(enabled); updateRealm(); } @Override public boolean isSslNotRequired() { return realm.isSslNotRequired(); } @Override public void setSslNotRequired(boolean sslNotRequired) { realm.setSslNotRequired(sslNotRequired); updateRealm(); } @Override public boolean isRegistrationAllowed() { return realm.isRegistrationAllowed(); } @Override public void setRegistrationAllowed(boolean registrationAllowed) { realm.setRegistrationAllowed(registrationAllowed); updateRealm(); } @Override public boolean isVerifyEmail() { return realm.isVerifyEmail(); } @Override public void setVerifyEmail(boolean verifyEmail) { realm.setVerifyEmail(verifyEmail); updateRealm(); } @Override public boolean isResetPasswordAllowed() { return realm.isResetPasswordAllowed(); } @Override public void setResetPasswordAllowed(boolean resetPassword) { realm.setResetPasswordAllowed(resetPassword); updateRealm(); } @Override public boolean isSocial() { return realm.isSocial(); } @Override public void setSocial(boolean social) { realm.setSocial(social); updateRealm(); } @Override public boolean isUpdateProfileOnInitialSocialLogin() 
{ return realm.isUpdateProfileOnInitialSocialLogin(); } @Override public void setUpdateProfileOnInitialSocialLogin(boolean updateProfileOnInitialSocialLogin) { realm.setUpdateProfileOnInitialSocialLogin(updateProfileOnInitialSocialLogin); updateRealm(); } @Override public PasswordPolicy getPasswordPolicy() { if (passwordPolicy == null) { passwordPolicy = new PasswordPolicy(realm.getPasswordPolicy()); } return passwordPolicy; } @Override public void setPasswordPolicy(PasswordPolicy policy) { this.passwordPolicy = policy; realm.setPasswordPolicy(policy.toString()); updateRealm(); } @Override public int getAccessTokenLifespan() { return realm.getAccessTokenLifespan(); } @Override public void setAccessTokenLifespan(int tokenLifespan) { realm.setAccessTokenLifespan(tokenLifespan); updateRealm(); } @Override public int getRefreshTokenLifespan() { return realm.getRefreshTokenLifespan(); } @Override public void setRefreshTokenLifespan(int tokenLifespan) { realm.setRefreshTokenLifespan(tokenLifespan); updateRealm(); } @Override public int getAccessCodeLifespan() { return realm.getAccessCodeLifespan(); } @Override public void setAccessCodeLifespan(int accessCodeLifespan) { realm.setAccessCodeLifespan(accessCodeLifespan); updateRealm(); } @Override public int getAccessCodeLifespanUserAction() { return realm.getAccessCodeLifespanUserAction(); } @Override public void setAccessCodeLifespanUserAction(int accessCodeLifespanUserAction) { realm.setAccessCodeLifespanUserAction(accessCodeLifespanUserAction); updateRealm(); } @Override public String getPublicKeyPem() { return realm.getPublicKeyPem(); } @Override public void setPublicKeyPem(String publicKeyPem) { realm.setPublicKeyPem(publicKeyPem); this.publicKey = null; updateRealm(); } @Override public String getPrivateKeyPem() { return realm.getPrivateKeyPem(); } @Override public void setPrivateKeyPem(String privateKeyPem) { realm.setPrivateKeyPem(privateKeyPem); this.privateKey = null; updateRealm(); } @Override public PublicKey 
getPublicKey() { if (publicKey != null) return publicKey; publicKey = KeycloakModelUtils.getPublicKey(getPublicKeyPem()); return publicKey; } @Override public void setPublicKey(PublicKey publicKey) { this.publicKey = publicKey; String publicKeyPem = KeycloakModelUtils.getPemFromKey(publicKey); setPublicKeyPem(publicKeyPem); } @Override public PrivateKey getPrivateKey() { if (privateKey != null) return privateKey; privateKey = KeycloakModelUtils.getPrivateKey(getPrivateKeyPem()); return privateKey; } @Override public void setPrivateKey(PrivateKey privateKey) { this.privateKey = privateKey; String privateKeyPem = KeycloakModelUtils.getPemFromKey(privateKey); setPrivateKeyPem(privateKeyPem); } @Override public String getLoginTheme() { return realm.getLoginTheme(); } @Override public void setLoginTheme(String name) { realm.setLoginTheme(name); updateRealm(); } @Override public String getAccountTheme() { return realm.getAccountTheme(); } @Override public void setAccountTheme(String name) { realm.setAccountTheme(name); updateRealm(); } @Override public UserAdapter getUser(String name) { DBObject query = new QueryBuilder() .and("loginName").is(name) .and("realmId").is(getId()) .get(); UserEntity user = getMongoStore().loadSingleEntity(UserEntity.class, query, invocationContext); if (user == null) { return null; } else { return new UserAdapter(user, invocationContext); } } @Override public UserModel getUserByEmail(String email) { DBObject query = new QueryBuilder() .and("email").is(email) .and("realmId").is(getId()) .get(); UserEntity user = getMongoStore().loadSingleEntity(UserEntity.class, query, invocationContext); if (user == null) { return null; } else { return new UserAdapter(user, invocationContext); } } @Override public UserModel getUserById(String id) { UserEntity user = getMongoStore().loadEntity(UserEntity.class, id, invocationContext); // Check that it's user from this realm if (user == null || !getId().equals(user.getRealmId())) { return null; } else { return 
new UserAdapter(user, invocationContext); } } @Override public UserAdapter addUser(String username) { UserAdapter userModel = addUserEntity(username); for (String r : getDefaultRoles()) { grantRole(userModel, getRole(r)); } for (ApplicationModel application : getApplications()) { for (String r : application.getDefaultRoles()) { grantRole(userModel, application.getRole(r)); } } return userModel; } // Add just user entity without defaultRoles protected UserAdapter addUserEntity(String username) { if (getUser(username) != null) { throw new IllegalArgumentException("User " + username + " already exists"); } UserEntity userEntity = new UserEntity(); userEntity.setLoginName(username); userEntity.setEnabled(true); userEntity.setRealmId(getId()); getMongoStore().insertEntity(userEntity, invocationContext); return new UserAdapter(userEntity, invocationContext); } @Override public boolean removeUser(String name) { DBObject query = new QueryBuilder() .and("loginName").is(name) .and("realmId").is(getId()) .get(); return getMongoStore().removeEntities(UserEntity.class, query, invocationContext); } @Override public RoleAdapter getRole(String name) { DBObject query = new QueryBuilder() .and("name").is(name) .and("realmId").is(getId()) .get(); RoleEntity role = getMongoStore().loadSingleEntity(RoleEntity.class, query, invocationContext); if (role == null) { return null; } else { return new RoleAdapter(role, this, invocationContext); } } @Override public RoleModel addRole(String name) { RoleAdapter role = getRole(name); if (role != null) { // Compatibility with JPA model return role; // throw new IllegalArgumentException("Role " + name + " already exists"); } RoleEntity roleEntity = new RoleEntity(); roleEntity.setName(name); roleEntity.setRealmId(getId()); getMongoStore().insertEntity(roleEntity, invocationContext); return new RoleAdapter(roleEntity, this, invocationContext); } @Override public boolean removeRoleById(String id) { return 
getMongoStore().removeEntity(RoleEntity.class, id, invocationContext); } @Override public Set<RoleModel> getRoles() { DBObject query = new QueryBuilder() .and("realmId").is(getId()) .get(); List<RoleEntity> roles = getMongoStore().loadEntities(RoleEntity.class, query, invocationContext); Set<RoleModel> result = new HashSet<RoleModel>(); if (roles == null) return result; for (RoleEntity role : roles) { result.add(new RoleAdapter(role, this, invocationContext)); } return result; } @Override public RoleModel getRoleById(String id) { RoleEntity role = getMongoStore().loadEntity(RoleEntity.class, id, invocationContext); if (role == null || !getId().equals(role.getRealmId())) { return null; } else { return new RoleAdapter(role, this, invocationContext); } } @Override public List<String> getDefaultRoles() { return realm.getDefaultRoles(); } @Override public void addDefaultRole(String name) { RoleModel role = getRole(name); if (role == null) { addRole(name); } getMongoStore().pushItemToList(realm, "defaultRoles", name, true, invocationContext); } @Override public void updateDefaultRoles(String[] defaultRoles) { List<String> roleNames = new ArrayList<String>(); for (String roleName : defaultRoles) { RoleModel role = getRole(roleName); if (role == null) { addRole(roleName); } roleNames.add(roleName); } realm.setDefaultRoles(roleNames); updateRealm(); } @Override public ApplicationModel getApplicationById(String id) { ApplicationEntity appData = getMongoStore().loadEntity(ApplicationEntity.class, id, invocationContext); // Check if application belongs to this realm if (appData == null || !getId().equals(appData.getRealmId())) { return null; } return new ApplicationAdapter(appData, invocationContext); } @Override public ApplicationModel getApplicationByName(String name) { DBObject query = new QueryBuilder() .and("realmId").is(getId()) .and("name").is(name) .get(); ApplicationEntity appEntity = getMongoStore().loadSingleEntity(ApplicationEntity.class, query, invocationContext); 
return appEntity==null ? null : new ApplicationAdapter(appEntity, invocationContext); } @Override public Map<String, ApplicationModel> getApplicationNameMap() { Map<String, ApplicationModel> resourceMap = new HashMap<String, ApplicationModel>(); for (ApplicationModel resource : getApplications()) { resourceMap.put(resource.getName(), resource); } return resourceMap; } @Override public List<ApplicationModel> getApplications() { DBObject query = new QueryBuilder() .and("realmId").is(getId()) .get(); List<ApplicationEntity> appDatas = getMongoStore().loadEntities(ApplicationEntity.class, query, invocationContext); List<ApplicationModel> result = new ArrayList<ApplicationModel>(); for (ApplicationEntity appData : appDatas) { result.add(new ApplicationAdapter(appData, invocationContext)); } return result; } @Override public ApplicationModel addApplication(String name) { UserAdapter resourceUser = addUserEntity(name); ApplicationEntity appData = new ApplicationEntity(); appData.setName(name); appData.setRealmId(getId()); appData.setEnabled(true); appData.setResourceUserId(resourceUser.getUser().getId()); getMongoStore().insertEntity(appData, invocationContext); return new ApplicationAdapter(appData, resourceUser, invocationContext); } @Override public boolean removeApplication(String id) { return getMongoStore().removeEntity(ApplicationEntity.class, id, invocationContext); } @Override public boolean hasRole(UserModel user, RoleModel role) { Set<RoleModel> roles = getRoleMappings(user); if (roles.contains(role)) return true; for (RoleModel mapping : roles) { if (mapping.hasRole(role)) return true; } return false; } @Override public void grantRole(UserModel user, RoleModel role) { UserEntity userEntity = ((UserAdapter)user).getUser(); getMongoStore().pushItemToList(userEntity, "roleIds", role.getId(), true, invocationContext); } @Override public Set<RoleModel> getRoleMappings(UserModel user) { Set<RoleModel> result = new HashSet<RoleModel>(); List<RoleEntity> roles = 
MongoModelUtils.getAllRolesOfUser(user, invocationContext); for (RoleEntity role : roles) { if (getId().equals(role.getRealmId())) { result.add(new RoleAdapter(role, this, invocationContext)); } else { // Likely applicationRole, but we don't have this application yet result.add(new RoleAdapter(role, invocationContext)); } } return result; } @Override public Set<RoleModel> getRealmRoleMappings(UserModel user) { Set<RoleModel> allRoles = getRoleMappings(user); // Filter to retrieve just realm roles TODO: Maybe improve to avoid filter programmatically... Maybe have separate fields for realmRoles and appRoles on user? Set<RoleModel> realmRoles = new HashSet<RoleModel>(); for (RoleModel role : allRoles) { RoleEntity roleEntity = ((RoleAdapter)role).getRole(); if (getId().equals(roleEntity.getRealmId())) { realmRoles.add(role); } } return realmRoles; } @Override public void deleteRoleMapping(UserModel user, RoleModel role) { if (user == null || role == null) return; UserEntity userEntity = ((UserAdapter)user).getUser(); getMongoStore().pullItemFromList(userEntity, "roleIds", role.getId(), invocationContext); } @Override public Set<RoleModel> getScopeMappings(UserModel user) { Set<RoleModel> result = new HashSet<RoleModel>(); List<RoleEntity> roles = MongoModelUtils.getAllScopesOfUser(user, invocationContext); for (RoleEntity role : roles) { if (getId().equals(role.getRealmId())) { result.add(new RoleAdapter(role, this, invocationContext)); } else { // Likely applicationRole, but we don't have this application yet result.add(new RoleAdapter(role, invocationContext)); } } return result; } @Override public Set<RoleModel> getRealmScopeMappings(UserModel user) { Set<RoleModel> allScopes = getScopeMappings(user); // Filter to retrieve just realm roles TODO: Maybe improve to avoid filter programmatically... Maybe have separate fields for realmRoles and appRoles on user? 
Set<RoleModel> realmRoles = new HashSet<RoleModel>(); for (RoleModel role : allScopes) { RoleEntity roleEntity = ((RoleAdapter)role).getRole(); if (getId().equals(roleEntity.getRealmId())) { realmRoles.add(role); } } return realmRoles; } @Override public void addScopeMapping(UserModel agent, RoleModel role) { UserEntity userEntity = ((UserAdapter)agent).getUser(); getMongoStore().pushItemToList(userEntity, "scopeIds", role.getId(), true, invocationContext); } @Override public void deleteScopeMapping(UserModel user, RoleModel role) { UserEntity userEntity = ((UserAdapter)user).getUser(); getMongoStore().pullItemFromList(userEntity, "scopeIds", role.getId(), invocationContext); } @Override public OAuthClientModel addOAuthClient(String name) { UserAdapter oauthAgent = addUserEntity(name); OAuthClientEntity oauthClient = new OAuthClientEntity(); oauthClient.setOauthAgentId(oauthAgent.getUser().getId()); oauthClient.setRealmId(getId()); oauthClient.setName(name); getMongoStore().insertEntity(oauthClient, invocationContext); return new OAuthClientAdapter(oauthClient, oauthAgent, invocationContext); } @Override public boolean removeOAuthClient(String id) { return getMongoStore().removeEntity(OAuthClientEntity.class, id, invocationContext); } @Override public OAuthClientModel getOAuthClient(String name) { UserAdapter user = getUser(name); if (user == null) return null; DBObject query = new QueryBuilder() .and("realmId").is(getId()) .and("oauthAgentId").is(user.getUser().getId()) .get(); OAuthClientEntity oauthClient = getMongoStore().loadSingleEntity(OAuthClientEntity.class, query, invocationContext); return oauthClient == null ? 
null : new OAuthClientAdapter(oauthClient, user, invocationContext); } @Override public OAuthClientModel getOAuthClientById(String id) { OAuthClientEntity clientEntity = getMongoStore().loadEntity(OAuthClientEntity.class, id, invocationContext); // Check if client belongs to this realm if (clientEntity == null || !getId().equals(clientEntity.getRealmId())) return null; return new OAuthClientAdapter(clientEntity, invocationContext); } @Override public List<OAuthClientModel> getOAuthClients() { DBObject query = new QueryBuilder() .and("realmId").is(getId()) .get(); List<OAuthClientEntity> results = getMongoStore().loadEntities(OAuthClientEntity.class, query, invocationContext); List<OAuthClientModel> list = new ArrayList<OAuthClientModel>(); for (OAuthClientEntity data : results) { list.add(new OAuthClientAdapter(data, invocationContext)); } return list; } @Override public void addRequiredCredential(String type) { RequiredCredentialModel credentialModel = initRequiredCredentialModel(type); addRequiredCredential(credentialModel, realm.getRequiredCredentials()); } @Override public void addRequiredResourceCredential(String type) { RequiredCredentialModel credentialModel = initRequiredCredentialModel(type); addRequiredCredential(credentialModel, realm.getRequiredApplicationCredentials()); } @Override public void addRequiredOAuthClientCredential(String type) { RequiredCredentialModel credentialModel = initRequiredCredentialModel(type); addRequiredCredential(credentialModel, realm.getRequiredOAuthClientCredentials()); } protected void addRequiredCredential(RequiredCredentialModel credentialModel, List<RequiredCredentialEntity> persistentCollection) { RequiredCredentialEntity credEntity = new RequiredCredentialEntity(); credEntity.setType(credentialModel.getType()); credEntity.setFormLabel(credentialModel.getFormLabel()); credEntity.setInput(credentialModel.isInput()); credEntity.setSecret(credentialModel.isSecret()); persistentCollection.add(credEntity); updateRealm(); } 
// The three updateRequired*Credentials variants differ only in which realm
// list they reconcile against the requested set of type names.
@Override
public void updateRequiredCredentials(Set<String> creds) {
    updateRequiredCredentials(creds, realm.getRequiredCredentials());
}

@Override
public void updateRequiredApplicationCredentials(Set<String> creds) {
    updateRequiredCredentials(creds, realm.getRequiredApplicationCredentials());
}

@Override
public void updateRequiredOAuthClientCredentials(Set<String> creds) {
    updateRequiredCredentials(creds, realm.getRequiredOAuthClientCredentials());
}

/**
 * Reconciles the persisted credential entities with the requested set of type
 * names: entities whose type is no longer requested are removed, and a new
 * entity is appended for each requested type not already present.
 */
protected void updateRequiredCredentials(Set<String> creds, List<RequiredCredentialEntity> credsEntities) {
    Set<String> already = new HashSet<String>();
    Set<RequiredCredentialEntity> toRemove = new HashSet<RequiredCredentialEntity>();
    for (RequiredCredentialEntity entity : credsEntities) {
        if (!creds.contains(entity.getType())) {
            toRemove.add(entity);
        } else {
            already.add(entity.getType());
        }
    }
    // Remove in a second pass to avoid mutating the list while iterating it.
    for (RequiredCredentialEntity entity : toRemove) {
        credsEntities.remove(entity);
    }
    for (String cred : creds) {
        logger.info("updating cred: " + cred);
        if (!already.contains(cred)) {
            RequiredCredentialModel credentialModel = initRequiredCredentialModel(cred);
            addRequiredCredential(credentialModel, credsEntities);
        }
    }
}

@Override
public List<RequiredCredentialModel> getRequiredCredentials() {
    return convertRequiredCredentialEntities(realm.getRequiredCredentials());
}

@Override
public List<RequiredCredentialModel> getRequiredApplicationCredentials() {
    return convertRequiredCredentialEntities(realm.getRequiredApplicationCredentials());
}

@Override
public List<RequiredCredentialModel> getRequiredOAuthClientCredentials() {
    return convertRequiredCredentialEntities(realm.getRequiredOAuthClientCredentials());
}

// Maps persistent credential entities to detached model objects.
protected List<RequiredCredentialModel> convertRequiredCredentialEntities(Collection<RequiredCredentialEntity> credEntities) {
    List<RequiredCredentialModel> result = new ArrayList<RequiredCredentialModel>();
    for (RequiredCredentialEntity entity : credEntities) {
        RequiredCredentialModel model = new RequiredCredentialModel();
        model.setFormLabel(entity.getFormLabel());
        model.setInput(entity.isInput());
        model.setSecret(entity.isSecret());
        model.setType(entity.getType());
        result.add(model);
    }
    return result;
}

/**
 * Verifies a plain-text password against the user's stored PASSWORD credential
 * (PBKDF2 with the per-credential salt). Returns false when the user has no
 * password credential at all.
 */
@Override
public boolean validatePassword(UserModel user, String password) {
    for (CredentialEntity cred : ((UserAdapter)user).getUser().getCredentials()) {
        if (cred.getType().equals(UserCredentialModel.PASSWORD)) {
            return new Pbkdf2PasswordEncoder(cred.getSalt()).verify(password, cred.getValue());
        }
    }
    return false;
}

// Two-factor check: the password must validate first, then the TOTP token is
// checked against the stored TOTP secret.
@Override
public boolean validateTOTP(UserModel user, String password, String token) {
    if (!validatePassword(user, password)) return false;
    for (CredentialEntity cred : ((UserAdapter)user).getUser().getCredentials()) {
        if (cred.getType().equals(UserCredentialModel.TOTP)) {
            return new TimeBasedOTP().validate(token, cred.getValue().getBytes());
        }
    }
    return false;
}

// NOTE(review): plain String#equals is not a constant-time comparison;
// consider a timing-safe compare for secrets.
@Override
public boolean validateSecret(UserModel user, String secret) {
    for (CredentialEntity cred : ((UserAdapter)user).getUser().getCredentials()) {
        if (cred.getType().equals(UserCredentialModel.SECRET)) {
            return secret.equals(cred.getValue());
        }
    }
    return false;
}

// Returns the user's SECRET credential wrapped in a model, or null if absent.
@Override
public UserCredentialModel getSecret(UserModel user) {
    for (CredentialEntity cred : ((UserAdapter)user).getUser().getCredentials()) {
        if (cred.getType().equals(UserCredentialModel.SECRET)) {
            return UserCredentialModel.secret(cred.getValue());
        }
    }
    return null;
}

/**
 * Creates or updates the credential of the given type on the user. Passwords
 * are salted and PBKDF2-hashed before storage; other types are stored verbatim.
 */
@Override
public void updateCredential(UserModel user, UserCredentialModel cred) {
    CredentialEntity credentialEntity = null;
    UserEntity userEntity = ((UserAdapter) user).getUser();
    // NOTE(review): no break, so the LAST credential of a matching type wins if
    // duplicates exist -- confirm duplicates cannot occur.
    for (CredentialEntity entity : userEntity.getCredentials()) {
        if (entity.getType().equals(cred.getType())) {
            credentialEntity = entity;
        }
    }
    if (credentialEntity == null) {
        credentialEntity = new CredentialEntity();
        credentialEntity.setType(cred.getType());
        credentialEntity.setDevice(cred.getDevice());
        userEntity.getCredentials().add(credentialEntity);
    }
    if (cred.getType().equals(UserCredentialModel.PASSWORD)) {
        byte[] salt = Pbkdf2PasswordEncoder.getSalt();
        credentialEntity.setValue(new Pbkdf2PasswordEncoder(salt).encode(cred.getValue()));
        credentialEntity.setSalt(salt);
    } else {
        credentialEntity.setValue(cred.getValue());
    }
    // Device is (re)set unconditionally here, making the setDevice call in the
    // new-entity branch above redundant.
    credentialEntity.setDevice(cred.getDevice());
    getMongoStore().updateEntity(userEntity, invocationContext);
}

// Finds the user in this realm linked to the given social provider/username
// pair; returns null when no such link exists.
@Override
public UserModel getUserBySocialLink(SocialLinkModel socialLink) {
    DBObject query = new QueryBuilder()
            .and("socialLinks.socialProvider").is(socialLink.getSocialProvider())
            .and("socialLinks.socialUsername").is(socialLink.getSocialUsername())
            .and("realmId").is(getId())
            .get();
    UserEntity userEntity = getMongoStore().loadSingleEntity(UserEntity.class, query, invocationContext);
    return userEntity==null ? null : new UserAdapter(userEntity, invocationContext);
}

// Returns all social links of the user as detached models (empty set when the
// embedded list is absent).
@Override
public Set<SocialLinkModel> getSocialLinks(UserModel user) {
    UserEntity userEntity = ((UserAdapter)user).getUser();
    List<SocialLinkEntity> linkEntities = userEntity.getSocialLinks();

    if (linkEntities == null) {
        return Collections.EMPTY_SET;
    }

    Set<SocialLinkModel> result = new HashSet<SocialLinkModel>();
    for (SocialLinkEntity socialLinkEntity : linkEntities) {
        SocialLinkModel model = new SocialLinkModel(socialLinkEntity.getSocialProvider(), socialLinkEntity.getSocialUsername());
        result.add(model);
    }
    return result;
}

// Appends a social link to the user's embedded "socialLinks" list.
@Override
public void addSocialLink(UserModel user, SocialLinkModel socialLink) {
    UserEntity userEntity = ((UserAdapter)user).getUser();
    SocialLinkEntity socialLinkEntity = new SocialLinkEntity();
    socialLinkEntity.setSocialProvider(socialLink.getSocialProvider());
    socialLinkEntity.setSocialUsername(socialLink.getSocialUsername());

    getMongoStore().pushItemToList(userEntity, "socialLinks", socialLinkEntity, true, invocationContext);
}

// Removes the matching social link (matched by provider + username).
@Override
public void removeSocialLink(UserModel user, SocialLinkModel socialLink) {
    SocialLinkEntity socialLinkEntity = new SocialLinkEntity();
    socialLinkEntity.setSocialProvider(socialLink.getSocialProvider());
    socialLinkEntity.setSocialUsername(socialLink.getSocialUsername());

    UserEntity userEntity = ((UserAdapter)user).getUser();
    getMongoStore().pullItemFromList(userEntity, "socialLinks", socialLinkEntity, invocationContext);
}

// Flushes the in-memory realm entity back to the Mongo store.
protected void updateRealm() {
    getMongoStore().updateEntity(realm, invocationContext);
}

// Resolves a built-in credential model by type name; unknown types are a
// programming error and raise RuntimeException.
protected RequiredCredentialModel initRequiredCredentialModel(String type) {
    RequiredCredentialModel model = RequiredCredentialModel.BUILT_IN.get(type);
    if (model == null) {
        throw new RuntimeException("Unknown credential type " + type);
    }
    return model;
}

// Returns every user belonging to this realm.
@Override
public List<UserModel> getUsers() {
    DBObject query = new QueryBuilder()
            .and("realmId").is(getId())
            .get();
    List<UserEntity> users = getMongoStore().loadEntities(UserEntity.class, query, invocationContext);
    return convertUserEntities(users);
}

/**
 * Free-text user search over login name, email and first/last name
 * (case-insensitive). A search containing a space is split into a
 * firstName-suffix / lastName-prefix pair.
 * NOTE(review): the raw search string is interpolated into regex patterns
 * unescaped, so regex metacharacters in user input can break or distort the
 * query -- consider Pattern.quote.
 */
@Override
public List<UserModel> searchForUser(String search) {
    search = search.trim();
    Pattern caseInsensitivePattern = Pattern.compile("(?i:" + search + ")");

    QueryBuilder nameBuilder;
    int spaceInd = search.lastIndexOf(" ");

    // Case when we have search string like "ohn Bow". Then firstName must end with "ohn" AND lastName must start with "bow" (everything case-insensitive)
    if (spaceInd != -1) {
        String firstName = search.substring(0, spaceInd);
        String lastName = search.substring(spaceInd + 1);
        Pattern firstNamePattern = Pattern.compile("(?i:" + firstName + "$)");
        Pattern lastNamePattern = Pattern.compile("(?i:^" + lastName + ")");
        nameBuilder = new QueryBuilder().and(
                new QueryBuilder().put("firstName").regex(firstNamePattern).get(),
                new QueryBuilder().put("lastName").regex(lastNamePattern).get()
        );
    } else {
        // Case when we have search without spaces like "foo". The firstName OR lastName could be "foo" (everything case-insensitive)
        nameBuilder = new QueryBuilder().or(
                new QueryBuilder().put("firstName").regex(caseInsensitivePattern).get(),
                new QueryBuilder().put("lastName").regex(caseInsensitivePattern).get()
        );
    }

    QueryBuilder builder = new QueryBuilder().and(
            new QueryBuilder().and("realmId").is(getId()).get(),
            new QueryBuilder().or(
                    new QueryBuilder().put("loginName").regex(caseInsensitivePattern).get(),
                    new QueryBuilder().put("email").regex(caseInsensitivePattern).get(),
                    nameBuilder.get()
            ).get()
    );

    List<UserEntity> users = getMongoStore().loadEntities(UserEntity.class, builder.get(), invocationContext);
    return convertUserEntities(users);
}

/**
 * Attribute search restricted to this realm. Login name, first name, last name
 * and email values are matched case-insensitively, anchored at the end.
 * NOTE(review): attribute values are interpolated into regex patterns
 * unescaped -- same caveat as searchForUser.
 */
@Override
public List<UserModel> searchForUserByAttributes(Map<String, String> attributes) {
    QueryBuilder queryBuilder = new QueryBuilder()
            .and("realmId").is(getId());

    for (Map.Entry<String, String> entry : attributes.entrySet()) {
        if (entry.getKey().equals(UserModel.LOGIN_NAME)) {
            queryBuilder.and("loginName").regex(Pattern.compile("(?i:" + entry.getValue() + "$)"));
        } else if (entry.getKey().equalsIgnoreCase(UserModel.FIRST_NAME)) {
            queryBuilder.and(UserModel.FIRST_NAME).regex(Pattern.compile("(?i:" + entry.getValue() + "$)"));
        } else if (entry.getKey().equalsIgnoreCase(UserModel.LAST_NAME)) {
            queryBuilder.and(UserModel.LAST_NAME).regex(Pattern.compile("(?i:" + entry.getValue() + "$)"));
        } else if (entry.getKey().equalsIgnoreCase(UserModel.EMAIL)) {
            queryBuilder.and(UserModel.EMAIL).regex(Pattern.compile("(?i:" + entry.getValue() + "$)"));
        }
    }
    List<UserEntity> users = getMongoStore().loadEntities(UserEntity.class, queryBuilder.get(), invocationContext);
    return convertUserEntities(users);
}

// Wraps each user entity in a UserAdapter.
protected List<UserModel> convertUserEntities(List<UserEntity> userEntities) {
    List<UserModel> userModels = new ArrayList<UserModel>();
    for (UserEntity user : userEntities) {
        userModels.add(new UserAdapter(user, invocationContext));
    }
    return userModels;
}

// SMTP / social configuration maps are stored directly on the realm entity;
// the setters persist immediately via updateRealm().
@Override
public Map<String, String> getSmtpConfig() {
    return realm.getSmtpConfig();
}

@Override
public void setSmtpConfig(Map<String, String> smtpConfig) {
    realm.setSmtpConfig(smtpConfig);
    updateRealm();
}

@Override
public Map<String, String> getSocialConfig() {
    return realm.getSocialConfig();
}

@Override
public void setSocialConfig(Map<String, String> socialConfig) {
    realm.setSocialConfig(socialConfig);
    updateRealm();
}

// Exposes the backing persistent entity to the generic Mongo layer.
@Override
public AbstractMongoIdentifiableEntity getMongoEntity() {
    return realm;
}
}
Unfortunately, the two answers below are simply wrong for a very, very simple reason: shot traps are completely irrelevant in modern armored warfare. I know it sounds illogical and counter-intuitive but it’s true. It doesn’t matter if the Leopard 2A5 has this gigantic reverse slope on its frontal turret; it will never, ever deflect a shot downwards into the hull roof. Let me explain why: Shot traps were very much a thing during World War Two. Let’s take for example the frontal turret of early production Tiger IIs: If an anti-tank shell hit the lower half of this frontal curve seen here on this early production Porsche turret, there was a very good chance that it could deflect downwards into the very thin hull roof and penetrate the tank. This is why Tiger IIs were soon upgraded with this Henschel turret that presented a flat frontal surface with no deflecting shot traps: However, modern MBTs don’t have to worry about such matters anymore because modern anti-tank shells simply never ricochet. A good WW2 anti-tank shell could reach velocities of ~800m/s. If you used APCR ammunition (which was extremely rare), a good gun like the famed German 75mm Pak 40 or the 88mm Pak 43 could reach 1000m/s. Modern tank guns using APFSDS ammunition can reach significantly higher velocities. The 44 caliber 120mm gun used by the M1 Abrams can shoot projectiles at ~1600m/s. Later models of the Leopard 2 using the longer 55 caliber 120mm gun can reach 1750m/s. At such velocities, if the fired shot connects against the enemy tank’s armour, one of three things will happen: 1: The penetrator will shatter; 2: The penetrator will embed itself inside the enemy armour but will lack the penetration to completely traverse it; 3: The penetrator will completely go through the enemy armour and proceed to do many terrible things to the crew and equipment sheltered inside the tank. This is why the Leopard 2A5 (among others) can afford to have this strange-looking conical shaped turret. 
It simply doesn’t have to worry about shot traps anymore. Hope this clarified a few things.
from .weight_smooth_l1_loss import WeightSmoothL1Loss
from .weight_softmax_loss import WeightSoftmaxLoss
from .multibox_loss import MultiBoxLoss
from .refine_multibox_loss import RefineMultiBoxLoss
from .focal_loss_sigmoid import FocalLossSigmoid
from .focal_loss_softmax import FocalLossSoftmax

# Public API of the losses package. Previously only two of the six imported
# loss classes were listed, so `from <package> import *` silently dropped the
# others; every loss implemented in this package is now exported.
__all__ = [
    'WeightSmoothL1Loss',
    'WeightSoftmaxLoss',
    'MultiBoxLoss',
    'RefineMultiBoxLoss',
    'FocalLossSigmoid',
    'FocalLossSoftmax',
]
def can_player_attack(self, player: entities.PlayerEntity) -> bool:
    """Return True if ``player`` may fire right now.

    A player holding super fireballs may always attack; otherwise a player
    may only attack while no fireball of their own color is still in flight.
    """
    if player.super_fireballs > 0:
        return True
    return not any(
        isinstance(entity, entities.Fireball) and entity.sender == player.color
        for entity in self.entities
    )
The following interview appears in Music & Literature No. 7, which is devoted to Paul Griffiths, Ann Quin, and Lera Auerbach. Paul Griffiths was born in Wales in 1947. A music critic for thirty years, he has published several books on music, as well as librettos and novels. Scott Esposito: Your book let me tell you is a novel you wrote from the perspective of Hamlet’s Ophelia, and, in an Oulipian twist, you only use words that Ophelia speaks in Shakespeare’s Hamlet. I found the results to be quite remarkable. To start, a very fundamental question—perhaps the key question: why Ophelia? Paul Griffiths: I began with the idea of taking all the words spoken in Hamlet and rearranging them into a new text. However, it didn’t take me very long to realize that while initially I could say almost anything with this stock of words, unless I took huge care in monitoring what I was using, I could easily end up with a highly resistant residue of archaisms and prepositions. I therefore decided to use not all the words in the play, once each, but all the words spoken by one character, with no restriction as to number of uses. Now, if you choose Hamlet as your character, his vocabulary is so vast there’s virtually no constraint—and I needed an active constraint to make the book work. If you choose Francisco, there’s the opposite problem, of being able to say only a very little. Ophelia has enough words to express herself on all sorts of matters, but also few enough that she is constantly bumping up against the unsayable. Article continues after advertisement The constraint also allowed her to give readers the experience of reading words they have read before but are reading now in a new context. Because her mad scenes introduce a language that is unusual and therefore memorable, the reader easily recognizes, for example, where “rosemary” comes from. 
Ophelia has enough words to express herself on all sorts of matters, but also few enough that she is constantly bumping up against the unsayable. At the same time, I wanted the book to do what novels generally do: tell a story. Ophelia has one of the play’s most powerful lines: “Lord, we know what we are, but know not what we may be.” My attempt was to give her something of what she may be. Also, and again quite aside from the constraint, here was a character who invites questions, a character who has very little opportunity to speak for herself in the play, and may now do so. Article continues after advertisement SE: Do you have a sense of how many words were available to you based on this constraint? PG: There are 483 different words, drawn from Ophelia’s part in the Second Quarto and in the First Folio, with modernized spelling. I left out the First Quarto because its “Ofelia” is so different, and it lies outside the Hamlet tradition, but I wanted to draw on both the other texts because each has words not supplied by the other: most importantly, Q2 has “time” where F1 has “tune.” The 483 figure counts “o” and “o’” as different words. But it strikes me now that “words” isn’t the right word, because I use these things as what one might call “letter strings,” whose possible other meanings are available—including meanings in other languages. For example, Ophelia uses “staff” to mean a stick or pole, but the same letter string (not, strictly, the same word) can also mean a group of people working for the same concern, or an element of musical notation. These are all, to use a term from linguistics, homographs. There isn’t a term for letter string, so I’m going to invent one: “grypheme.” A grapheme (existing term) notates a phoneme; a grypheme is a string of graphemes. There are 478 gryphemes in Ophelian, because the vocabulary includes five pairs of homographs (“o”/”o’” is one). 
The gryphemes may be combined in ways that their punctuation allows; for example, the very useful “’s” is in the play, and can therefore be applied to any word. But the gryphemes cannot be pushed together and so lose their integrity—otherwise, since Ophelia has several isolated letters (a, b, I, o, s, t), I could introduce all kinds of gryphemes not present in the original: “biotas,” for example. (I did this once; it’s very easy to spot.) SE: I’m intrigued by the fact that you had to invent a new word for what these units that you play with are. This feels like something Oulipian practice very much succeeds at: to get us to examine language with conceptual frames no one has previously brought to it. Did you find that the creation of let me tell you began to change the ways you experienced written language? PG: Yes, the let me tell you experience—and perhaps I should say that it took me 13 years to write the book, from 1989 to 2002—certainly had a larger impact, perhaps in encouraging a view of language as made first of words, then of meanings. I think, also, concept can precede practice—that people are drawn, as readers and as writers, to overt form from suspicions about the ease and immediacy with which language is supposed to communicate. A spirit of play is also involved, and an openness to the seriousness of play. These things—the suspicions and the playfulness—will then no doubt be furthered by formal writing, as you suggest. Some could observe that this entails several problems of serious importance to oneself. (One is that, as you see here, the suspicions and the playfulness are always lurking.) Writing under an imposed constraint may reinforce the lesson that all verbal communication is constrained, by the nature and traditions of the language (as you find when you try to express yourself in something other than your mother tongue), and by the individual’s knowledge of the language and habits with it. 
The occasion—an interview, say—may limit the language further. A highly artificial way of writing can unlock something otherwise unavailable. We may experience a melancholy in this, that we are not in total control of our words. But writing under deliberate constraint can also be, paradoxically, a liberation. A highly artificial way of writing can unlock, as just now, something otherwise unavailable. To take an example from let me tell you, Ophelia lacks the word “mother”—not so strange in the play, where such a character doesn’t appear and is never referred to, but bound to be odd in an autobiography. Why would she never use that word? There would have to be a reason, and that reason would have to be expressed. Hence a whole chapter, on a mother-daughter relationship I could never have imagined without the constraint. SE: Let’s drill into this question of how writing your book “encouraged a view of language as made first of words, then of meanings.” This sounds as though you initially had an idea of there being a bunch of Platonic “meanings” sitting around, readymade, and that words were then created to be attached to those meanings; then, as you wrote the book, somehow this view changed to something like the opposite. And I wonder, first of all, did I describe that correctly? And secondly, did you begin to see “meanings” as something much more amorphous, unstable, illogical, than as these readymade things just waiting for words to become attached to them? PG: Here’s how I think it worked. When I started, I found it very difficult to make a single well-formed sentence with the Ophelian vocabulary. It might take me an hour. There was no question of having in mind some meaning that I wanted to find words for, just of coming up with a sentence of some length that would be syntactically correct and—this is where the matter of meaning began to come in—make sense. 
Here’s an example from an early draft—a sentence that did not find its way into the eventual let me tell you: “He is at table, with his right hand on his knees.” Now that I look at this again, I notice its formality (“at table”) and the questions it invites. Who is this? Is he alone? How does the narrator know what he or she reports? Presumably this “he” is eating just with his left hand, but why? Is he left-handed? Is his right hand hurt? Is this some sort of game or test? I could have used these implicit questions to continue the narrative, perhaps with: “I could see him from where I was, right by the door, from which he was turned away. I could see he had the wish to say something, but he did not speak. There was another in there with him, a lady I could not see. There was something he had to tell this lady—that was it—something he had to say, but could not.” As in the later stages of writing let me tell you, I find this forming itself bit by bit, without my being aware of where it’s going. No doubt what I’m about to say is too much influenced by the computer model, but it’s as if my brain were trying out all kinds of combinations with the available words, checking the results against what might plausibly follow. You might say that let me tell you is a special case, coming out of an engagement with a body of rules. In writing as I am now—“normally,” as one might put it, or “freely”—there are no such rules and no such engagement. However, as I’ve already indicated, I think this is a delusion. Though the rules and constraints have become invisible, because we’re so practiced in using them, they’re there, and their presence makes any piece of writing a collaboration between writer and language. This is just more obvious in such a thing as let me tell you. 
Ophelia in that book is aware of all this, and yet she finds ways—not against the constraint but precisely because of its power to, as I said, unlock the unavailable—to speak of her early childhood, of her very different feelings for her two parents, of how she understands and does not understand Hamlet, and of an overwhelming experience of love. SE: Now that we’re talking about things like the form of the interview, sentence structure, etc., being sorts of constraints in themselves, we’re in a territory that I often find myself pondering when I talk about constrained works: namely, there are constraints and there are constraints. For instance, we might look at the act of translation an a sort of exercise in constrained writing: create a text that is this mirror image of another text in a different language. But to me such a constraint feels very different than the sort of constraint you’ve employed to create let me tell you. Do you draw any lines to differentiate between various kinds of constraints? PG: I would regard making a translation as a task, not a constraint. There are lots of tasks that writing can accomplish—a letter to your grandmother, a book review, a report to your insurance company—and they’re not constraints. A task is a limitation, of course, but it limits the subject matter or the voice, whereas a constraint places limits on form or genesis. A task entails all kinds of subsidiary tasks that will vary through the length of the piece, while a constraint is unitary or uniform, and applies in the same way right through. A task depends for its nature on some relation to the outside world—a person being addressed, or, in the case of a translation, a text being addressed—but not so a constraint. Also, one can fulfill only one task at a time, or maybe two (a book report for your grandmother), but it’s perfectly possible to abide by several constraints. Here’s an example: “I was in no mind to lay me down with him as if it had still been as it was. 
How could I, now that Rich had had his say? In light of that, what could I tell him if he should come that night? I was lost in this.” That’s five constraints (other than those that are with us always), but only one task, which was to write a few sentences exemplifying five simultaneous constraints: using the Ophelian vocabulary, restricted to words of one syllable, excluding articles, and having sentences all different in length, their numbers of words divisible by five. Participating in an interview is a task, not a constraint. The more general phenomena I was trying to describe—how phrases and sentences will seem to form themselves without the writer’s volition, how a text is partly directed by the language in which it is written—come out of constraints that are not chosen and not dependent on circumstances but everywhere and unavoidable. I’m reminded of something Milton Babbitt said, something like: “Everyone works according to rules; I would just prefer to know what mine are.” The tendency is to regard writing under constraint as a specialist area of writing, like sci-fi or sonnets, but I would see it rather as making overt what in other writing we don’t notice. SE: Do you feel there is a lower limit to how many words a vocabulary can have and still be viable for a novel? PG: I reckon Ophelia’s vocabulary must be close to the limit of what is usable over the long haul. Of her 483 words, 30 or so are archaisms and many others rather rare (“unbraced,” “beauteous”). There are also near-duplications (“heaven,” “heavens,” “heavenly”) and, conversely, regions where an important element is missing, as with “mother,” or the verb “to be,” where Ophelia has virtually everything except—a significant lack in a first-person narrative—“am.” Perhaps this is Ophelia inventing the telephone. 
These clumps and gaps limit the versatility of the language, though in the same measure they give it character—and the gaps, in particular, are hugely useful in forcing solutions beyond the obvious and immediate. Ophelia cannot say, for example, “I am here,” but she could say: “See me here with you,” or “Think of me as being here.” Such locutions color the situation and suggest narrative possibilities. “Think of me as being here, but not right here with you. Think of it like this: that there could be another here, the two not touching. My words come to you from that other here. You cannot see me, where you are; you will never see me. But I may speak to you, and you may speak to me. We may speak to each other, on and on, but never see one another, each of us in our own here.” Perhaps this is Ophelia inventing the telephone. Your question further prompted me to compare Ophelia’s vocabulary with that of Basic English. Of the 850 words in the latter’s “core vocabulary,” she has 166 by my count, i.e. 20 percent. And her slice of the pie is in practice even smaller than that, for Basic English counts all forms of a verb as representatives of one word, so that “come” in Basic English includes “comes,” “coming,” and “came,” whereas Ophelia has only “come” and “comes.” The same goes for singular and plural forms of nouns. Ophelia normally has one or the other, and the rare cases where she has both are not always the luckiest: “watchman/watchmen,” for example. So she’s working with maybe 10 percent or less of the vocabulary that would be available to her in Basic English, plus about 300 words of limited usefulness. I’d expect this is near to the limit for writing a narrative of close on 40,000 words. I hope it is, as my intention was to work at that point where continuous prose is still just feasible, where there could be, at the same time, versatility and strain. 
To tighten the screws further, one might have to go to, say, Guildenstern, who has just 175 words and probably no more than five per cent of Basic English. “Believe me,” says Guildenstern, “and it is some affliction to find you do not have the word for the very thing you would most love to say!” And he’s right. I would not like to have to write a whole novel in Guildensternian, quite apart from the fact that he isn’t, poor chap, such a fascinating character. SE: At times in the book you add another constraint on top of the Ophelian vocabulary, as, for instance, when you compose the sonnets that are found in let me tell you. Is this something you have been interested in experimenting with, adding localized constraints into more generally constrained works? PG: There are several subsidiary constraints in let me tell you. Some of them are quite evident and, as I see it, integral to the experience of reading the book. But other sub-constraints are less apparent. For example, I took it that the primary constraint, that Ophelia is limited to the words she has in the play, would be global, i.e. that all the characters would be similarly constrained to their vocabularies in the play—except that, because they’re expressing themselves within Ophelia’s narrative, they’re constrained to the intersection of their vocabularies with hers, to the words they hold in common. For example, one chapter includes several letters from Polonius, and these use only words that are spoken in the play both by Ophelia and by him. These letters, which she quotes in an entirely positive light, showing how fond she is of her father, allow her unknowingly (though, of course, we know) to anticipate and defuse one of the most disturbing moments in the play, when Polonius reads out to the royal couple a letter to her from Hamlet. Other characters, too, use only the vocabularies they share with Ophelia. 
This comes to the surface in a dialogue that has Ophelia inviting Gertrude to consider the words she is using, the words they have in common (as women, we might want to add). One reason to work with these further restrictions was to up the ante, make the writing even more difficult and the result, in some cases, more comic, for humor is often another happy product of constrained writing. But also, I felt that Polonius and the rest could not properly be themselves in my book if they used words that were not theirs. And then there were often curious features of these various intersections, as with the Ophelia-Gertrude example. For instance, of the six characters with whom I was concerned—the only ones Ophelia has any possibility of mentioning, namely Hamlet (his is the only name she has, but she can also call him “the young lord”), Claudius (“the king”), Gertrude (“the king’s lady”), Polonius (“my father”), Laertes (“my brother”), and Ophelia herself—only Claudius lacks “god.” 156 of Ophelia’s words, almost one in three, are unique to her among these characters, words that include “morning” and “profound” and “composed” and “key,” as well as the more easily predicted “rosemary” and “paconcies.” Words that she shares only with Hamlet are also quite interesting: “answers,” “glass,” “sing,” “yours,” and so on. However, I didn’t feel I could make up words for the most celebrated and surely most enigmatic character in our literature, and so in let me tell you Hamlet never speaks in propria persona, only when he’s taking part in the play scene, which I thought had to be there, as in the source text. Hamlet’s silence then becomes one of the features of the book. And to keep him off center, but of course looming, I had Ophelia use the grypheme of his name only once. 
Of the approximately 1,170 words Ophelia speaks in the First Folio edition, 480 (41 percent) come in her mad scenes, and nearly all the rest are to do with Hamlet… In let me tell you, she gets the chance to talk about other people. She has, nevertheless, a whole chapter on him, the longest in the book. I should say that, because Ophelia’s vocabulary offers very few possibilities for indicating the passing of time, many of the chapters implicitly show her at different ages by concentrating on her relationship with one other person. With Laertes, she is a young child, and the chapter is punctuated by nursery rhymes that cannot properly be completed. With Hamlet, she is an adolescent, beginning to gain an adult’s understanding. Her preoccupation with Hamlet is, of course, another matter cued by the play. Of the approximately 1,170 words Ophelia speaks in the First Folio edition, 480 (41%) come in her mad scenes, and nearly all the rest are to do with Hamlet, whether she’s talking about him to her brother and her father, placed with him in their single two-handed scene, expressing her concern to the queen, or surviving his teasing in the play scene. In let me tell you, she gets the chance to talk about other people. Some of them are characters I added to the cast, like the mother. The other important ones are the Polonius family’s maid and a wise woman, the Lady Profound. Part of their function is to alert Ophelia to the fate awaiting her—the maid outlines to her the whole action of the play—and the story of the book then becomes one of whether she will or can escape that fate. Despite these additions to the dramatis personae, there are very few people in let me tell you, which takes place largely in a world quite separate from the court where Hamlet will unfold. 
Again created partly by the vocabulary, it is a natural world, close by a mountain (“White his shroud as the mountain snow,” Ophelia sings in the play, “Larded with sweet flowers,” introducing words crucial to how her environment is depicted in let me tell you). Only in the penultimate chapter does Ophelia go to the royal residence–and then, in the final chapter, beyond. You mention the sonnets. Each of these begins with an iambic pentameter from a Shakespeare play, a line that Ophelia could have spoken. An example is “I cannot tell what you and other men,” which is spoken by Cassius to Brutus in Julius Caesar. It just occurs to me that it might be interesting to go through Hamlet with a view to how much of it Ophelia could say—even translate it into Ophelian. Here, for example, is one moment: “To be, and not to be: are these the answers, then?” There are also constraints in let me tell you that have nothing to do with vocabularies. For example, a couple of songs are set to a tune that seems to have been in Shakespeare’s head from time to time, not least when he was writing for the distracted Ophelia. SE: Do you have any information on how many times you use each of Ophelia’s words in let me tell you? PG: I don’t know too much about word frequencies in let me tell you, but some work was done on this by a Shakespeare scholar in England, Peter Kirwan. It was difficult enough to write this book with the vocabulary involved, and with the further restricted vocabularies of the other characters, without adding too many constraints with regard to how often or when particular words could be used. Some, though, there were. They fall into two groups. First, because I’d made it a rule that the book would have to include Ophelia’s vocabulary in toto, I had to find uses for some tricky customers. Limiting the use of archaic words and contractions I don’t regard as a constraint, more a necessity. 
For instance, if the book were to read as being in something like modern English, I had to limit the appearance of old forms of the second person singular pronoun: “thee,” “thou,” “thy.” Happily, there are parts of England where these forms are still just about current, and so I could give them all to the maid with the implication she came from such a region. Things like “gyved” and “gis” also had to be naturalized in some way. There were words I did manage to measure out—which you can only do if you’re dealing with a fixed vocabulary. There are not many of these—only three—and they are for the reader to discover. One becomes the last word in the book. On the level of form, it’s like the capstone on a pyramid; the pattern is now complete. At the same time, again in a formal sense, it’s a kind of hinge, between what Ophelia has said and what she has not yet said, the point where the book meets the non-book. Being so crucial, it had to be a word, too, that would carry a weight of meaning and implication. SE: How was it that you became intrigued by the possibility of working Oulipian constraint into your work? What were your first experiments with this method? PG: You invite an autobiographical answer, which I am happy to supply, in order to give credit where it is due. We have to go back to my second year in high school, 1959-60, when our English teacher was John A. Hall. One week, our assignment from him was to write a story in which six objects were to appear: a broken television tube, a bubble car (already there is a sense of the period), a green-eyed yellow idol, and I can’t remember what the other three were. I, at the age of 11 or 12, thought this was absurd and even demeaning—a totally arbitrary cramp on our imaginations and a joke he was playing on us. Just the same, I quite enjoyed writing my story, and arriving at the required objects either stealthily or with panache. 
The big surprise, however, came at the next lesson, when he invited several of us to read out our stories. They were all completely different. More than that, I could recognize, in the story one of my friends had come up with, how it spoke of him, of his personality. That was my first experience of writing under constraint. At almost the same time, a certain literary group was being founded in Paris, but I don’t think I had any awareness of the Oulipo until the later seventies, when I discovered Calvino, and I knew nothing of Perec until the English translation of La Vie mode d’emploi came out, in 1987. Calvino and Perec, and a few others, made it possible to stand up again after being bowled over by Beckett. SE: That’s really quite a surprise, that the first you heard of Perec was when David Bellos’s translation of La Vie mode d’emploi arrived in 1987. I believe that was the first of Perec to be translated into English, and this certainly speaks to how important translation is to the movement of culture across national and linguistic boundaries. Could you share with us some of those translations that were especially meaningful to you during that period? PG: #1: Shakespeare. Not a translation, you may say, but reckonable here because, though my mother took me to my first Shakespeare play when I was eight, and though I read the canon two or three years after that, my introduction was by way of the Lambs’ recounting of many of the plays as tales. #2: Homer. I encountered the Iliad during this same period by way of “Classics Illustrated,” a US series delivering high literature by way of comic books, which I seized on. There they were, at the newsagent’s, in a not at all affluent part of a largely practical city (Birmingham), ready to open the universe to anyone who could come up with sixpence. Because of that introduction, I was primed to move on a little later to E.V. 
Rieu’s prose translations of both the Homeric epics for Penguin Classics, and through that imprint to so much more. #3: Hugo. Another issue of “Classics Illustrated,” The Hunchback of Notre Dame, powerfully troubled me not so much for the images of the title character as for the offstage presence of something utterly strange to me, something that, of course, I could later identify as sex. This was another kind of introduction, to the category of books that, ringed with fire, you will never read, never be able to open. #4: Beckett. Waiting for Godot at the Oxford Playhouse when I was 18 or 19 made me read everything of his I could find. #5: Borges. He came to Oxford in 1971 to receive an honorary doctorate. I missed seeing him, but I bought a copy of Fictions—and then, again, everything else available. #6: Calvino. Not sure how I found this one. Certainly I was ready at the bookshop when If on a winter’s night… came out, in 1981. And I remember reading Mr. Palomar when it was new, four years later, on the sidewalk on Fifth Avenue, right outside Scribner’s Bookstore (currently a cosmetics outlet). #7: Perec. Unknown to me before the appearance of Life, a User’s Manual, in 1987. Or perhaps, rather, there was a big space in my mind waiting for him to fill. SE: I’d like to ask you about the short note appended to the start of the book, ostensibly the words of Claudius, the King. It is a very curious thing, as Claudius discloses that they all are participants in a “play,” and he acknowledges that “we all have no more than the words that come to us in the play. We go on with these words. We have to.” It reminds me very much of the strange piece appended to the end of Coetzee’s Elizabeth Costello, which similarly breaks down the “fourth wall” as it were and hits similar notes about the trap that is language. Do you see a sort of liberation from the fact that we are in, in a manner, stuck with the words that come to us? 
PG: I didn’t intend to have such a preface, feeling that, if the thing ever got published, it would be known as “the novel in Ophelian,” just as nobody needs to be told that La Disparition lacks the letter “e.” However, when Harry Mathews said there should be a preface, I thought I had to do it, and so I came up with the idea of giving it to Claudius to voice. He, of course, can name Ophelia. Other characters could, too, but Claudius seemed the right choice, for several reasons. One was simply that he could sign his preface, as “The King”—words that also appear in Ophelian, so that the signature could be a kind of hinge (again) into the main body of the narrative. Thanks for the mention of Coetzee, whom of course I admire immensely. I have a first U.S. edition of Elizabeth Costello (2003), and I think I wrote Claudius’s preface in 2005, so the example may have been at the back of my mind. More to the front was Pagliacci. Yes, Claudius has stepped out from the play, in a move he feels to be highly unusual and dangerous. Ophelia is by no means so obviously aware of being a play character; indeed, the matter of her awareness is central to her course through the book. Breaking down the fourth wall, as you put it—demystifying the experience, inviting the audience to engage with the work as an artifact, letting people in on what you’re doing—is a big part of what I (and I am hardly alone here) feel to be part of the inheritance. Constrained writing offers valuable help in this; the reader stands informed of the project and the means. The liberation, or the stimulus to find things you never could have found any other way, is a whole other side of it, and I’ve already remarked on this. But what I haven’t mentioned is how the constraint validates some very simple things, things that it’s very hard now to express, given the age of our literature, but that, because they come out of the constraint, can be said again, freshly, and can even make their mark again, freshly. 
It’s an example of how a highly artificial mode of writing can come up with things that seem natural, fundamental, unmediated. SE: When I think of the very best writing, economy comes to mind as a principle factor. I feel like this is a thing a writer often reaches indirectly, arriving there, as it were, with some sort of help, or only after a long period of work and development. let me tell you strikes me as a very, very compressed text. What is the effect of constraint on economy in your literature? PG: Thank you for mentioning the compression of let me tell you. I think this does come from the constraint, in several ways. Most straightforwardly (in a sense), the constraint decelerates the writing process, so that all kinds of thoughts and angles come into consciousness in the course of putting together a single sentence, and perhaps some of that mental activity will be reflected in the sentence. Secondly, the constraint may force a slightly oblique or unusual or otherwise particular turn of phrase, which may, by those qualities, imply more than it states. The very first sentence of let me tell you could be an example: “So: now I come to speak.” This has, I hope, the sense of a forcible beginning (people often use “so” in this way: “So, how did you two get together?”), but also it places an emphasis on the first person singular pronoun and suggests, by the phrase “come to,” a length of time during which this person was not speaking, leaving it open whether her silence was chosen or enforced. Thirdly, the constraint made it exceedingly difficult to write long sentences, such as I often like to do, and that fact alone may give the text compactness. The average sentence length in let me tell you is 12.4 words, whereas in an earlier piece of fiction of mine it’s 30.3 words. 
Also under this heading, the constraint allowed or encouraged me to write some very short sentences, such as the second in the book: “At last.” Because this amplifies what has just been said, there’s a close interlocking of the two sentences, and that, too, will tend to increase the concentration. On a possibly less elevated plane, I learned a lot about economy through working as a newspaper music critic, constantly between the ages of 24 and 44. SE: I’ve noticed that throughout this interview you have used the terms, Ophelian, Claudian, Gertrudian, etc. Do you to some extent consider these languages, or linguistic systems? Do you continue to write in them? And I’m curious if by now you’re able to compose in Ophelian without having to check your words against any sort of a list, whether you have a feel for what it is and how it sounds, what is possible in it and what is not. PG: Indeed I have come to feel Ophelian to be a language in itself, and not just a small and scattered subset of English. I have had these words in my head for such a long time that I know how they play, where they wish to go, and where they cannot. And you may see, as you take in these words, that they have a way of their own. As just there, I can switch into Ophelian with a lot more facility than at the start, more than 25 years ago—though I do still have to check the wordlist occasionally—and yes, I have gone on writing in this language from time to time, anything from little sketches to another whole novel, similar in length to let me tell you and nearing completion. A constraint opens up a whole field of potential literature. I’d also like to point out the often overlooked aspect of Oulipo contained in the “po” bit: Ouvroir de littérature potentielle. A constraint—and this is another way in which it’s a liberation—opens up a whole field of potential literature. 
La Disparition instances the novel-without-an-e, creating a type that includes forerunners (Ernest Vincent Wright’s Gadsby and others that might come to light) and successors; on a larger plane, it introduces the lipogrammatic novel more generally, so that a novel without an “a,” or a novel without a “t,” or a novel with neither “a” nor “t,” would all be part of its progeny. Of course, the potential does not have to be realized; it exists even if nobody writes a lipogrammatic novel again. In a similar way, let me tell you instances the novel in Ophelian and, beyond that, the novel in a vocabulary taken from some other character, whether in Hamlet, in Shakespeare, or in another source, or limited in a different way—and not only the novel but any piece of writing. One might imagine a philosophical or theological treatise in Ophelian, or an epic poem, or a book of children’s stories. I would be curious to see what someone else might come up with writing in this language; perhaps another writer would find totally different ways of using it. However, nobody since Perec has, as far as I know, published a lipogrammatic novel, so perhaps in the Ophelian case, too, the potential will remain potential. The following interview appears in Music & Literature No. 7.
def search_iterator(lines, start=0):
    """Find C-style iterator operators in a sequence of text lines.

    Scans ``lines[start:]`` for each operator token in turn and records
    every line that contains it.

    Args:
        lines: Sequence of strings to search.
        start: Index at which scanning begins (default 0).

    Returns:
        A list of ``(line_index, operator)`` tuples, grouped by operator
        in the order ("--", "++", "+=", "-="); within each operator the
        indices appear in ascending order. A line containing several
        operators is reported once per operator.
    """
    # Token order matters: it fixes the grouping order of the result.
    operator_tokens = ("--", "++", "+=", "-=")
    matches = []
    for token in operator_tokens:
        matches.extend(
            (position, token)
            for position in range(start, len(lines))
            if token in lines[position]
        )
    return matches
Schizophrenia and Violence: Systematic Review and Meta-Analysis Seena Fazel and colleagues investigate the association between schizophrenia and other psychoses and violence and violent offending, and show that the increased risk appears to be partly mediated by substance abuse comorbidity. Introduction In the 1980s, expert opinion suggested that there was no increased risk for violence in individuals with schizophrenia and other psychoses . However, with the publication of large population-based studies over the last two decades, it is now thought that there is a modest association between violence and schizophrenia and other psychoses . This view is not shared by many mental health clinicians or public advocacy groups. For example, a recent joint public education campaign by three leading UK mental health charities contends that the view that people with mental health problems are violent is a myth , and the National Alliance on Mental Illness in the US asserts that acts of violence by the mentally ill are ''exceptional'' . In factsheets, the Schizophrenia and Related Disorders Alliance of America states that people with schizophrenia are no more likely to be violent than their neighbours , and SANE Australia state that people with mental illness who receive treatment are no more violent than others . The issue remains topical because it is thought to have contributed to policy and legal developments for psychiatric patients and the striking increase in the number of secure hospital patients in many Western countries (alongside sex offender legislation) . It also contributes to the stigma associated with mental illness , which is considered to be the most significant obstacle to the development of mental health services . 
Although there have been a number of studies examining the relationship between the psychoses and violent outcomes, there are wide variations in risk ratios reported with estimates ranging from 7-fold increases in violent offending in schizophrenia compared with general population controls to no association in a highly influential prospective investigation . Previous reviews of the literature have not been quantitative or have not systematically explored the grey literature . In addition, they have included selected samples, such as investigations solely of homicide offenders (who are more likely to have psychoses than other offenders) , and have not explored potential sources of heterogeneity. We report a systematic review of investigations examining the risk of schizophrenia and other psychoses for violent outcomes including homicide. We explored the reasons for variations between the primary studies using metaregression. We aimed to test whether risk estimates differed by gender, diagnosis (schizophrenia versus other psychoses), outcome measure (criminal convictions versus self-report or informant based information), country location (US or Nordic countries versus the rest of the world), study design (case-control versus longitudinal), and study period. In addition, we have conducted a systematic review of studies examining the risk of schizophrenia in homicide offenders. Methods Computerised Medline, Embase, and Psycinfo searches were performed from January 1970 to February 2009 using the terms viol*, crim*, homicide, schiz*, severe mental illness, major mental disorder, psychos*, and psychot*. References were hand searched for other references, including to grey literature, and non-English language publications were translated. In order to supplement the search of grey literature, US National Criminal Abstracts was searched as well as an extensive bibliography on crime and mental disorder prepared for the Public Health Agency of Canada . 
We contacted authors of published studies for additional information as required. MOOSE guidelines (Meta-analyses of Observational Studies in Epidemiology, http://www.consort-statement.org/ index.aspx?o = 1031) were followed. Our inclusion criteria included case-control studies (including cross-sectional surveys) and cohort studies, which allowed an estimation of the risk of violence in patients with schizophrenia and/or other psychoses compared with a general population comparison group. Reports were excluded if: (i) Data were presented solely on all convictions not broken down for violence . (ii) There was no general population comparison data . Studies that used other psychiatric diagnoses as the comparator group were also excluded . (iii) Data were superseded by subsequent work and inclusion would involve duplication of data . In one of these studies , data were used for the subgroup analysis on whether outcomes were different by diagnosis of cases (schizophrenia versus nonschizophrenic psychoses). In another, data for women were used from the older publication because it was not included in the updated work . (iv) The cases included diagnoses of nonpsychotic illnesses such as personality disorder and major depression . However, we included one study where the proportion of psychoses was 95% . We conducted a separate analysis of homicide only studies. For this analysis, studies were excluded if information on controls was taken from a different country and another time period or no data on controls were provided . For one of the included studies , state population data were specifically gathered from a government agency , and for another , data on homicides were specifically extracted for the purposes of this review. 
Data Extraction A standardised form was used to extract data, which included information on the study design, geographical location of study, last year of follow-up for violence (''study period''), diagnoses of cases, definition of violence, method of ascertainment of violence, sample size, mean age, adjustment for socio-demographic factors, and, in the cases, numbers with comorbid substance abuse. For those studies with comorbid substance abuse data, we also extracted data on primary and secondary diagnoses of substance abuse in the population controls (and in two comparisons , these were extracted from data based on separate publications ). Where possible, the control group was a population of individuals without any mental disorders. If data were available for both schizophrenia and nonschizophrenic psychoses, the former was used for the primary analyses. For the purposes of analysis, study design was explored as a dichotomous variable (case-control versus longitudinal) where nested case-control study were included as case-control designs, and also all three designs were compared (case-control versus nested case-control versus longitudinal). Longitudinal designs referred to studies where violence was assessed after diagnosis had been established. Study location was analyzed in two ways: Nordic countries versus the rest of the world, and the US versus the rest of the world. The analysis was done in this way because many of the studies were conducted in three Nordic countries (Sweden, Denmark, Finland) because of the availability for research of national registers for health and crime, and the possibility that the gun ownership laws and higher base rates of violence in the US lead to different risk estimates than other countries . Sample size was analyzed as a continuous variable (for the metaregression) and by numbers of cases in three groups (0-99, 100-1,000, and .1,001 cases) for subgroup analysis. 
Outcome measures were analyzed as a dichotomous variable: register-based versus self-report and/or informant interview. Study period was assessed by those reports where last year of follow-up was before 1990 and those on or after 1990. Gender was included in the metaregression analysis as a trichotomous (male, mixed, and female studies separately) and dichotomous (male and mixed studies combined versus female) variable. Suitability for inclusion was assessed and data extraction conducted independently by two researchers (SF and GG), and any differences resolved with discussion with the other authors. Data Analyses Meta-analyses of risk of violent outcomes were carried out generating pooled odds ratios (ORs) with 95% confidence intervals (CIs). Heterogeneity among studies was estimated using Cochran's Q (reported with a χ²-value and p-value) and the I² statistic, the latter describing the percentage of variation across studies that is due to heterogeneity rather than chance , with 95% CIs . I², unlike Q, does not inherently depend upon the number of studies considered with values of 25%, 50%, and 75% taken to indicate low, moderate, and high levels of heterogeneity, respectively. We explored the risk associated with substance abuse comorbidity separately by presenting estimates of risk ratios of schizophrenia and related psychoses with comorbidity, and without comorbidity. As others have noted, adjustment by substance abuse is not appropriate as it exists on the causal pathway between schizophrenia (exposure) and outcome (violence) . We calculated adjusted ORs by socio-demographic factors when stratum-specific estimates were given using the Mantel-Haenszel method . We calculated population attributable risk fractions for the studies that reported on number of crimes in the samples investigated. 
We opted for individual counts of crime rather than number of convicted individuals for this analysis as it has been demonstrated that the number of crimes per conviction is significantly higher in individuals with severe mental illness than other offenders . Hence, using crimes more accurately captures the population impact of violent criminality. For this analysis, the base rate r was defined as the number of separate violent crimes committed per 1,000 in the general population. r₀ was defined as the number of violent crimes per 1,000 individuals who had not been patients with schizophrenia. We then calculated the population-attributable risk as the difference r − r₀ and the population-attributable risk fraction as population-attributable risk/r. These data were not synthesized because of their heterogeneity. Potential sources of heterogeneity were investigated further by metaregression analysis, subgrouping studies according to their inclusion criteria, and methodological factors. All subgroup analyses involved nonoverlapping data and used random-effects models. For metaregression analyses, male, female, and mixed-gender studies were included. All factors were entered individually and in combination to test for possible associations. Analyses were done in STATA statistical software package, version 10 (Statacorp, 2008) using the metan (for random and fixed-effects meta-analysis), metareg (for metaregression), and metabias (for publication bias analysis). Female Studies Six studies provided risk estimates in female samples in 5,002 individuals with schizophrenia and other psychoses ( Figure 1) . The random-effects pooled crude OR was 7.9 (95% CI 4.0-15.4), and the fixed-effects crude OR was 6.6 (5.6-8.0). These estimates were associated with high heterogeneity (I² = 86%). Three additional studies that included 256 women with schizophrenia made no material difference to the risk estimates (random-effects pooled OR = 7.7; 4.2-14.1) . 
These studies were excluded from sensitivity analyses as the base rate of violence was zero in the cases or the controls , and thus led to unstable risk estimates. Heterogeneity We examined possible differences between risk estimates by various characteristics (Table 1). There was no significant difference in the risk estimates by diagnostic criteria of the cases. This result was demonstrated in three different ways. When including all the studies, the OR for risk of violence in individuals with schizophrenia was 5.6 (4.1-7.6) compared with the risk in nonschizophrenia psychoses where the OR was 4.9 (3.6-6.6). When this analysis was limited to those studies that reported both diagnoses, there was no difference (OR = 6.3, 3.9-10.1 in schizophrenia versus OR = 5.2, 3.6-7.4 in nonschizophrenia psychoses) ( Figure 4). When it was limited to men and those that reported both diagnoses, those with schizophrenia had a risk estimate of 4.0 (3.0-5.3), similar to individuals with nonschizophrenia psychoses, where the OR was 4.0 (3.3-4.8). There was no difference in risk estimates depending on type of outcome measure (criminal convictions and arrest data versus self- and informant-report; Figure 5). In the male-only studies, the OR was 4.1 (3.0-5.5) for the register-based outcomes, whereas it was 3.0 (1.6-5.8) for the investigations where self-report and informants were used to determine outcome. These were both associated with substantial heterogeneity (I²'s of 88% and 70%, respectively). There was only one study where risk estimates on both outcomes were reported . There was no evidence of any difference in risk estimates by region when comparing studies conducted in Nordic reports with those from other countries (Table 1), or when the studies based in the US were compared with the rest of the world. In the male-only studies, the Nordic ones reported an OR of 4.4 (3.5-5.4) compared with the rest of the world where the OR was 3.8 (2.6-5.5). 
There was no significant difference in risk estimates for the other study characteristics: study period and study size. Nonsignificant differences by study type were found: longitudinal studies reported lower risk estimates (OR = 3.8, 2.6-5.5) but this was based on only four samples ( Figure 6). Furthermore, there was some evidence of publication bias using Egger's test (t = 2.17, p = 0.04) but not with a funnel plot analysis (z = −0.31, p = 0.76). This finding was replicated when we combined the results for gender and used the publication as the unit of measurement (Egger's test, t = 2.75, p = 0.013; funnel plot, z = −0.39, p = 0.70). In the metaregression analysis with all the studies included, none of these study characteristics apart from substance abuse was statistically significant (individually or in a model where all factors were entered into simultaneously). Study type as a dichotomous variable (longitudinal versus case-control) was associated with some heterogeneity on metaregression when all factors were included in the model (b = −1.12, t = −1.57, p = 0.12). When the analysis was restricted to the male and mixed gender studies, the association almost reached statistical significance (b = −1.64, t = −2.44, p = 0.051). Substance Abuse and Violent Crime In men, there were five studies where the risk of violence was reported both in individuals with schizophrenia and other psychoses who have comorbid substance abuse, and in individuals with substance abuse alone . In comparing these risk estimates, there was no apparent difference (Figure 7). We also compared all psychoses studies (irrespective of comorbidity) with those that reported risk of violence in individuals with a diagnosis of substance use disorders (Figure 8). Substance use disorders were associated with higher risk estimates, although the finding was nonsignificant using a random-effects model. 
Using fixed-effects, the OR in individuals with psychosis was 3.3 (3.0-3.5) compared with 5.5 (5.4-5.6) in substance abuse. Homicide as Outcome We identified five studies that reported on the risk of homicide in individuals with psychosis compared with the general population ( Figure 9) . There were 261 homicides committed by individuals with schizophrenia and other psychoses compared with 2,999 in the comparison group. The risk of homicide in individuals with schizophrenia was 0.3% compared with 0.02% in the general population. The random-effects pooled OR was 19.5 (14.7-25.8), with significant heterogeneity (I 2 = 60%; 0%-85%). Within these studies, we compared these estimates with the two studies that reported on risk of homicide in persons diagnosed with substance abuse. The risk of homicide in individuals with substance abuse was 0.3%, with a random-effects pooled OR of 10.9 (3.4-34.9). Discussion This systematic review of the risk of violence in schizophrenia and other psychoses identified 20 studies including 18,423 individuals with these disorders. There were four main findings. The first was that the risk of violent outcomes was increased in individuals with schizophrenia and other psychoses. The risk estimates, reported as ORs, were all above one indicating an increased risk of violence in those with schizophrenia and other psychoses compared with the general population controls, although the risk estimates varied between one and seven in men, and between four and 29 in women. A second finding was that comorbidity with substance use disorders substantially increased this risk, with increased ORs between three and 25. Although there was considerable variation in this estimate between studies, the pooled estimate was around four times higher compared with individuals without comorbidity. Third, we found no significant differences in risk estimates for a number of study design characteristics for which there has been uncertainty. 
These included: whether the diagnosis was schizophrenia versus other psychoses, if the outcome measure was register-based arrests and convictions versus self-report, and if the study location was the US or Nordic countries compared with other countries. Finally, the increased risk of violence in schizophrenia and the psychoses comorbid with substance abuse was not different than the risk of violence in individuals with diagnoses of substance use disorders. In other words, schizophrenia and other psychoses did not appear to add any additional risk to that conferred by the substance abuse alone. We found higher risk estimates in the female-only and mixed gender studies compared with the general population, although these estimates were not significantly higher than male-only estimates using random-effects models. The higher risk estimates in women may be a consequence of the lower prevalence of drug and alcohol use in the general female population compared with the general male population, and so violence associated with other causes, including schizophrenia, would be overrepresented in the women . Although other work has demonstrated a closing of the gender gap in rates of violence from patients discharged from psychiatric hospitals , this present systematic review has shown that risk of violence by gender is reversed compared with general population prevalence rates of violence. In addition, we found only five studies that compared risk of homicide in individuals with schizophrenia compared with the general population. Although the heterogeneity was large, the risk estimates were considerably higher than those for all violent outcomes. Although the risk of any individual with schizophrenia committing homicide was very small at 0.3% and similar in magnitude to the risk in individuals with substance abuse (which was also 0.3%), it does indicate a particularly strong association of psychosis and homicide. 
It may also reflect the better quality of these studies, including better ascertainment of cases. Apart from homicide, risk estimates do not appear to be elevated with the increasing severity of violent offence in individuals with psychosis . There were several potentially important negative findings. In particular, Nordic-based or US-based investigations did not provide different risk estimates than the rest of the world. This finding would argue against the suggestion that the association between mental illness and violent crime is modified by variations in population base rates of violence or the availability of handguns. Lastly, there was no difference in risk estimates produced by studies conducted before and after 1990. Although deinstitutionalization would have occurred at different dates in the included studies, this finding may support the conclusions of two related investigations in the Australian state of Victoria that demonstrated that violent convictions have not increased in recent decades compared to these offences in the general population . Further research is needed to examine this issue. There are a number of limitations to this review. First, caution is warranted in the overall estimates provided in this review as there was significant heterogeneity. The lack of any explanation for this heterogeneity, apart from substance abuse comorbidity and possibly study design, suggests that methodological variations that we were not able to test may have been important. An alternative approach would be individual participant meta-analysis as it would provide some consistency across the potentially mediating characteristics. One notable finding was that, in all but three of the included studies, violence was assessed irrespective of the timing of the diagnosis of schizophrenia (i.e., violence before and after the diagnosis), which would overestimate the effects of the illness. 
There were three studies that used longitudinal designs (where violence was only included after diagnosis was established) , which provided lower risk estimates. Second, the overall pooled estimates will have overestimated the association because of inadequate adjustment for confounding and the use of a random-effects meta-analysis. A consequence of the latter was that risk estimates were less conservative than using a fixed-effects model, as the smaller studies were weighted more equally in the random-effects meta-analysis . For example, in the men, the pooled OR was 2.9 in the fixed-effects model compared with a random-effects estimate of 4.0. The fixed-effects odd ratio was further reduced to 2.0 when adjustment for socio-demographic factors was included, possible in only four out of 13 male studies. However, the use of random-effects estimates in the subgroup analysis led to more conservative findings because of larger CIs. Another limitation was there were no studies outside of the US, Northern Europe, Israel, Australia, and New Zealand potentially limiting the generalizability of the findings. However, we found no difference by study region (such as the US or Nordic countries compared with other countries), which would suggest that the findings are applicable to Western countries. However, the lack of any studies in low income countries is notable. A number of recommendations for future research arise from this review. Residual and inadequate confounding is likely to have affected the estimates produced by the primary studies because of inadequate measurement of exposures and confounders. For example, some of the studies adjusted for socio-economic status by using the profession of the father , while another used neighbourhood controls . More precise and reliable measures of confounders need to be included in future studies. 
One promising approach is to compare individuals with schizophrenia with unaffected siblings, and there is a recent study that found that the adjusted OR of violent crime for individuals with schizophrenia compared with their unaffected siblings was 1.8 (95% CI 1.3-1.8). When compared with general population controls matched for year of birth and gender, the adjusted OR was 2.0 (1.8-2.2) . In addition, how substance abuse mediates violent offending needs further study. Whether future work needs to rely on resourceintensive ways of gathering outcome data such as self-report measures or interviewing informants is questioned by this review, although prevalence rates will be higher when such approaches are used. In addition, health services research could further examine the role of different service configurations in reducing violence outcomes in these patients. In particular, the role of continuity of care should be investigated. Research has demonstrated no reduction in the prevalence of violence when intensive case management has been used compared with standard care , but alternative models of service delivery need study. Finally, perhaps the most important research implication is the need for better quality and larger randomized controlled trials for the treatment of substance abuse comorbidity in schizophrenia . A number of implications arise from this review. First, the findings highlight the importance of risk assessment and management for patients with substance abuse comorbidity. In those without substance abuse comorbidity, the risk of violent crime was modestly elevated with ORs ranging from 1 to 5. However, better adjustment for potentially relevant confounders and problems of misclassification (i.e., many of these patients may have undiagnosed and unreported substance abuse) would possibly reduce the observed risk. 
This effect has been demonstrated in a recent Swedish study where the adjusted OR was minimally raised (at 1.2) in individuals with schizophrenia and no comorbid substance abuse compared with general population controls . The relationship between comorbid substance abuse and violence in schizophrenia may be mediated by personality features and/or social problems, and is unlikely to be a simple additive effect . In support, one study demonstrated that rates of substance abuse have increased markedly in individuals with schizophrenia over 25 y, but rates of violence modestly. The authors concluded that a subgroup of people with schizophrenia at risk of violence have increasingly abused substances . The relationship with medication adherence may also mediate the association with violent outcomes, particularly if it precedes substance abuse on the causal pathway to violence. The data on medication adherence has reported associations with violence in naturalistic studies , but a recent analysis of the Clinical Antipsychotic Trials in Intervention Effectiveness (CATIE) trial data for violent outcomes found no overall association with violence . Further research is necessary to clarify the relationship between substance abuse, medication adherence, and violence. A second implication relates to attempts to redress the stigmatization of patients with schizophrenia and other psychoses that could be reconsidered in light of the findings of the risk of violence in substance use disorders . Our findings suggest that individuals with substance use disorders may be more dangerous than individuals with schizophrenia and other psychoses, and that the psychoses comorbid with substance abuse may confer no additional risk over and above the risk associated with the substance abuse. 
As substance use disorders are three to four times more common than the psychoses , public health strategies to reduce violence in society could focus on the prevention and treatment of substance abuse at individual, community, and societal levels . In summary, there is a robust body of evidence that demonstrates an association between the psychoses and violence. This association is increased by substance abuse comorbidity and may be stronger in women. However, the increased risk associated with this comorbidity is of a similar magnitude to that in individuals with substance abuse alone. This finding would suggest that violence reduction strategies could consider focusing on the primary and secondary prevention of substance abuse rather than solely target individuals with severe mental illness. Acknowledgments We are grateful to P. Lindquist, J. Modestin, and M. Soyka for further information about their studies; to D. Hoffmann, Information Officer, Federal Office of Criminal Investigation, Weisbaden, Germany, for information on violent arrest rates in Germany by age band; to K. Ebmeier and J. Enayati for translations from German; to R. Yu for assistance with the attributable risk data; and O. Pickering for assistance with accessing studies and translation from Danish. Editors' Summary Background. Schizophrenia is a lifelong, severe psychotic condition. One in 100 people will have at least one episode of schizophrenia during their lifetime. Symptoms include delusions (for example, patients believe that someone is plotting against them) and hallucinations (hearing or seeing things that are not there). In men, schizophrenia usually starts in the late teens or early 20s; women tend to develop schizophrenia a little later. The causes of schizophrenia include genetic predisposition, obstetric complications, illegal drug use (substance abuse), and experiencing traumatic life events. 
The condition can be treated with a combination of antipsychotic drugs and supportive therapy; hospitalization may be necessary in very serious cases to prevent self harm. Many people with schizophrenia improve sufficiently after treatment to lead satisfying lives although some patients need lifelong support and supervision. Why Was This Study Done? Some people believe that schizophrenia and other psychoses are associated with violence, a perception that is often reinforced by news reports and that contributes to the stigma associated with mental illness. However, mental health advocacy groups and many mental health clinicians argue that it is a myth that people with mental health problems are violent. Several large, population-based studies have examined this disputed relationship. But, although some studies found no increased risk of violence among patients with schizophrenia compared with the general population, others found a marked increase in violent offending in patients with schizophrenia. Here, the researchers try to resolve this variation (''heterogeneity'') in the conclusions reached in different studies by doing a systematic review (a study that uses predefined search criteria to identify all the research on a specific topic) and a meta-analysis (a statistical method for combining the results of several studies) of the literature on associations between violence and schizophrenia and other psychoses. They also explored the relationship between substance abuse and violence. What Did the Researchers Do and Find? By systematically searching bibliographic databases and reference lists, the researchers identified 20 studies that compared the risk of violence in people with schizophrenia and other psychoses and the risk of violence in the general population. They then used a ''random effects model'' (a statistical technique that allows for heterogeneity between studies) to investigate the association between schizophrenia and violence. 
For men with schizophrenia or other psychoses, the pooled odds ratio (OR) from the relevant studies (which showed moderate heterogeneity) was 4.7, which was reduced to 3.8 once adjustment was made for socio-economic factors. That is, a man with schizophrenia was four to five times as likely to commit a violent act as a man in the general population. For women, the equivalent pooled OR was 8.2 but there was a much greater variation between the ORs in the individual studies than in the studies that involved men. The researchers then used ''meta-regression'' to investigate the heterogeneity between the studies. This analysis suggested that none of the study characteristics examined apart from co-occurring substance abuse could have caused the variation between the studies. Importantly the authors found that risk estimates of violence in people with substance abuse but no psychosis were similar to those in people with substance abuse and psychosis and higher than those in people with psychosis alone. Finally, although people with schizophrenia were nearly 20 times more likely to have committed murder than people in the general population, only one in 300 people with schizophrenia had killed someone, a similar risk to that seen in people with substance abuse. What Do These Findings Mean? These findings indicate that schizophrenia and other psychoses are associated with violence but that the association is strongest in people with substance abuse and most of the excess risk of violence associated with schizophrenia and other psychoses is mediated by substance abuse. However, the increased risk in patients with comorbidity was similar to that in substance abuse without psychosis. A potential implication of this finding is that violence reduction strategies that focus on preventing substance abuse among both the general population and among people with psychoses might be more successful than strategies that solely target people with mental illnesses. 
However, the quality of the individual studies included in this meta-analysis limits the strength of its conclusions and more research into the association between schizophrenia, substance abuse, and violence would assist in clarifying how and if strategies for violence reduction are changed.
/**
 * Builds the stylesheet layer of output properties. When the caller
 * supplies explicit output properties they are exposed directly as an
 * unmodifiable map; otherwise a minimal mutable map is assembled from the
 * translet's own encoding and (when present) output method.
 */
private Map<String, String> createStylesheetProperties(Properties outputProperties) {
    if (outputProperties == null) {
        // No stylesheet/user-supplied properties: derive the base layer
        // from what the compiled translet declared.
        final Map<String, String> stylesheetLayer = new HashMap<>();
        stylesheetLayer.put(OutputKeys.ENCODING, _translet._encoding);
        if (_translet._method != null) {
            stylesheetLayer.put(OutputKeys.METHOD, _translet._method);
        }
        return stylesheetLayer;
    }
    // Wrap the supplied Properties read-only rather than copying them.
    return OutputPropertiesMapFactory.unmodifiableMapFromProperties(outputProperties);
}
import java.util.*;

public class Main {

    /**
     * Returns the length of the longest subsequence of {@code str} of the
     * form a* b* a* (a run of 'a's, then 'b's, then 'a's — any run may be
     * empty).
     *
     * Single O(n) DP replacing the original O(blocks^2) enumeration of
     * block-boundary split points; the answers are identical because an
     * optimal split always falls on a block boundary.
     */
    static int solve(String str) {
        int bestA = 0;    // longest subsequence of form a* seen so far
        int bestAB = 0;   // longest subsequence of form a* b*
        int bestABA = 0;  // longest subsequence of form a* b* a*
        for (int i = 0; i < str.length(); i++) {
            if (str.charAt(i) == 'a') {
                // An 'a' extends the first segment, or the third segment
                // (whichever prefix form is currently longer).
                bestABA = Math.max(bestAB, bestABA) + 1;
                bestA += 1;
            } else {
                // A 'b' extends the middle segment.
                bestAB = Math.max(bestA, bestAB) + 1;
            }
        }
        return Math.max(bestA, Math.max(bestAB, bestABA));
    }

    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        System.out.println(solve(in.next()));
    }
}
# -*- coding: utf-8 -*-
"""Tests for gevent file objects and pipe adapters.

Exercises FileObject (and SocketAdapter, when the gevent build provides
one) around ownership of the underlying file descriptor, the ``close``
keyword argument, and universal-newline reads.
"""
import os
import sys

import greentest
import gevent
from gevent.fileobject import FileObject, FileObjectThread

# PyPy lacks deterministic refcounting, so ``del`` cannot be relied on to
# close the wrapper promptly; those paths call close() explicitly instead.
PYPY = hasattr(sys, 'pypy_version_info')


class Test(greentest.TestCase):

    def _test_del(self, **kwargs):
        """Dropping the wrapper must close the write end it owns."""
        read_fd, write_fd = os.pipe()
        fobj = FileObject(write_fd, 'wb')
        fobj.write('x')
        fobj.flush()
        if PYPY:
            fobj.close()
        else:
            del fobj
        try:
            os.close(write_fd)
        except OSError:
            pass  # expected: the wrapper already closed the descriptor
        else:
            raise AssertionError('os.close(%r) must not succeed' % write_fd)
        self.assertEqual(FileObject(read_fd).read(), 'x')

    def test_del(self):
        self._test_del()

    def test_del_close(self):
        self._test_del(close=True)

    if FileObject is not FileObjectThread:

        def test_del_noclose(self):
            """With close=False the raw descriptor survives ``del``."""
            read_fd, write_fd = os.pipe()
            fobj = FileObject(write_fd, 'wb', close=False)
            fobj.write('x')
            fobj.flush()
            if PYPY:
                fobj.close()
            else:
                del fobj
            os.close(write_fd)
            self.assertEqual(FileObject(read_fd).read(), 'x')

    def test_newlines(self):
        """Universal-newline mode normalizes \\r, \\n and \\r\\n on read."""
        read_fd, write_fd = os.pipe()
        payload = ['line1\n', 'line2\r', 'line3\r\n', 'line4\r\nline5', '\nline6']
        producer = gevent.spawn(writer, FileObject(write_fd, 'wb'), payload)
        try:
            result = FileObject(read_fd, 'rU').read()
            self.assertEqual('line1\nline2\nline3\nline4\nline5\nline6', result)
        finally:
            producer.kill()


def writer(fobj, chunks):
    """Write each chunk to ``fobj``, flushing after every one, then close."""
    for chunk in chunks:
        fobj.write(chunk)
        fobj.flush()
    fobj.close()


try:
    from gevent.fileobject import SocketAdapter
except ImportError:
    pass
else:

    class TestSocketAdapter(greentest.TestCase):

        def _test_del(self, **kwargs):
            read_fd, write_fd = os.pipe()
            sock = SocketAdapter(write_fd)
            sock.sendall('x')
            if PYPY:
                sock.close()
            else:
                del sock
            try:
                os.close(write_fd)
            except OSError:
                pass  # expected, because SocketAdapter already closed it
            else:
                raise AssertionError('os.close(%r) must not succeed' % write_fd)
            self.assertEqual(FileObject(read_fd).read(), 'x')

        def test_del(self):
            self._test_del()

        def test_del_close(self):
            self._test_del(close=True)

        def test_del_noclose(self):
            read_fd, write_fd = os.pipe()
            sock = SocketAdapter(write_fd, close=False)
            sock.sendall('x')
            del sock
            os.close(write_fd)
            self.assertEqual(FileObject(read_fd).read(), 'x')


if __name__ == '__main__':
    greentest.main()
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 30 14:46:44 2020

@author: <NAME>

Run this script to train your model.

This script will detect faces via your webcam using multithread.
There should be no delay as a result of getting the faces from the model.
Tested with OpenCV.
"""
import torch
import joblib
# import numpy as np
import pandas as pd
from utils import features_training as ftrain
import facenet_pytorch as facenet
from torch.utils.data import DataLoader
from torchvision import datasets

# Pretrained embedding network and face detector used throughout.
net = facenet.InceptionResnetV1(pretrained='vggface2').eval()
mtcnn = facenet.MTCNN(image_size=260)
path = "data/train"


def collate_fn(x):
    # DataLoader yields one-sample batches; unwrap the (image, label) tuple.
    return x[0]


def unpack_loader(loader, mtcnn, p=False):
    """Run MTCNN over every sample in ``loader``.

    Returns a tuple of (stacked face tensors, class-index tensor,
    detection-probability tensor); samples with detection probability
    <= 0.7 are dropped. With ``p=True`` each detection is printed.
    """
    faces = []
    classes = []
    probs = []
    for x, y in loader:
        face, prob = mtcnn(x, return_prob=True)
        if p:
            print(f'Detected {loader.dataset.dataset.classes[y]:20} face with a probability of {prob:.3f}%')
        if prob > 0.7:
            faces.append(face)
            classes.append(y)
            probs.append(prob)
    return (torch.stack(faces),
            torch.tensor(classes),
            torch.tensor(probs)
            )


def plot_features(X, y):
    # Placeholder: feature visualization not implemented yet.
    pass


if __name__ == '__main__':
    df = datasets.ImageFolder(path)
    # 80/20 train/test split of the labelled image folder.
    train_n = int(len(df) * 0.8)
    test_n = len(df) - train_n
    train_df, test_df = torch.utils.data.random_split(df, [train_n, test_n])
    train_loader = DataLoader(train_df, collate_fn=collate_fn)
    test_loader = DataLoader(test_df, collate_fn=collate_fn)
    faces, classes, probs = unpack_loader(train_loader, mtcnn, True)
    faces_t, classes_t, probs_t = unpack_loader(test_loader, mtcnn)
    # Embed the detected faces; detach so the classifier search sees
    # plain tensors with no autograd history.
    features = net(faces).detach()
    features_t = net(faces_t).detach()
    # Cap cross-validation folds by the rarest class's sample count.
    classes_min = torch.bincount(classes).min().item()
    cv = min(classes_min, 4)
    search = ftrain.BestModel(cv=cv, params=ftrain.params)
    search.fit(features, classes)
    pd.DataFrame(df.classes).to_csv('results/classes.csv', header=['name'], index=False)
    joblib.dump(search.best_classifier, 'results/model.joblib')
    # Deployment hint (was a stray no-op string expression in the original):
    # gcloud beta ai-platform predict --model face --version v1 --json-instances filename.json
def make_saw_exception(ae: ArgoException) -> SAWException:
    """Wrap an ArgoException in its registered SAW-specific subclass.

    Looks the exception's error code up in ``error_code_table``; when no
    SAW-specific wrapper is registered for that code, the original
    ArgoException is re-raised instead of being returned.
    """
    exception_class = error_code_table.get(ae.code)
    if exception_class is None:
        raise ae
    return exception_class(ae)
/**
 *
 * A wrapper class that provides an iterator over the RocksDb map entries. It is compatible with Java's {@link Iterator}.
 *
 * Entries are read from a RocksDB iterator opened in the constructor and
 * deserialized lazily, one entry ahead, inside {@link #hasNext()}. Once the
 * underlying iterator is exhausted, {@link #hasNext()} eagerly releases the
 * native RocksDB resources; {@link #close()} covers the case where the
 * caller abandons iteration early. Not safe for concurrent use.
 *
 * Created by Maithem on 1/21/20.
 */
@NotThreadSafe
public class RocksDbEntryIterator<K, V> implements Iterator<Map.Entry<K, V>>, AutoCloseable {

    /**
     * A reference to the underlying RocksDb iterator
     */
    final private WrappedRocksIterator wrappedRocksIterator;

    /**
     * Serializer to serialize/deserialize the key/values
     */
    final private ISerializer serializer;

    /**
     * place holder for the current value (the prefetched entry; null when no
     * entry has been prefetched yet or the prefetched one was consumed)
     */
    private Map.Entry<K, V> next;

    /**
     * Whether to load the value of a mapping (when false, entries carry a
     * null value and only keys are deserialized)
     */
    private final boolean loadValues;

    // Owned native handle; must be closed together with the iterator.
    final private ReadOptions readOptions;

    /**
     * Opens a native iterator over {@code rocksDB} positioned at the first
     * entry.
     *
     * @param rocksDB    the database to iterate over
     * @param serializer used to deserialize raw key/value bytes
     * @param loadValues whether mapped values should be deserialized
     */
    public RocksDbEntryIterator(RocksDB rocksDB, ISerializer serializer, boolean loadValues) {
        // Start iterator at the current snapshot
        // NOTE(review): setSnapshot(null) means no explicit snapshot is
        // pinned; the iterator sees an implicit snapshot taken at creation
        // time — confirm against the RocksDB version in use.
        readOptions = new ReadOptions();
        readOptions.setSnapshot(null);
        this.wrappedRocksIterator = new WrappedRocksIterator(rocksDB.newIterator(readOptions));
        this.serializer = serializer;
        wrappedRocksIterator.seekToFirst();
        this.loadValues = loadValues;
    }

    /**
     * Convenience constructor that always deserializes values.
     */
    public RocksDbEntryIterator(RocksDB rocksDB, ISerializer serializer) {
        this(rocksDB, serializer, true);
    }

    /**
     * {@inheritDoc}
     *
     * Prefetches the next entry (deserializing it) when one is available.
     * As a side effect, once the underlying iterator is fully consumed this
     * method closes the native iterator and its ReadOptions, so a fully
     * drained iterator needs no explicit close().
     */
    @Override
    public boolean hasNext() {
        if (next == null && wrappedRocksIterator.isOpen() && wrappedRocksIterator.isValid()) {
            // Retrieve entry if it exists and move the iterator
            K key = (K) serializer.deserialize(Unpooled.wrappedBuffer(wrappedRocksIterator.key()), null);
            V value = loadValues ? (V) serializer
                    .deserialize(Unpooled.wrappedBuffer(wrappedRocksIterator.value()), null) : null;
            next = new AbstractMap.SimpleEntry(key, value);
            wrappedRocksIterator.next();
        }
        if (next == null && wrappedRocksIterator.isOpen()) {
            // close the iterator if it has fully consumed.
            wrappedRocksIterator.close();
            readOptions.close();
        }
        return next != null;
    }

    /**
     * {@inheritDoc}
     *
     * Returns the entry prefetched by {@link #hasNext()} and clears the
     * prefetch slot so the following call advances again.
     */
    @Override
    public Map.Entry<K, V> next() {
        if (hasNext()) {
            Map.Entry<K, V> res = next;
            next = null;
            return res;
        } else {
            throw new NoSuchElementException();
        }
    }

    /**
     * {@inheritDoc}
     *
     * Safe to call after full consumption: the isOpen() guard prevents a
     * double close when hasNext() already released the native resources.
     */
    @Override
    public void close() {
        // Release the underlying RocksDB resources
        if (wrappedRocksIterator.isOpen()) {
            wrappedRocksIterator.close();
            readOptions.close();
        }
    }
}
/**
 * Build an {@link A_Lexer} from a filter primitive and a body primitive,
 * record it under the named special atom's bundle, and add it to the
 * lexical scanner used for parsing module headers.
 *
 * @param filterPrimitive
 *        The primitive that decides, from a candidate's first character,
 *        whether this lexer should be attempted.
 * @param bodyPrimitive
 *        The primitive that constructs a tuple of tokens at the current
 *        position — usually zero or one, with more indicating ambiguity
 *        within the lexer.
 * @param atomName
 *        The name of the {@link A_Atom} under which to record the lexer.
 */
private static void createPrimitiveLexerForHeaderParsing (
	final Primitive filterPrimitive,
	final Primitive bodyPrimitive,
	final String atomName)
{
	final A_Function filterFunction = createFunction(
		newPrimitiveRawFunction(filterPrimitive, nil, 0),
		emptyTuple());
	final A_Function bodyFunction = createFunction(
		newPrimitiveRawFunction(bodyPrimitive, nil, 0),
		emptyTuple());
	final A_Atom lexerAtom = createSpecialAtom(atomName);
	final A_Bundle lexerBundle;
	try
	{
		lexerBundle = lexerAtom.bundleOrCreate();
	}
	catch (final MalformedMessageException e)
	{
		// Special atom names are compile-time constants, so a malformed
		// message here is a programming error, not a runtime condition.
		assert false : "Invalid special lexer name: " + atomName;
		throw new RuntimeException(e);
	}
	final A_Method lexerMethod = lexerBundle.bundleMethod();
	final A_Lexer headerLexer = newLexer(
		filterFunction, bodyFunction, lexerMethod, nil);
	moduleHeaderLexicalScanner.addLexer(headerLexer);
}
/**
 * Tab panel showing the content (shadow accounts/entitlements) of a
 * resource, either as cached in the midPoint repository or as read live
 * from the resource. The user can switch between the two modes and narrow
 * the view by intent or by raw object class.
 *
 * @author katkav
 * @author semancik
 */
public class ResourceContentTabPanel extends Panel {
    private static final long serialVersionUID = 1L;

    private static final Trace LOGGER = TraceManager.getTrace(ResourceContentTabPanel.class);

    enum Operation {
        REMOVE, MODIFY;
    }

    private static final String DOT_CLASS = ResourceContentTabPanel.class.getName() + ".";

    // Wicket component ids.
    private static final String ID_INTENT = "intent";
    private static final String ID_REAL_OBJECT_CLASS = "realObjectClass";
    private static final String ID_OBJECT_CLASS = "objectClass";
    private static final String ID_MAIN_FORM = "mainForm";
    private static final String ID_REPO_SEARCH = "repositorySearch";
    private static final String ID_RESOURCE_SEARCH = "resourceSearch";
    private static final String ID_TABLE = "table";

    private PageBase parentPage;
    private ShadowKindType kind;
    // TODO(review): appears unused — isUseObjectClass() reads the search DTO instead.
    private boolean useObjectClass;
    // True while the repository-backed view is active; selects which
    // session-storage slot the search settings are read from / written to.
    private boolean isRepoSearch = true;

    private IModel<ResourceContentSearchDto> resourceContentSearch;

    public ResourceContentTabPanel(String id, final ShadowKindType kind,
            final IModel<PrismObject<ResourceType>> model, PageBase parentPage) {
        super(id, model);
        this.parentPage = parentPage;
        this.resourceContentSearch = createContentSearchModel(kind);
        this.kind = kind;
        initLayout(model, parentPage);
    }

    /**
     * Lazily loads the search settings for the given shadow kind from
     * session storage, also initializing {@link #isRepoSearch} from the
     * repository-content storage slot.
     */
    private IModel<ResourceContentSearchDto> createContentSearchModel(final ShadowKindType kind) {
        return new LoadableModel<ResourceContentSearchDto>(true) {
            private static final long serialVersionUID = 1L;

            @Override
            protected ResourceContentSearchDto load() {
                isRepoSearch = !getContentStorage(kind,
                        SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT).getResourceSearch();
                return getContentStorage(kind,
                        isRepoSearch ? SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT
                                : SessionStorage.KEY_RESOURCE_PAGE_RESOURCE_CONTENT).getContentSearch();
            }
        };
    }

    /** Persists the current search settings into the active storage slot. */
    private void updateResourceContentSearch() {
        ResourceContentSearchDto searchDto = resourceContentSearch.getObject();
        getContentStorage(kind,
                isRepoSearch ? SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT
                        : SessionStorage.KEY_RESOURCE_PAGE_RESOURCE_CONTENT).setContentSearch(searchDto);
    }

    private ResourceContentStorage getContentStorage(ShadowKindType kind, String searchMode) {
        return parentPage.getSessionStorage().getResourceContentStorage(kind, searchMode);
    }

    /**
     * Builds the panel: the table form, the intent autocomplete, the
     * resolved object-class label, the raw object-class autocomplete, and
     * the repository/resource mode toggle buttons.
     */
    private void initLayout(final IModel<PrismObject<ResourceType>> model, final PageBase parentPage) {
        setOutputMarkupId(true);

        final Form mainForm = new Form(ID_MAIN_FORM);
        mainForm.setOutputMarkupId(true);
        mainForm.addOrReplace(initTable(model));
        add(mainForm);

        // Intent autocomplete — visible only when searching by kind+intent.
        AutoCompleteTextPanel<String> intent = new AutoCompleteTextPanel<String>(ID_INTENT,
                new PropertyModel<String>(resourceContentSearch, "intent"), String.class) {
            private static final long serialVersionUID = 1L;

            @Override
            public Iterator<String> getIterator(String input) {
                RefinedResourceSchema refinedSchema = null;
                try {
                    refinedSchema = RefinedResourceSchemaImpl.getRefinedSchema(model.getObject(),
                            parentPage.getPrismContext());
                } catch (SchemaException e) {
                    // No schema — offer no completions rather than fail.
                    return new ArrayList<String>().iterator();
                }
                return RefinedResourceSchemaImpl.getIntentsForKind(refinedSchema, getKind()).iterator();
            }
        };
        intent.getBaseFormComponent().add(new OnChangeAjaxBehavior() {
            private static final long serialVersionUID = 1L;

            @Override
            protected void onUpdate(AjaxRequestTarget target) {
                target.add(get(ID_REAL_OBJECT_CLASS));
                updateResourceContentSearch();
                mainForm.addOrReplace(initTable(model));
                target.add(mainForm);
            }
        });
        intent.setOutputMarkupId(true);
        intent.add(new VisibleEnableBehaviour() {
            private static final long serialVersionUID = 1L;

            @Override
            public boolean isVisible() {
                return !isUseObjectClass();
            }
        });
        add(intent);

        // Read-only label resolving the effective object class for the
        // currently selected kind and intent.
        Label realObjectClassLabel = new Label(ID_REAL_OBJECT_CLASS, new AbstractReadOnlyModel<String>() {
            private static final long serialVersionUID = 1L;

            @Override
            public String getObject() {
                RefinedObjectClassDefinition ocDef;
                try {
                    RefinedResourceSchema refinedSchema = RefinedResourceSchemaImpl
                            .getRefinedSchema(model.getObject(), parentPage.getPrismContext());
                    if (refinedSchema == null) {
                        return "NO SCHEMA DEFINED";
                    }
                    ocDef = refinedSchema.getRefinedDefinition(getKind(), getIntent());
                    if (ocDef != null) {
                        return ocDef.getObjectClassDefinition().getTypeName().getLocalPart();
                    }
                } catch (SchemaException e) {
                    // Fall through to "NOT FOUND" below.
                }
                return "NOT FOUND";
            }
        });
        realObjectClassLabel.setOutputMarkupId(true);
        add(realObjectClassLabel);

        // Raw object-class autocomplete — visible only in object-class mode.
        AutoCompleteQNamePanel objectClassPanel = new AutoCompleteQNamePanel(ID_OBJECT_CLASS,
                new PropertyModel<QName>(resourceContentSearch, "objectClass")) {
            private static final long serialVersionUID = 1L;

            @Override
            public Collection<QName> loadChoices() {
                return createObjectClassChoices(model);
            }

            @Override
            protected void onChange(AjaxRequestTarget target) {
                LOGGER.trace("Object class panel update: {}", isUseObjectClass());
                updateResourceContentSearch();
                mainForm.addOrReplace(initTable(model));
                target.add(mainForm);
            }
        };
        objectClassPanel.add(new VisibleEnableBehaviour() {
            private static final long serialVersionUID = 1L;

            @Override
            public boolean isVisible() {
                return isUseObjectClass();
            }
        });
        add(objectClassPanel);

        // "Repository" toggle: switches both storage slots to repo mode and
        // swaps in the repository-backed table.
        AjaxLink<Boolean> repoSearch = new AjaxLink<Boolean>(ID_REPO_SEARCH,
                new PropertyModel<Boolean>(resourceContentSearch, "resourceSearch")) {
            private static final long serialVersionUID = 1L;

            @Override
            public void onClick(AjaxRequestTarget target) {
                isRepoSearch = true;
                getContentStorage(kind, SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT)
                        .setResourceSearch(Boolean.FALSE);
                getContentStorage(kind, SessionStorage.KEY_RESOURCE_PAGE_RESOURCE_CONTENT)
                        .setResourceSearch(Boolean.FALSE);
                resourceContentSearch.getObject().setResourceSearch(Boolean.FALSE);
                updateResourceContentSearch();
                mainForm.addOrReplace(initRepoContent(model));
                target.add(getParent().addOrReplace(mainForm));
                target.add(this);
                target.add(getParent().get(ID_RESOURCE_SEARCH)
                        .add(AttributeModifier.replace("class", "btn btn-sm btn-default")));
            }

            @Override
            protected void onBeforeRender() {
                super.onBeforeRender();
                if (!getModelObject().booleanValue())
                    add(AttributeModifier.replace("class", "btn btn-sm btn-default active"));
            }
        };
        add(repoSearch);

        // "Resource" toggle: the live-resource counterpart of repoSearch.
        AjaxLink<Boolean> resourceSearch = new AjaxLink<Boolean>(ID_RESOURCE_SEARCH,
                new PropertyModel<Boolean>(resourceContentSearch, "resourceSearch")) {
            private static final long serialVersionUID = 1L;

            @Override
            public void onClick(AjaxRequestTarget target) {
                isRepoSearch = false;
                getContentStorage(kind, SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT)
                        .setResourceSearch(Boolean.TRUE);
                getContentStorage(kind, SessionStorage.KEY_RESOURCE_PAGE_RESOURCE_CONTENT)
                        .setResourceSearch(Boolean.TRUE);
                updateResourceContentSearch();
                resourceContentSearch.getObject().setResourceSearch(Boolean.TRUE);
                mainForm.addOrReplace(initResourceContent(model));
                target.add(getParent().addOrReplace(mainForm));
                target.add(this.add(AttributeModifier.append("class", " active")));
                target.add(getParent().get(ID_REPO_SEARCH)
                        .add(AttributeModifier.replace("class", "btn btn-sm btn-default")));
            }

            @Override
            protected void onBeforeRender() {
                super.onBeforeRender();
                // (A redundant no-op getModelObject().booleanValue() call was
                // removed here.)
                if (getModelObject().booleanValue())
                    add(AttributeModifier.replace("class", "btn btn-sm btn-default active"));
            }
        };
        add(resourceSearch);
    }

    /** Collects the object-class QNames defined by the resource schema. */
    private List<QName> createObjectClassChoices(IModel<PrismObject<ResourceType>> model) {
        RefinedResourceSchema refinedSchema;
        try {
            refinedSchema = RefinedResourceSchemaImpl.getRefinedSchema(model.getObject(),
                    parentPage.getPrismContext());
        } catch (SchemaException e) {
            warn("Could not determine defined object classes for resource");
            return new ArrayList<QName>();
        }
        Collection<ObjectClassComplexTypeDefinition> defs = refinedSchema.getObjectClassDefinitions();
        List<QName> objectClasses = new ArrayList<QName>(defs.size());
        for (ObjectClassComplexTypeDefinition def : defs) {
            objectClasses.add(def.getTypeName());
        }
        return objectClasses;
    }

    /** Builds the table matching the currently active search mode. */
    private ResourceContentPanel initTable(IModel<PrismObject<ResourceType>> model) {
        if (isResourceSearch()) {
            return initResourceContent(model);
        } else {
            return initRepoContent(model);
        }
    }

    private ResourceContentResourcePanel initResourceContent(IModel<PrismObject<ResourceType>> model) {
        String searchMode = isRepoSearch ? SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT
                : SessionStorage.KEY_RESOURCE_PAGE_RESOURCE_CONTENT;
        ResourceContentResourcePanel resourceContent = new ResourceContentResourcePanel(ID_TABLE, model,
                getObjectClass(), getKind(), getIntent(), searchMode, parentPage);
        resourceContent.setOutputMarkupId(true);
        return resourceContent;
    }

    private ResourceContentRepositoryPanel initRepoContent(IModel<PrismObject<ResourceType>> model) {
        String searchMode = isRepoSearch ? SessionStorage.KEY_RESOURCE_PAGE_REPOSITORY_CONTENT
                : SessionStorage.KEY_RESOURCE_PAGE_RESOURCE_CONTENT;
        ResourceContentRepositoryPanel repositoryContent = new ResourceContentRepositoryPanel(ID_TABLE, model,
                getObjectClass(), getKind(), getIntent(), searchMode, parentPage);
        repositoryContent.setOutputMarkupId(true);
        return repositoryContent;
    }

    private ShadowKindType getKind() {
        return resourceContentSearch.getObject().getKind();
    }

    private String getIntent() {
        return resourceContentSearch.getObject().getIntent();
    }

    private QName getObjectClass() {
        return resourceContentSearch.getObject().getObjectClass();
    }

    /** Null-safe read of the resource-search flag; null means repo mode. */
    private boolean isResourceSearch() {
        Boolean isResourceSearch = resourceContentSearch.getObject().isResourceSearch();
        if (isResourceSearch == null) {
            return false;
        }
        return isResourceSearch;
    }

    private boolean isUseObjectClass() {
        return resourceContentSearch.getObject().isUseObjectClass();
    }
}
def emitLoadBytes(self, reg: int, data: bytes = None, typ: VMType = VMType.BYTES):
    """Emit a LOAD instruction that places `data` into register `reg`.

    Encoding: LOAD opcode, register byte, type byte, varint length, payload bytes.

    :param reg: target register index (single byte).
    :param data: payload to load; defaults to an empty byte string.
    :param typ: VM type tag for the payload (defaults to ``VMType.BYTES``).
    :return: ``self``, so emit calls can be chained.
    :raises Exception: if the payload exceeds the 16-bit length limit.
    """
    if data is None:
        # Bug fix: the previous default was `[]` (a list), which is not `bytes`
        # and would be handed to emitBytes/emitVarInt with list semantics.
        data = b""
    if len(data) > 0xffff:
        raise Exception("tried to load too much data")
    self.emit(Opcode.LOAD)
    self.appendByte(reg)
    self.appendByte(typ)
    self.emitVarInt(len(data))
    self.emitBytes(data)
    # Note: the original annotated the return type as None while returning self;
    # the annotation was wrong, the chaining behavior is kept.
    return self
Here’s what a pact with Satan looks like: I deny God, Father, Son, and Holy Ghost, Mary and all the Saints, particularly Saint John the Baptist, the Church both Triumphant and Militant, all the sacraments, all the prayers prayed therein. I promise never to do good, to do all the evil I can, and would wish not at all to be a man, but that my nature be changed into a devil the better to serve thee, thou my lord and master Lucifer, and I promise thee that even if I be forced to do some good work, I will not do it in God’s honor, but in scorning him and in thine honor and that of all the devils, and that I ever give myself to thee and pray thee always to keep well the bond that I gave thee.
# Reads n and a list of n integers, then prints the number of swaps needed to
# move the first occurrence of the maximum to the front and the last occurrence
# of the minimum to the back (adjacent swaps; overlapping paths save one move).
count = int(input())
heights = list(map(int, input().split()))

max_pos = heights.index(max(heights))  # first occurrence of the maximum
# Last occurrence of the minimum, found by scanning the reversed list.
min_pos = len(heights) - 1 - heights[::-1].index(min(heights))

moves = max_pos + (count - 1 - min_pos)
if max_pos > min_pos:
    # The two elements cross paths, which saves exactly one swap.
    moves -= 1
print(moves)
/**
 * Fake "network" data source that simulates a slow remote fetch by sleeping
 * for one second before echoing the input back with a suffix.
 *
 * Created by Administrator on 16-4-1.
 */
public class NetDataSource implements MyDataSource {

    @Override
    public String getStringData(String str_input) {
        try {
            Thread.sleep(1000); // simulate network round-trip latency
        } catch (InterruptedException e) {
            // Bug fix: the interrupt was previously swallowed (printStackTrace only),
            // which loses the thread's interrupted status. Restore the flag so
            // callers up the stack can observe and react to the interruption.
            Thread.currentThread().interrupt();
        }
        return str_input + " from internet";
    }
}
import "jest-styled-components";

import { mount, shallow } from "enzyme";
import React from "react";

import { ProductTile } from ".";
import { PRODUCT } from "./fixtures";

describe("<ProductTile />", () => {
  // Helper to avoid repeating the shallow render in every test case.
  const renderShallow = () => shallow(<ProductTile product={PRODUCT} />);

  it("exists", () => {
    expect(renderShallow().exists()).toEqual(true);
  });

  it("has product name", () => {
    expect(renderShallow().text()).toContain(PRODUCT.name);
  });

  it("has price displayed", () => {
    // Full mount: the price is rendered by a nested component.
    const rendered = mount(<ProductTile product={PRODUCT} />);
    const amount = PRODUCT.pricing!.priceRange!.start!.gross!.amount;
    expect(rendered.text()).toContain(String(amount));
  });
});
<reponame>alanwei43/node-io-lib<gh_stars>0 import crypto from "crypto"; import { HashCalculateOptions } from "./hashStream"; /** * 计算hash * @param text 明文 * @param opts 默认使用 sha256 算法 * @date 2022-01-16 */ export function hashText(text: string, opts: HashCalculateOptions = {}): string { return crypto.createHash(opts.algorithm || "sha256").update(text).digest(opts.digest || "hex"); }
import { RouterModule, Routes } from '@angular/router'; import { ForumComponent } from './forum/forum.component'; import { LocaleEnComponent } from './locale/locale-en.component'; import { LocaleSvComponent } from './locale/locale-sv.component'; import { LocaleFiComponent } from './locale/locale-fi.component'; import { LocalizeGuard } from './locale/localize.guard'; import { NotFoundComponent } from './shared/not-found/not-found.component'; import { LocalizeInGuard } from './locale/localize-in.guard'; import { CheckLoginGuard } from './shared/guards/check-login.guard'; import { NgModule } from '@angular/core'; import { QuicklinkStrategy } from 'ngx-quicklink'; import { Global } from '../environments/global'; const baseRoutes: Routes = [ {path: '', pathMatch: 'full', loadChildren: () => import('./+home/home.module').then(m => m.HomeModule)}, {path: 'news', loadChildren: () => import('./+news/news.module').then(m => m.NewsModule), data: {title: 'news.title', preload: false}}, {path: 'about', loadChildren: () => import('./+information/information.module').then(m => m.InformationModule), data: {preload: false}}, {path: 'user', loadChildren: () => import('./+user/user.module').then(m => m.UserModule), data: {preload: false}}, {path: 'view', loadChildren: () => import('./+viewer/viewer.module').then(m => m.ViewerModule), data: {title: 'viewer.document', preload: false}}, {path: 'vihko', loadChildren: () => import('./+haseka/haseka.module').then(m => m.HasekaModule), data: {title: 'haseka.title', preload: false}}, {path: 'observation', loadChildren: () => import('./+observation/observation.module').then(m => m.ObservationModule), data: { title: 'navigation.observation' }}, {path: 'taxon', loadChildren: () => import('./+taxonomy/taxonomy.module').then(m => m.TaxonomyModule), data: { title: 'navigation.taxonomy' }}, {path: 'kartta', loadChildren: () => import('./+map/map.module').then(m => m.MapModule), data: {preload: false, canonical: '/map'}}, { path: 'map', 
loadChildren: () => import('./+map/map.module').then(m => m.MapModule), data: {title: 'navigation.map', displayFeedback: false, preload: false } }, {path: 'error/404', pathMatch: 'full', component: NotFoundComponent}, {path: 'theme', loadChildren: () => import('./+theme/theme.module').then(m => m.ThemeModule), data: {preload: false}}, // {path: 'admin', loadChildren: './admin/admin.module#AdminModule'}, // {path: 'shell', component: ForumComponent}, {path: 'forum', component: ForumComponent}, {path: 'ui-components', loadChildren: () => import('./+ui-components/ui-components.module').then(m => m.UiComponentsModule), data: {preload: false}}, {path: 'save-observations', loadChildren: () => import('./+save-observations/save-observations.module').then(m => m.SaveObservationsModule)}, {path: 'project', loadChildren: () => import('./+project-form/project-form.module').then(m => m.ProjectFormModule)} ]; const rootRouting = { 'talvilintu': '/project/MHL.3', 'ykj': '/theme/ykj', 'emk': '/theme/emk', 'profile': '/user', }; Object.keys(Global.oldThemeRouting).forEach(path => { rootRouting[path] = `/project/${Global.oldThemeRouting[path]}`; }); const redirectsEn: Routes = []; const redirectsSv: Routes = []; const redirectsFi: Routes = []; redirectsEn.push(...Object.keys(rootRouting).map(path => ({path, redirectTo: `/en${rootRouting[path]}`, pathMatch: 'full'}))); redirectsSv.push(...Object.keys(rootRouting).map(path => ({path, redirectTo: `/sv${rootRouting[path]}`, pathMatch: 'full'}))); redirectsFi.push(...Object.keys(rootRouting).map(path => ({path, redirectTo: `${rootRouting[path]}`, pathMatch: 'full'}))); const routesWithLang: Routes = [ {path: 'in', children: [ {path: '**', component: NotFoundComponent} ], component: LocaleEnComponent, canActivate: [LocalizeInGuard]}, {path: 'en', data: {lang: 'en'}, children: [ ...redirectsEn, {path: 'theme/emk', redirectTo: '/en/about/3061', pathMatch: 'full'}, ...baseRoutes, {path: '**', component: NotFoundComponent} ], component: 
LocaleEnComponent, canActivate: [LocalizeGuard]}, {path: 'sv', data: {lang: 'sv'}, children: [ ...redirectsFi, {path: 'theme/emk', redirectTo: '/sv/about/5719', pathMatch: 'full'}, ...baseRoutes, {path: '**', component: NotFoundComponent} ], component: LocaleSvComponent, canActivate: [LocalizeGuard]}, {path: '', data: {lang: 'fi'}, children: [ ...redirectsFi, {path: 'lajiluettelo', redirectTo: '/theme/checklist', pathMatch: 'full'}, {path: 'artlistan', redirectTo: '/sv/theme/checklist', pathMatch: 'full'}, {path: 'checklist', redirectTo: '/en/theme/checklist', pathMatch: 'full'}, {path: 'pinkka', redirectTo: '/theme/pinkka', pathMatch: 'full'}, {path: 'julkaisut', redirectTo: '/theme/publications', pathMatch: 'full'}, {path: 'bibliografi', redirectTo: '/sv/theme/publications', pathMatch: 'full'}, {path: 'publications', redirectTo: '/en/theme/publications', pathMatch: 'full'}, {path: 'hyonteisopas', redirectTo: '/theme/hyonteisopas', pathMatch: 'full'}, {path: 'laadunvalvonta', redirectTo: '/about/772', pathMatch: 'full'}, {path: 'sensitiiviset', redirectTo: '/about/709', pathMatch: 'full'}, {path: 'mobiilivihko', redirectTo: '/about/4981', pathMatch: 'full'}, {path: 'ilmoita', redirectTo: '/save-observations', pathMatch: 'full'}, {path: 'selaa', redirectTo: '/observation/list', pathMatch: 'full'}, {path: 'theme/emk', redirectTo: '/about/5719', pathMatch: 'full'}, ...baseRoutes, {path: '**', component: NotFoundComponent} ], component: LocaleFiComponent, canActivate: [LocalizeGuard]} ]; export const routes: Routes = [ {path: '', children: routesWithLang, canActivate: [CheckLoginGuard]} ]; @NgModule({ imports: [RouterModule.forRoot(routes, { enableTracing: false, preloadingStrategy: QuicklinkStrategy, initialNavigation: 'enabled', relativeLinkResolution: 'legacy' })], exports: [RouterModule], declarations: [] }) export class AppRoutingModule { }
import NavHeader from './nav-header' import NavMenu from './nav-menu' export { NavHeader, NavMenu }
/**
 * The second Fragment of the OrganizerActivity.
 * It hosts the current events of the organizer in a RecyclerView.
 */
public class OrganizerEventsFragment extends Fragment {

    // Id of the organizer whose events are shown; supplied via fragment arguments.
    private String organizerId;
    private RecyclerView mRecyclerView;
    private EventsAdapter adapter;

    public OrganizerEventsFragment() {
        // Required empty public constructor
    }

    /**
     * Use this factory method to create a new instance of
     * this fragment using the provided parameters.
     *
     * @return A new instance of fragment OrganizerInfoFragment.
     */
    public static OrganizerEventsFragment newInstance(String organizerId) {
        OrganizerEventsFragment fragment = new OrganizerEventsFragment();
        Bundle bundle = new Bundle();
        bundle.putString("organizerId", organizerId);
        fragment.setArguments(bundle);
        return fragment;
    }

    /**
     * Sets all attributes from arguments.
     *
     * @param savedInstanceState
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if (getArguments() != null) {
            organizerId = getArguments().getString("organizerId");
        }
    }

    /**
     * Creates the View of the fragment. Similar to onCreate of Activities.
     *
     * @param inflater
     * @param container
     * @param savedInstanceState
     * @return
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        // Inflate the layout for this fragment
        final View v = inflater.inflate(R.layout.fragment_organizer_events, container, false);
        // RecyclerView
        mRecyclerView = v.findViewById(R.id.frEventRv);
        mRecyclerView.setHasFixedSize(true);
        mRecyclerView.setLayoutManager(new LinearLayoutManager(v.getContext()));
        // Asynchronously load the events and bind them once all organizer names arrived.
        showData(new eventsAndOrganizerNamesCallback() {
            @Override
            public void onCallback(List<Event> events, Map<Event, String> eventsAndOrganizerNames, List<BlockedOrganizer> blockedOrganizers) {
                // Remove passed events
                events = EventHelper.getEventsWithoutPassedOnes(events);
                // Remove events of blocked organizers
                events = EventHelper.getEventsWithoutBlockedOnes(events, blockedOrganizers);
                adapter = new EventsAdapter(getActivity(), events, eventsAndOrganizerNames, v.getContext(), R.layout.row_events_organizer_events);
                mRecyclerView.setAdapter(adapter);
            }
        });
        return v;
    }

    // Fetches this organizer's events, resolves each event's organizer name, and
    // finally reports the assembled data via the given callback.
    private void showData(final eventsAndOrganizerNamesCallback dbCallback) {
        EventsRepository eventsRepository = EventsRepository.getInstance();
        final OrganizerRepository organizerRepository = OrganizerRepository.getInstance();
        // show all events, even if the organizer is blocked
        final List<BlockedOrganizer> blockedOrganizers = new ArrayList<>();
        // Get events of the specific organizer
        eventsRepository.getEventsByOrganizerId(organizerId, EVENTS_AMOUNT).addOnCompleteListener(new OnCompleteListener<List<Event>>() {
            @Override
            public void onComplete(@NonNull Task<List<Event>> task) {
                if (task.getResult() != null) {
                    final List<Event> events = task.getResult();
                    final Map<Event, String> eventsAndOrganizerNames = new LinkedHashMap<>();
                    for (int a = 0; a < events.size(); a++) {
                        final Event event = events.get(a);
                        final int b = a;
                        // Get names of organizers
                        organizerRepository.getOrganizerById(event.getOrganizer())
                                .addOnCompleteListener(new OnCompleteListener<Organizer>() {
                                    @Override
                                    public void onComplete(@NonNull Task<Organizer> task) {
                                        if (task.isSuccessful()) {
                                            eventsAndOrganizerNames.put(event, task.getResult().getName());
                                            // NOTE(review): this fires the callback when the listener for
                                            // the LAST index completes, assuming listeners complete in
                                            // submission order. If tasks complete out of order the map may
                                            // be incomplete (or the callback may never fire if the last
                                            // lookup fails) — confirm against the repository's behavior.
                                            if (b == (events.size()-1)) {
                                                dbCallback.onCallback(events, eventsAndOrganizerNames, blockedOrganizers);
                                            }
                                        }
                                    }
                                }).addOnFailureListener(new OnFailureListener() {
                                    @Override
                                    public void onFailure(@NonNull Exception e) {
                                        Toast.makeText(getActivity(), e.getMessage(), Toast.LENGTH_SHORT).show();
                                    }
                                });
                    }
                }
            }
        }).addOnFailureListener(new OnFailureListener() {
            @Override
            public void onFailure(@NonNull Exception e) {
                Toast.makeText(getActivity(), e.getMessage(), Toast.LENGTH_SHORT).show();
            }
        });
    }
}
-- Reads two numbers [y, x] from one line and prints whether the pair is
-- reachable: the special case [0, 1] is "Yes"; otherwise x must exceed 1 and
-- (y - x + 1) must be a non-negative even number.
main :: IO ()
main = do
  line <- getLine
  putStrLn (solve (map read (words line)))

solve :: [Integer] -> String
solve [0, 1] = "Yes"
solve [y, x]
  | x > 1, diff >= 0, even diff = "Yes"
  | otherwise                   = "No"
  where
    diff = y - x + 1
<gh_stars>1-10 // // Este archivo ha sido generado por la arquitectura JavaTM para la implantación de la referencia de enlace (JAXB) XML v2.2.11 // Visite <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Todas las modificaciones realizadas en este archivo se perderán si se vuelve a compilar el esquema de origen. // Generado el: 2016.07.20 a las 12:44:20 PM COT // package org.openfact.models.ubl; import javax.xml.bind.annotation.*; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import java.math.BigDecimal; /** * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:UniqueID xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;UDT0000013&lt;/ccts:UniqueID&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:CategoryCode xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;UDT&lt;/ccts:CategoryCode&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:DictionaryEntryName xmlns:ccts="urn:un:unece:uncefact:documentation:2" 
xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;Measure. Type&lt;/ccts:DictionaryEntryName&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:VersionID xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;1.0&lt;/ccts:VersionID&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:Definition xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;A numeric value determined by measuring an object along with the specified unit of measure.&lt;/ccts:Definition&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:RepresentationTermName xmlns:ccts="urn:un:unece:uncefact:documentation:2" 
xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;Measure&lt;/ccts:RepresentationTermName&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:PropertyTermName xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;Type&lt;/ccts:PropertyTermName&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;ccts:PrimitiveType xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2" xmlns:xsd="http://www.w3.org/2001/XMLSchema"&gt;decimal&lt;/ccts:PrimitiveType&gt; * </pre> * * <pre> * &lt;?xml version="1.0" encoding="UTF-8"?&gt;&lt;xsd:BuiltinType xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:ccts="urn:un:unece:uncefact:documentation:2" xmlns:clm54217="urn:un:unece:uncefact:codelist:specification:54217:2001" 
xmlns:clm5639="urn:un:unece:uncefact:codelist:specification:5639:1988" xmlns:clm66411="urn:un:unece:uncefact:codelist:specification:66411:2001" xmlns:clmIANAMIMEMediaType="urn:un:unece:uncefact:codelist:specification:IANAMIMEMediaType:2003" xmlns:udt="urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2"&gt;decimal&lt;/xsd:BuiltinType&gt; * </pre> * * * <p>Clase Java para MeasureType complex type. * * <p>El siguiente fragmento de esquema especifica el contenido que se espera que haya en esta clase. * * <pre> * &lt;complexType name="MeasureType"&gt; * &lt;simpleContent&gt; * &lt;extension base="&lt;http://www.w3.org/2001/XMLSchema&gt;decimal"&gt; * &lt;attribute name="unitCode" use="required" type="{urn:un:unece:uncefact:codelist:specification:66411:2001}UnitCodeContentType" /&gt; * &lt;/extension&gt; * &lt;/simpleContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "MeasureType", namespace = "urn:un:unece:uncefact:data:specification:UnqualifiedDataTypesSchemaModule:2", propOrder = { "value" }) @XmlSeeAlso({ BaseUnitMeasureType.class, ChargeableWeightMeasureType.class, DegreesMeasureType.class, DurationMeasureType.class, GrossVolumeMeasureType.class, GrossWeightMeasureType.class, LatitudeDegreesMeasureType.class, LatitudeMinutesMeasureType.class, LeadTimeMeasureType.class, LengthMeasureType.class, LoadingLengthMeasureType.class, LongitudeDegreesMeasureType.class, LongitudeMinutesMeasureType.class, MaximumMeasureType.class, MeasureTypeCommBas.class, MinimumMeasureType.class, MinutesMeasureType.class, NetNetWeightMeasureType.class, NetVolumeMeasureType.class, NetWeightMeasureType.class, VolumeMeasureType.class, WeightMeasureType.class }) public class MeasureType { @XmlValue protected BigDecimal value; @XmlAttribute(name = "unitCode", required = true) @XmlJavaTypeAdapter(CollapsedStringAdapter.class) protected String unitCode; /** * Obtiene el valor de la propiedad value. 
* * @return * possible object is * {@link BigDecimal } * */ public BigDecimal getValue() { return value; } /** * Define el valor de la propiedad value. * * @param value * allowed object is * {@link BigDecimal } * */ public void setValue(BigDecimal value) { this.value = value; } /** * Obtiene el valor de la propiedad unitCode. * * @return * possible object is * {@link String } * */ public String getUnitCode() { return unitCode; } /** * Define el valor de la propiedad unitCode. * * @param value * allowed object is * {@link String } * */ public void setUnitCode(String value) { this.unitCode = value; } }
#include <iostream>
#include <algorithm>
#include <cstdio>
#include <string>
#include <utility>
using namespace std;

typedef pair<int,int> pii;

int t;
int n;
// Renamed from `array` to avoid clashing with std::array under `using namespace std`.
// pts[0] is intentionally left zero-initialized: the walk starts at (0, 0).
pii pts[1002];

// For each test case: read n points, sort them by (x, y); the points are
// visitable with only 'R' (x+1) and 'U' (y+1) moves iff the y-coordinates are
// non-decreasing after sorting. Print the move string, or "NO" if impossible.
int main() {
    cin >> t;
    while (t--) {
        cin >> n;
        for (int i = 1; i <= n; ++i) {
            // Unified on iostream (the original mixed cin with scanf).
            cin >> pts[i].first >> pts[i].second;
        }
        // std::pair's default operator< compares first then second — identical
        // to the hand-written comparator the original used.
        sort(pts + 1, pts + n + 1);
        bool impossible = false;
        for (int i = 1; i < n; ++i) {
            if (pts[i].second > pts[i + 1].second) {
                impossible = true;
                break;
            }
        }
        if (impossible) {
            cout << "NO\n"; // '\n' instead of endl: no flush per line
            continue;
        }
        cout << "YES\n";
        string path;
        for (int i = 0; i < n; ++i) {
            // Both deltas are non-negative here: x by the sort, y by the check above.
            path.append(pts[i + 1].first - pts[i].first, 'R');
            path.append(pts[i + 1].second - pts[i].second, 'U');
        }
        cout << path << '\n';
    }
}
// +build linux

/*
 * Copyright (c) 2020 wellwell.work, LLC by Zoe
 *
 * Licensed under the Apache License 2.0 (the "License");
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Package automaxprocs sets GOMAXPROCS at import time, preferring the
// GOMAXPROCS environment variable and falling back to the container's
// cgroup CPU quota (Linux only).
package automaxprocs

import (
	"log"
	"os"
	"runtime"
	"strconv"

	"go.zoe.im/x"
	"go.zoe.im/x/cgroup"
)

// Name of the environment variable that overrides the cgroup-derived value.
const _maxProcsKey = "GOMAXPROCS"

var (
	// prevCount is the GOMAXPROCS value observed before init() changed it,
	// so Unset can restore it.
	prevCount = 0
)

// Unset restores GOMAXPROCS to the value it had before this package's
// init ran.
func Unset() {
	runtime.GOMAXPROCS(prevCount)
}

func init() {
	// first load the prev count at first
	prevCount = runtime.GOMAXPROCS(0)

	maxProc := 0

	// Environment variable wins over the cgroup quota.
	if max, ok := os.LookupEnv(_maxProcsKey); ok {
		maxProc, _ = strconv.Atoi(max)
		log.Printf("auto set max proc from env: %v", maxProc)
	} else {
		// NOTE(review): presumably x.V(...).Unwrap(...) returns the quota when
		// the cgroup lookup succeeds and maxProc (0) otherwise — confirm
		// against the go.zoe.im/x package; a resulting 0 would leave
		// GOMAXPROCS at the runtime default.
		maxProc, _ = x.V(maxProc).Unwrap(quotaToProcs(1)).Int()
		log.Printf("auto set max proc from cgroup: %v", maxProc)
	}

	// and set to runtime
	runtime.GOMAXPROCS(maxProc)
}

// quotaToProcs derives a process count from the cgroup CPU quota of the
// current process, returning at least min when the quota is absent,
// unreadable, or smaller than min.
func quotaToProcs(min int) (int, error) {
	// TODO: load from cgroup
	cgs, err := cgroup.NewCGroupsForSelf()
	if err != nil {
		return min, err
	}
	v, ok, err := cgs.CPUQuota()
	if !ok {
		return min, err
	}
	xv, _ := x.V(int(v)).If(int(v) > min).Or(min).Int()
	return xv, nil
}
/**
 * This method overrides the method in the ChatIF interface. It
 * prints a message to the console, except for the server's "#quit"
 * command, which instead logs the client off.
 *
 * @param message The string to be displayed.
 */
public void display(String message) {
    // Early return: the disconnect command is consumed, not printed.
    if (message.equals("#quit")) {
        client.logOff();
        return;
    }
    System.out.println(message);
}
def trace(t, u):
    # Builds a complex trace of len(t) points, starting from an all-ones array
    # and repeatedly applying vslit_zip to the tail z[i:] with the backward
    # differences of t and u, iterating i from n-1 down to 1.
    # NOTE(review): presumably t is a time grid and u the driving function of a
    # Loewner-type slit evolution, with vslit_zip applying one vertical-slit
    # conformal map — confirm against vslit_zip's definition.
    # assumes t and u are 1-D numpy arrays of equal length — TODO confirm
    n = t.size
    z = np.ones(n, dtype=np.complex128)
    for i in range(n - 1, 0, -1):
        dt = t[i] - t[i - 1]
        du = u[i] - u[i - 1]
        # Only the points from index i onward are advanced by this step.
        z[i:] = vslit_zip(z[i:], dt, du)
    return z
# Reads input of the form "<x> of week" or "<x> of month" and prints how many
# times that weekday number / day-of-month occurs in the year.
day, _, unit = input().split()

if "w" in unit:
    # "... of week": every weekday occurs 52 times; weekdays 5 and 6 occur once
    # more. (Substring test `day in "56"` matches the original's behavior for
    # valid single-digit input.)
    print(52 + (day in "56"))
else:
    # "... of month": day 31 exists in 7 months, day 30 in 11, all others in 12.
    if day == "31":
        print(7)
    elif day == "30":
        print(11)
    else:
        print(12)
def cmd_remove_path(value: List[str]) -> str:
    """Build a Windows batch snippet that strips each entry from %PATH%.

    Each entry produces one ``set PATH=%PATH:<entry>;=%`` line, which uses
    cmd.exe's substring-removal syntax to delete ``<entry>;`` from PATH.

    :param value: path entries to remove.
    :return: the generated lines joined with newlines (empty string for no entries).
    """
    lines = []
    for entry in value:
        normalized = Path(entry)
        lines.append(f"set PATH=%PATH:{normalized};=%")
    return "\n".join(lines)
— The future of a California soul-food chain remains to be seen after its parent company is officially filing for bankruptcy. The legendary Roscoe’s House of Chicken ‘n Waffles is known for being frequented by celebrities, including Snoop Dogg and Larry King. But new court documents show that of all the money Roscoe’s owes to various companies and people, one of the biggest debts is to Daniel Beasley of Compton. Beasley is a former employee who sued the restaurant on Pico Boulevard for racial discrimination and won. “You can’t treat people like that and get away with it constantly,” said Beasley in September. He had accused the restaurant of giving preferential treatment to Latino employees and harassing him for being black. Although Roscoe’s has filed for Chapter 11, it doesn’t mean the company is going under. Rather, the filing helps it buy time in order to restructure its debts. Roscoe lovers hope it all gets resolved soon. “I just wish them all the luck in regards to it,” said Bernie Mendoza. An attorney for Roscoe’s declined to comment Monday evening, while a lawyer for Beasley said the restaurant could have settled the case for less.
/**
 * Sends forward a message defined in the DSL.
 *
 * @author Marcin Grzejszczak
 */
class StubRunnerKafkaTransformer {

    private final StubRunnerKafkaMessageSelector selector;

    StubRunnerKafkaTransformer(List<Contract> groovyDsls) {
        this.selector = new StubRunnerKafkaMessageSelector(groovyDsls);
    }

    /**
     * Builds the outgoing message for the given contract: extracts the stub-side
     * body and headers, registers the message with the selector cache, and
     * returns it.
     */
    public Message<?> transform(Contract groovyDsl) {
        Object outputBody = outputBody(groovyDsl);
        Map<String, Object> headers = groovyDsl.getOutputMessage().getHeaders()
                .asStubSideMap();
        // Fixed raw-type usage: parameterize Message to avoid unchecked warnings.
        Message<?> newMessage = MessageBuilder.createMessage(outputBody,
                new MessageHeaders(headers));
        this.selector.updateCache(newMessage, groovyDsl);
        return newMessage;
    }

    // Extracts the payload from the contract's output message; file-based
    // payloads are forwarded as raw bytes.
    private Object outputBody(Contract groovyDsl) {
        Object outputBody = BodyExtractor
                .extractClientValueFromBody(groovyDsl.getOutputMessage().getBody());
        if (outputBody instanceof FromFileProperty) {
            FromFileProperty property = (FromFileProperty) outputBody;
            return property.asBytes();
        }
        return BodyExtractor.extractStubValueFrom(outputBody);
    }
}
/**
 * Tests for the generic Path implementation: total-weight accumulation, edge
 * retrieval order, and validation of mismatching vertices / total weight.
 *
 * @author Tomas Johansson
 */
public class PathGenericsImplTest {

    private Edge edgeAB3;
    private Edge edgeBC5;
    private Edge edgeCD7;

    private String firstVertex, secondVertex, thirdVertex, fourthVertex;
    private double weightFirstEdge, weightSecondEdge, weightThirdEdge, totalWeight;
    private Path path;

    // Builds a three-edge path A -> B -> C -> D with weights 3, 5, 7 (total 15).
    @BeforeEach
    public void setUp() throws Exception {
        firstVertex = "A";
        secondVertex = "B";
        thirdVertex = "C";
        fourthVertex = "D";

        weightFirstEdge = 3;
        weightSecondEdge = 5;
        weightThirdEdge = 7;
        totalWeight = weightFirstEdge + weightSecondEdge + weightThirdEdge;

        edgeAB3 = createEdge(createVertex(firstVertex), createVertex(secondVertex), createWeight(weightFirstEdge));
        edgeBC5 = createEdge(createVertex(secondVertex), createVertex(thirdVertex), createWeight(weightSecondEdge));
        edgeCD7 = createEdge(createVertex(thirdVertex), createVertex(fourthVertex), createWeight(weightThirdEdge));

        path = createPath(createWeight(totalWeight), Arrays.asList(edgeAB3, edgeBC5, edgeCD7));
    }

    @Test
    public void testGetTotalWeightForPath() {
        assertEquals(totalWeight, path.getTotalWeightForPath().getWeightValue(), SMALL_DELTA_VALUE_FOR_WEIGHT_COMPARISONS);
    }

    // Edges must come back in path order: A-B, B-C, C-D.
    @Test
    public void testGetEdgesForPath() {
        List<? extends EdgeGenerics> edgesForPath = path.getEdgesForPath();
        assertEquals(3, edgesForPath.size());
        assertEquals(edgeAB3, edgesForPath.get(0));
        assertEquals(edgeBC5, edgesForPath.get(1));
        assertEquals(edgeCD7, edgesForPath.get(2));
    }

    @Test
    public void testExceptionIsThrownIfVerticesIsNotMatching() {
        RuntimeException exception = org.junit.jupiter.api.Assertions.assertThrows(RuntimeException.class, () -> {
            createPathGenerics(
                createWeight(15d),
                Arrays.asList(
                    createEdge(createVertex("A"), createVertex("B"), createWeight(3d)),
                    createEdge(createVertex("B"), createVertex("C"), createWeight(5d)),
                    // Note that "X" should be "C" below, which is the reason for expected exception
                    createEdge(createVertex("X"), createVertex("D"), createWeight(7d))
                ),
                false,
                true // tell creation method to throw exception if not all vertices are matching
            );
        });
        assertThat(exception.getMessage(), containsString("vertices")); // Mismatching vertices detected
    }

    @Test
    public void testExceptionIsTotalWeightIsNotMatching() {
        RuntimeException exception = org.junit.jupiter.api.Assertions.assertThrows(RuntimeException.class, () -> {
            createPathGenerics(
                createWeight(16), // SHOULD be 15 ( 3 + 5 + 7 ) and therefore an exception should be thrown
                Arrays.asList(
                    createEdge(createVertex("A"), createVertex("B"), createWeight(3d)),
                    createEdge(createVertex("B"), createVertex("C"), createWeight(5d)),
                    createEdge(createVertex("C"), createVertex("D"), createWeight(7d))
                ),
                true, // tell creation method to throw exception if sum is not matching
                false
            );
        });
        assertThat(exception.getMessage(), containsString("weight")); // Incorrect weight
    }
}
// Callback invoked when a store value changes; `older` is the previous value
// and may be absent (e.g. on the first notification).
export type Subscriber<T> = (newer: T, older?: T) => void;

// NOTE(review): presumably `dirtify` tells the notifier whether to mark the
// store dirty before notifying — confirm with call sites.
export type Notifier = (dirtify: boolean) => void;

// Identifier type used for store keys.
export type ID = string | number;
#ifndef GLWEBCLIP_H
#define GLWEBCLIP_H

#include "GLClip.h"
#include "GLRenderer.h"

// Clip that embeds web content (a URL) inside the GL scene graph.
// Loading outcome is reported back to Flow via the stored callbacks.
class GLWebClip : public GLClip {
protected:
    ivec2 size;                 // requested pixel size of the web view
    unicode_string url;         // URL to load
    bool useCache;              // whether the host may serve the page from cache
    StackSlot callback, ondone; // Flow-side callbacks (host-call hook / load-done)

    void computeBBoxSelf(GLBoundingBox &bbox, const GLTransform &transform);
    // Reports the held StackSlots to the garbage collector so they stay alive.
    void flowGCObject(GarbageCollectorFn);

public:
    GLWebClip(GLRenderSupport *owner, ivec2 size, const unicode_string url, bool use_cache, const StackSlot & callback, const StackSlot & _ondone);

    unicode_string getUrl() { return url; }
    const StackSlot & getFlowCallback() { return callback; }
    bool getUseCache() { return useCache; }

    // Invoked by the host when the page finished loading (fires `ondone`).
    void notifyPageLoaded();
    // Invoked by the host on a load error, with the error text.
    void notifyError(std::string e);

    DEFINE_FLOW_NATIVE_OBJECT(GLWebClip, GLClip);

    // Natives exposed to Flow code.
    DECLARE_NATIVE_METHOD(webClipHostCall);
    DECLARE_NATIVE_METHOD(webClipEvalJS);
    DECLARE_NATIVE_METHOD(setWebClipZoomable);
    DECLARE_NATIVE_METHOD(setWebClipDomains);
};

#endif // GLWEBCLIP_H
/**
 * This screen shows the status of the empire. You can see all your colonies, all your fleets, etc.
 */
public class EmpireScreen extends Screen {
    private static final Log log = new Log("EmpireScreen");

    // Root layout of this screen; created once in onCreate and returned on show.
    private EmpireLayout layout;

    @Override
    public void onCreate(ScreenContext context, ViewGroup container) {
        super.onCreate(context, container);
        layout = new EmpireLayout(context.getActivity());
    }

    @Override
    public View onShow() {
        return layout;
    }
}
// -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*- // vim: ts=8 sw=2 smarttab #ifndef COMMON_REF_H #define COMMON_REF_H #include <boost/intrusive_ptr.hpp> namespace ceph { template<typename T> using ref_t = boost::intrusive_ptr<T>; template<typename T> using cref_t = boost::intrusive_ptr<const T>; template<class T, class U> ref_t<T> ref_cast(const ref_t<U>& r) noexcept { return static_cast<T*>(r.get()); } template<class T, class U> ref_t<T> ref_cast(ref_t<U>&& r) noexcept { return {static_cast<T*>(r.detach()), false}; } template<class T, class U> cref_t<T> ref_cast(const cref_t<U>& r) noexcept { return static_cast<const T*>(r.get()); } template<class T, typename... Args> ceph::ref_t<T> make_ref(Args&&... args) { return {new T(std::forward<Args>(args)...), false}; } } // Friends cannot be partial specializations: https://en.cppreference.com/w/cpp/language/friend #define FRIEND_MAKE_REF(C) \ template<class T, typename... Args> friend ceph::ref_t<T> ceph::make_ref(Args&&... args) #endif
<filename>ocr/daemon.cc
// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "ocr/daemon.h"

#include <memory>
#include <string>
#include <sysexits.h>
#include <utility>

#include <base/bind.h>
#include <base/callback.h>
#include <base/check.h>
#include <base/files/file_util.h>
#include <base/logging.h>
#include <base/threading/thread_task_runner_handle.h>
#include <base/unguessable_token.h>
#include <dbus/object_path.h>
#include <chromeos/dbus/service_constants.h>
#include <mojo/public/cpp/platform/platform_channel_endpoint.h>
#include <mojo/public/cpp/system/invitation.h>
#include <mojo/core/embedder/embedder.h>

#include "ocr/ocr_service_impl.h"

namespace ocr {

// Construct the daemon: create the OCR service implementation and register
// a disconnect callback bound through a weak pointer, so the callback is
// dropped safely if the daemon is destroyed first.
OcrDaemon::OcrDaemon() : brillo::DBusServiceDaemon(kOcrServiceName) {
  ocr_service_impl_ = std::make_unique<OcrServiceImpl>();
  ocr_service_impl_->SetOnDisconnectCallback(base::BindRepeating(
      &OcrDaemon::OnDisconnect, weak_ptr_factory_.GetWeakPtr()));
}

OcrDaemon::~OcrDaemon() = default;

// Run base-class (D-Bus) initialization first; on success, bring up the
// Mojo IPC runtime for this process.
int OcrDaemon::OnInit() {
  int return_code = brillo::DBusServiceDaemon::OnInit();
  if (return_code != EX_OK)
    return return_code;

  // Initialize Mojo IPC. The IPC support object lives for the daemon's
  // lifetime; CLEAN shutdown blocks until Mojo has shut down properly.
  mojo::core::Init();
  ipc_support_ = std::make_unique<mojo::core::ScopedIPCSupport>(
      base::ThreadTaskRunnerHandle::Get() /* io_thread_task_runner */,
      mojo::core::ScopedIPCSupport::ShutdownPolicy::
          CLEAN /* blocking shutdown */);

  return EX_OK;
}

// Export the single D-Bus method (BootstrapMojoConnection) on the service's
// object path. Registration failure is fatal per the handler flags below.
void OcrDaemon::RegisterDBusObjectsAsync(
    brillo::dbus_utils::AsyncEventSequencer* sequencer) {
  DCHECK(!dbus_object_);
  dbus_object_ = std::make_unique<brillo::dbus_utils::DBusObject>(
      nullptr /* object_manager */, bus_, dbus::ObjectPath(kOcrServicePath));
  brillo::dbus_utils::DBusInterface* dbus_interface =
      dbus_object_->AddOrGetInterface(kOcrServiceInterface);
  DCHECK(dbus_interface);
  // base::Unretained is safe here as long as |this| outlives |dbus_object_|
  // (the daemon owns the object) — NOTE(review): confirm ownership order.
  dbus_interface->AddSimpleMethodHandler(kBootstrapMojoConnectionMethod,
                                         base::Unretained(this),
                                         &OcrDaemon::BootstrapMojoConnection);
  dbus_object_->RegisterAsync(sequencer->GetHandler(
      "Failed to register D-Bus object" /* descriptive_message */,
      true /* failure_is_fatal */));
}

// D-Bus entry point that bootstraps a Mojo connection over |mojo_fd|.
// If |should_accept_invitation| is true, the caller (the browser) sends the
// invitation and we accept it; the returned token is empty. Otherwise we
// send an outgoing invitation ourselves and return the pipe token the
// requesting process must use to connect. On error, returns a human-readable
// error string (this D-Bus method reports errors via its return value).
std::string OcrDaemon::BootstrapMojoConnection(const base::ScopedFD& mojo_fd,
                                               bool should_accept_invitation) {
  VLOG(1) << "Received BootstrapMojoConnection D-Bus request";

  if (!mojo_fd.is_valid()) {
    constexpr char kInvalidFileDescriptorError[] =
        "ScopedFD extracted from D-Bus call was invalid (i.e. empty)";
    LOG(ERROR) << kInvalidFileDescriptorError;
    return kInvalidFileDescriptorError;
  }

  // We need a file descriptor that stays alive after the current method
  // finishes, but libbrillo's D-Bus wrappers currently don't support passing
  // base::ScopedFD by value. So duplicate the fd and own the copy.
  base::ScopedFD mojo_fd_copy(HANDLE_EINTR(dup(mojo_fd.get())));
  if (!mojo_fd_copy.is_valid()) {
    constexpr char kFailedDuplicationError[] =
        "Failed to duplicate the Mojo file descriptor";
    PLOG(ERROR) << kFailedDuplicationError;
    return kFailedDuplicationError;
  }

  // The duplicated fd must not leak into child processes.
  if (!base::SetCloseOnExec(mojo_fd_copy.get())) {
    constexpr char kFailedSettingFdCloexec[] =
        "Failed to set FD_CLOEXEC on Mojo file descriptor";
    PLOG(ERROR) << kFailedSettingFdCloexec;
    return kFailedSettingFdCloexec;
  }

  std::string token;

  mojo::ScopedMessagePipeHandle mojo_message_pipe;
  if (should_accept_invitation) {
    if (mojo_service_bind_attempted_) {
      // This should not normally be triggered, since the other endpoint - the
      // browser process - should bootstrap the Mojo connection only once, and
      // when that process is killed the Mojo shutdown notification should have
      // been received earlier. But handle this case to be on the safe side.
      // After we restart, the browser process is expected to invoke the
      // bootstrapping again.
      LOG(ERROR) << "Shutting down due to repeated Mojo bootstrap requests";
      ocr_service_impl_.reset();
      Quit();
      return "";
    }

    // Connect to Mojo in the requesting process.
    mojo::IncomingInvitation invitation =
        mojo::IncomingInvitation::Accept(mojo::PlatformChannelEndpoint(
            mojo::PlatformHandle(std::move(mojo_fd_copy))));
    mojo_message_pipe =
        invitation.ExtractMessagePipe(kBootstrapMojoConnectionChannelToken);
    mojo_service_bind_attempted_ = true;
  } else {
    // Create a unique token which will allow the requesting process to connect
    // to us via Mojo.
    mojo::OutgoingInvitation invitation;
    token = base::UnguessableToken::Create().ToString();
    mojo_message_pipe = invitation.AttachMessagePipe(token);
    mojo::OutgoingInvitation::Send(
        std::move(invitation), base::kNullProcessHandle,
        mojo::PlatformChannelEndpoint(
            mojo::PlatformHandle(std::move(mojo_fd_copy))));
  }

  // Hand the bootstrapped message pipe to the service implementation.
  ocr_service_impl_->AddReceiver(
      mojo::PendingReceiver<
          chromeos::ocr::mojom::OpticalCharacterRecognitionService>(
          std::move(mojo_message_pipe)),
      should_accept_invitation);
  VLOG(1) << "Successfully bootstrapped Mojo connection";
  return token;
}

// Invoked by OcrServiceImpl when a Mojo client disconnects; quits the daemon
// if the disconnected client requires it (e.g. the browser connection).
void OcrDaemon::OnDisconnect(bool should_quit) {
  if (should_quit) {
    LOG(ERROR) << "OcrDaemon lost Mojo connection to the browser; quitting.";
    ocr_service_impl_.reset();
    Quit();
  }
}

}  // namespace ocr