content
stringlengths
10
4.9M
//Connect to the NATS message queue func (natsConn *NatsConn) Connect(host, port string, errChan chan error) { log.Info("Connecting to NATS: ", host, ":", port) nh := "nats://" + host + ":" + port conn, err := nats.Connect(nh, nats.DisconnectErrHandler(func(_ *nats.Conn, err error) { errChan <- err }), nats.DisconnectHandler(func(_ *nats.Conn) { errChan <- errors.New("unexpectedly disconnected from nats") }), ) if err != nil { errChan <- err return } natsConn.Conn = conn natsConn.JS, err = conn.JetStream() if err != nil { errChan <- err return } err = natsConn.createStream() if err != nil { errChan <- err } }
<reponame>cemozerr/artemis /* * Copyright 2019 ConsenSys AG. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. */ package tech.pegasys.artemis.networking.p2p.hobbits; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.tuweni.bytes.Bytes32; import org.apache.tuweni.units.bigints.UInt64; final class Hello { private final long networkId; private final long chainId; private final Bytes32 latestFinalizedRoot; private final UInt64 latestFinalizedEpoch; private final Bytes32 bestRoot; private final UInt64 bestSlot; @JsonCreator Hello( @JsonProperty("network_id") long networkId, @JsonProperty("chain_id") long chainId, @JsonProperty("latest_finalized_root") Bytes32 latestFinalizedRoot, @JsonProperty("latest_finalized_epoch") UInt64 latestFinalizedEpoch, @JsonProperty("best_root") Bytes32 bestRoot, @JsonProperty("best_slot") UInt64 bestSlot) { this.networkId = networkId; this.chainId = chainId; this.latestFinalizedRoot = latestFinalizedRoot; this.latestFinalizedEpoch = latestFinalizedEpoch; this.bestRoot = bestRoot; this.bestSlot = bestSlot; } @JsonProperty("network_id") public long networkId() { return networkId; } @JsonProperty("chain_id") public long chainId() { return chainId; } @JsonProperty("latest_finalized_root") public Bytes32 latestFinalizedRoot() { return latestFinalizedRoot; } @JsonProperty("latest_finalized_epoch") public UInt64 latestFinalizedEpoch() { return latestFinalizedEpoch; } @JsonProperty("best_root") 
public Bytes32 bestRoot() { return bestRoot; } @JsonProperty("best_slot") public UInt64 bestSlot() { return bestSlot; } }
package sep.gaia.resources.wikipedia.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import sep.gaia.resources.wikipedia.WikipediaManager; import sep.gaia.resources.wikipedia.WikipediaData; import sep.gaia.state.GeoState; public class WikipediaManagerTest { private static WikipediaManager manager; private static WikipediaData wallersdorf; @BeforeClass public static void init() { manager = new WikipediaManager(); } @AfterClass public static void destroy() { manager = null; } @Before public void loadOneWikipediaData() { // WikipediaData für Wallersdorf wallersdorf = new WikipediaData("Wallersdorf", "Wallersdorf", 0, 0); manager.load(wallersdorf); // Wartezeit miteinberechnen? try { Thread.sleep(5000); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } } @Test public void testRequestLoaderStop() { manager = new WikipediaManager(); manager.requestLoaderStop(); manager.load(wallersdorf); try { Thread.sleep(2000); } catch (InterruptedException e) { e.printStackTrace(); } assertTrue(manager.getCurrentWikipediaDatas().isEmpty()); } @Test public void testLoad() { fail("Not yet implemented"); } @Test public void testGetCurrentWikipediaDatas() { for (WikipediaData currentWiki : manager.getCurrentWikipediaDatas()) { assertEquals(wallersdorf, currentWiki); } } @Test public void testDisable() { manager.disable(); // Now, the WikipediaManager should be disabled and therefore, it holds // not WikipediaData objects. 
for (WikipediaData currentWiki : manager.getCurrentWikipediaDatas()) { assertEquals(null, currentWiki); } } @Test /** * Update GeoState -> Notify WikipediaManager * -> WikipediaManager loads current WikipediaData */ public void testGetUsedResources() { // Ort: Poxau GeoState geoState = new GeoState(15, 12.56123f, 48.56123f, null); manager.onUpdate(geoState); WikipediaData poxau = new WikipediaData("Poxau", null, 0, 0); boolean sameUsedResources = false; // Short description equal? for (WikipediaData currentWiki : manager.getCurrentWikipediaDatas()) { if (currentWiki.getSummaryText().equals(poxau.getSummaryText())) { sameUsedResources = true; } } assert(sameUsedResources); } @Test public void testGetWikipediaByTitle() { assertTrue(null == manager.getWikipediaByTitle("Passau")); assertTrue(null != manager.getWikipediaByTitle("Wallersdorf")); } }
package seedu.planner.testutil.activity;

import static seedu.planner.logic.commands.CommandTestUtil.VALID_ACTIVITY_ADDRESS_A;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_ACTIVITY_ADDRESS_B;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_ACTIVITY_NAME_A;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_ACTIVITY_NAME_B;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_ADDRESS_AMY;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_ADDRESS_BOB;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_COST_HUNDRED;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_COST_TWO_HUNDRED;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_DURATION_A;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_DURATION_B;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_EMAIL_AMY;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_EMAIL_BOB;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_NAME_AMY;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_NAME_BOB;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_PHONE_AMY;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_PHONE_BOB;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_PRIORITY_SEVEN;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_PRIORITY_SIX;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_TAG_FRIEND;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_TAG_HIKING;
import static seedu.planner.logic.commands.CommandTestUtil.VALID_TAG_SIGHTSEEING;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import seedu.planner.model.ActivityManager;
import seedu.planner.model.activity.Activity;

/**
 * A utility class containing a list of {@code Activity} objects to be used in tests.
 *
 * NOTE(review): the "<NAME>", "<EMAIL>" and similar tokens below look like
 * redacted fixture data — confirm against the original repository before
 * relying on the literal values.
 */
public class TypicalActivity {
    // Field order per builder:
    // Contact:  name phone (OPT)email (OPT)address tags
    // Activity: name address (OPT)contact tags

    public static final Activity ACTIVITY_ONE =
            new ActivityBuilder().withName("Visit <NAME>")
                    .withAddress("Kyoto")
                    .withContact("<NAME>", "92007122", "<EMAIL>",
                            "Nantan, Kyoto 601-0776, Japan", "tourguide")
                    .withTags("sightseeing")
                    .withDuration("30")
                    .withPriority("1")
                    .withCost("10").build();

    public static final Activity ACTIVITY_TWO =
            new ActivityBuilder().withName("Visit Mount Fuji")
                    .withAddress("Tokyo")
                    .withContact("Matsafushi", "82337121", "<EMAIL>",
                            "150-2345 Tokyo-to, Shibuya-ku, Hommachi 2 choume, 4-7, Sunny Mansion 203",
                            "tourguide")
                    .withTags("sightseeing")
                    .withDuration("60")
                    .withPriority("2")
                    .withCost("20").build();

    public static final Activity ACTIVITY_THREE =
            new ActivityBuilder().withName("sushi making")
                    .withAddress("Kodaira, Tokyo")
                    .withContact("Yui", "93619823", "<EMAIL>",
                            "Akishima, Tokyo 196-0022")
                    .withDuration("90")
                    .withPriority("3")
                    .withCost("30").build();

    public static final Activity ACTIVITY_FOUR =
            new ActivityBuilder().withName("Visit Nagoya Castle")
                    .withAddress("Tokyo")
                    .withContact("Himari", "94523656", "<EMAIL>",
                            "5 Chome Josai, Nishi Ward, Nagoya, Aichi 451-0031", "tourguide")
                    .withDuration("120")
                    .withPriority("4")
                    .withCost("40").build();

    public static final Activity ACTIVITY_FIVE =
            new ActivityBuilder().withName("Shop at Dontobori")
                    .withAddress("Tokyo")
                    .withContact("kosuke", "95255523", "<EMAIL>",
                            "5 Chome Josai, Nishi Ward, Nagoya, Aichi 451-0031")
                    .withTags("souvenirs")
                    .withDuration("180")
                    .withPriority("5")
                    .withCost("50").build();

    public static final Activity ACTIVITY_SIX =
            new ActivityBuilder().withName("Visit Monkey Park")
                    .withAddress("Yokoyu River")
                    .withContact("Kakashi", "95131415", "<EMAIL>",
                            "<NAME>uyama, Aichi 484-0081", "tourguide", "experienced")
                    .withTags("sightseeing")
                    .withDuration("210")
                    .withPriority("6")
                    .withCost("60").build();

    public static final Activity ACTIVITY_SEVEN =
            new ActivityBuilder().withName("Walk through Bamboo Forest")
                    .withAddress("Kyoto")
                    .withContact("Maylin", "95123444", "<EMAIL>",
                            "<NAME>, Kyoto, 603-8477", "Japanfriend")
                    .withTags("MUSTdo", "sightseeing")
                    .withDuration("240")
                    .withPriority("7")
                    .withCost("70").build();

    // Manually added
    public static final Activity ACTIVITY_EIGHT =
            new ActivityBuilder().withName("Visit Ramen Museum")
                    .withAddress("Shitamachi")
                    .withContact("Jack", "81241034", "<EMAIL>",
                            "<NAME>, Shitamachi, 120-8222")
                    .withDuration("270")
                    .withPriority("4")
                    .withCost("80").build();

    public static final Activity ACTIVITY_NINE =
            new ActivityBuilder().withName("Watch Kabuki show")
                    .withAddress("Osaka")
                    .withContact("Gaara", "91037493", "<EMAIL>",
                            "<NAME>, Osaka, 543-0017")
                    .withDuration("300")
                    .withPriority("5")
                    .withCost("90").build();

    // Manually added - Activity's details found in {@code CommandTestUtil}
    public static final Activity ACTIVITY_A =
            new ActivityBuilder().withName(VALID_ACTIVITY_NAME_A)
                    .withAddress(VALID_ACTIVITY_ADDRESS_A)
                    .withContact(VALID_NAME_AMY, VALID_PHONE_AMY, VALID_EMAIL_AMY,
                            VALID_ADDRESS_AMY, VALID_TAG_FRIEND)
                    .withTags(VALID_TAG_SIGHTSEEING)
                    .withDuration(VALID_DURATION_A)
                    .withPriority(VALID_PRIORITY_SIX)
                    .withCost(VALID_COST_HUNDRED).build();

    public static final Activity ACTIVITY_B =
            new ActivityBuilder().withName(VALID_ACTIVITY_NAME_B)
                    .withAddress(VALID_ACTIVITY_ADDRESS_B)
                    .withContact(VALID_NAME_BOB, VALID_PHONE_BOB, VALID_EMAIL_BOB,
                            VALID_ADDRESS_BOB, VALID_TAG_FRIEND)
                    .withTags(VALID_TAG_HIKING)
                    .withDuration(VALID_DURATION_B)
                    .withPriority(VALID_PRIORITY_SEVEN)
                    .withCost(VALID_COST_TWO_HUNDRED)
                    .build();

    private TypicalActivity() {
    } // prevents instantiation

    /**
     * Returns an {@code ActivityManager} with all the typical activities.
     */
    public static ActivityManager getTypicalActivityManager() {
        ActivityManager am = new ActivityManager();
        for (Activity activity : getTypicalActivities()) {
            am.addActivity(activity);
        }
        return am;
    }

    // Note: ACTIVITY_EIGHT/NINE and ACTIVITY_A/B are deliberately excluded —
    // they are reserved for manual-add test scenarios.
    public static List<Activity> getTypicalActivities() {
        return new ArrayList<>(Arrays.asList(ACTIVITY_ONE, ACTIVITY_TWO, ACTIVITY_THREE,
                ACTIVITY_FOUR, ACTIVITY_FIVE, ACTIVITY_SIX, ACTIVITY_SEVEN));
    }
}
import heapq

if __name__ == '__main__':
    # n values total, to be split into k groups of m picks each.
    n, m, k = map(int, input().split(' '))
    assert 2 <= n <= 200000
    assert 1 <= m
    assert 2 <= k
    assert m * k <= n

    values = [int(tok) for tok in input().split(' ')]
    assert len(values) == n

    # Indices of the m*k largest values, restored to ascending index order.
    top = heapq.nlargest(m * k, enumerate(values), key=lambda pair: pair[1])
    chosen = sorted(index for index, _ in top)

    # Total of the selected values.
    print(sum(values[index] for index in chosen))
    # 1-based split positions: the last chosen index of each group but the final one.
    print(*(chosen[group * m - 1] + 1 for group in range(1, k)))
def map_hsv(self, H, S, L):
    """Broadcast the H, S, L channel arrays to a common length.

    Each channel that holds a single element is repeated to match the
    largest channel; channels already at the common length pass through
    unchanged.

    :param H: hue values, array of size 1 or the common size
    :param S: saturation values, array of size 1 or the common size
    :param L: lightness values, array of size 1 or the common size
    :returns: tuple ``(H, S, L)`` of arrays, all of equal size
    :raises ValueError: if any channel is neither size 1 nor the common size
    """
    size = max(v.size for v in (H, S, L))

    def _broadcast(channel):
        # Scalars are repeated; anything else must already match.
        if channel.size == 1:
            return np.repeat(channel, size)
        if channel.size != size:
            raise ValueError('cannot map HSV of unequal size')
        return channel

    return _broadcast(H), _broadcast(S), _broadcast(L)
package goethauth

import (
	"encoding/hex"
	"reflect"
	"testing"
)

// Test_IsChallengeSignedByEthAccount verifies challenge-signature checking
// against an Ethereum account: valid inputs (with and without the 0x prefix),
// malformed hex, wrong lengths, and mismatched accounts.
//
// NOTE(review): the "<KEY>" tokens below look like redacted fixture data —
// confirm the full hex strings against the original repository.
func Test_IsChallengeSignedByEthAccount(t *testing.T) {
	type args struct {
		ethAccountStr string
		msg           string
		sigStr        string
	}
	tests := []struct {
		name    string
		args    args
		want    bool
		wantErr bool
	}{
		{
			name: "valid signature and account",
			args: args{
				ethAccountStr: "0xa26f2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa226068b85f311996530599beabe5ba67036b4cd2362" +
					"660f6b959c74d6474c2a0f8fb5504aad06054f641d9a5d6fd5c" +
					"25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    true,
			wantErr: false,
		},
		{
			// The 0x prefix on the account should be optional.
			name: "valid signature and account with no 0x",
			args: args{
				ethAccountStr: "a26f2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa226068b85f311996530599beabe5ba67036b4cd236" +
					"2660f6b959c74d6474c2a0f8fb5504aad06054f641d9a5d6f" +
					"d5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    true,
			wantErr: false,
		},
		{
			// 39 hex chars instead of 40: must be rejected with an error.
			name: "account too short",
			args: args{
				ethAccountStr: "a26f2b342aab24bcf63ea218c6a9274d30ab9a1",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa226068b85f311996530599beabe5ba67036b4" +
					"<KEY>" +
					"a5d6fd5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    false,
			wantErr: true,
		},
		{
			name: "shorter signature",
			args: args{
				ethAccountStr: "a26f2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa22068b85f311996530599beabe5ba67036b" +
					"<KEY>" +
					"fd5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    false,
			wantErr: true,
		},
		{
			// 'R' is not a hex digit, so decoding must fail.
			name: "signature not hex",
			args: args{
				ethAccountStr: "a26f2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa22R068b85f311996530599beabe5ba67036b4cd236" +
					"2660f6b959c74d6474c2a0f8fb5504aad06054f641" +
					"d9a5d6fd5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    false,
			wantErr: true,
		},
		{
			// Well-formed but incorrect signature bytes.
			name: "wrong sig",
			args: args{
				ethAccountStr: "a26f2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa226018b85f311996530599beabe5ba67036b4cd236" +
					"2660f6b959c74d6474c2a0f8fb5504aad06054f641d9a5d6" +
					"fd5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    false,
			wantErr: true,
		},
		{
			name: "not hex account submitted",
			args: args{
				ethAccountStr: "aR6f2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa226018b85f311996530599beabe5ba67036b4cd" +
					"2362660f6b959c74d6474c2a0f8fb5504aad0" +
					"6054f641d9a5d6fd5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    false,
			wantErr: true,
		},
		{
			// Valid signature, but recovered account differs from the one given.
			name: "account submitted not matching the sig one",
			args: args{
				ethAccountStr: "a26e2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr: "0xa226068b85f311996530599beabe5ba67036b4cd23" +
					"62660f6b959c74d6474c2a0f8fb5504aad06" +
					"054f641d9a5d6fd5c25d6ea5eb6edce5f2f02bd10c509bb1c51c",
			},
			want:    false,
			wantErr: true,
		},
		{
			name: "empty signature",
			args: args{
				ethAccountStr: "a26e2b342aab24bcf63ea218c6a9274d30ab9a16",
				msg:           "NO_PRODUCTION_CHALLENGE",
				sigStr:        "",
			},
			want:    false,
			wantErr: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := IsChallengeSignedByEthAccount(tt.args.ethAccountStr, tt.args.msg, tt.args.sigStr)
			if (err != nil) != tt.wantErr {
				t.Errorf("IsChallengeSignedByEthAccount() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("IsChallengeSignedByEthAccount() = %v, want %v", got, tt.want)
			}
		})
	}
}

// TestHashEthereumString checks the Ethereum-style message hash against a
// known digest (hex-encoded expected value decoded for byte comparison).
func TestHashEthereumString(t *testing.T) {
	tests := []struct {
		name string
		msg  string
		want string
	}{
		{
			name: "Hash message",
			msg:  "🦄",
			want: "714436f28d7d871df2fce3ce77" +
				"6c0bead83ad0735b07534d0ad4c880ca5b7171",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := HashEthereumString(tt.msg)
			wantB, _ := hex.DecodeString(tt.want)
			if !reflect.DeepEqual(got, wantB) {
				t.Errorf("HashEthereumString() = 0x%x, want 0x%x",
					got,
					wantB,
				)
			}
		})
	}
}

// Test_CleanSig checks normalization of the signature recovery byte: a
// trailing 0x1b maps to 0x00 and 0x1c to 0x01; other inputs (including
// short or empty slices) pass through unchanged.
func Test_CleanSig(t *testing.T) {
	tests := []struct {
		name string
		sigB []byte
		want []byte
	}{
		{
			name: "sig with no 1c or 1b",
			sigB: []byte{0x00, 0x01, 0x02},
			want: []byte{0x00, 0x01, 0x02},
		},
		{
			name: "sig with 1b",
			sigB: []byte{0x00, 0x01, 0x1b},
			want: []byte{0x00, 0x01, 0x00},
		},
		{
			name: "sig with 1c",
			sigB: []byte{0x00, 0x01, 0x1c},
			want: []byte{0x00, 0x01, 0x01},
		},
		{
			name: "sig with len 1",
			sigB: []byte{0x00},
			want: []byte{0x00},
		},
		{
			name: "sig with len 0",
			sigB: []byte{},
			want: []byte{},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := CleanSig(tt.sigB); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("CleanSig() = 0x%x, want 0x%x", got, tt.want)
			}
		})
	}
}
<reponame>skkuse-adv/2019Fall_team2 package kr.co.popone.fitts.di.component; import dagger.android.AndroidInjector; import kr.co.popone.fitts.feature.post.PostFirstWriteNoticeActivity; public interface PostFirstWriteNoticeActivityComponent extends AndroidInjector<PostFirstWriteNoticeActivity> { public static abstract class Builder extends dagger.android.AndroidInjector.Builder<PostFirstWriteNoticeActivity> { } }
Thanks to Nicholas Negroponte and the Media Lab at MIT, children in developing nations around the world will have access to technology. Negroponte, the co-founder of the Lab, said MIT and his non-profit, One Laptop Per Child, is in discussions with five countries -- Brazil, China, Thailand, Egypt and South Africa -- to distribute up to 15 million test systems to children. The idea is that governments will pay roughly $100 US for each laptop, and distribute them for free. The laptops will largely be powered by a side-mounted hand-crank, and can be juiced up with conventional electric current or batteries, when they're available. The proposed design of the machines calls for a 500MHz processor, 1GB of memory and an innovative dual-mode display that can be used in full-color mode, or in a black-and-white sunlight-readable mode. One display design being considered would also consume unconventionally low amounts of power and money; Negroponte said the technology can be used to produce displays that cost roughly 10 cents per square inch. "The target is $12 for a 12-inch display with near-zero power consumption," he said. The systems will be Wi-Fi and cell phone-enabled, and along with four USB ports, will include something called "mesh networking," a peer-to-peer concept that allows machines to share a single internet connection. Negroponte said the current plan is to produce 100 million to 150 million units by 2007. ::CNET via ::Gizmodo
Digital Ischemia and Necrosis: A Rarely Described Complication of Gemcitabine in Pancreatic Adenocarcinoma Abstract Background: Gemcitabine, alone or in combination with other agents, has become an important part of the standard of care for treatment of both resectable and unresectable/advanced pancreatic adenocarcinoma. Gemcitabine is generally considered to have a favorable toxicity profile, with myelosuppression and hepatotoxicity as the most common adverse effects. There are just two prior published case reports of gemcitabine-associated digital toxicity in the treatment of pancreatic adenocarcinoma, and few case reports when considering all solid tumors. Presentation: A 70-year-old female developed hand numbness and tingling while receiving nab-paclitaxel plus gemcitabine for metastatic pancreatic adenocarcinoma. There was initial concern for Raynaud's or nab-paclitaxel-associated neuropathy, thus nab-paclitaxel was discontinued. However, her symptoms progressed to severe pain and her digits became dusky. An extensive evaluation revealed no alternative etiology except gemcitabine-associated digital ischemia (DI). The patient was treated with discontinuation of gemcitabine, and starting nitrates, opiates, calcium-channel blockers, and enoxaparin but eventually progressed to dry gangrene. Conclusion: Here we report a case of gemcitabine-associated DI, along with a review of the literature. Although a rare complication, DI must be recognized and treated promptly to reduce the likelihood of serious and permanent morbidity. Introduction Gemcitabine as a single agent or in combination therapy is a mainstay treatment for pancreatic adenocarcinoma. 1,2 Gemcitabine is generally well tolerated, with a favorable toxicity profile. 1 There are case reports of gemcitabine-associated digital ischemia (DI), but there are only two prior case reports in the treatment of pancreatic adenocarcinoma. 
Here we present a case of gemcitabine-associated DI in a patient with pancreatic adenocarcinoma, along with a literature review. Case Report A 70-year-old female with metastatic pancreatic adenocarcinoma receiving gemcitabine and nab-paclitaxel presented to clinic with numbness and tingling in her bilateral hands. Owing to initial concern for nabpaclitaxel-associated neurotoxicity, nab-paclitaxel was discontinued while the patient was continued on gemcitabine (cumulative dose 4160 mg/m 2 ). At 2-week follow-up, the patient reported progressive tenderness, coolness, and purple discoloration of her bilateral index fingers that was initially relieved with warm water immersion. Gemcitabine was promptly discontinued and the patient was admitted for further management. The patient had previously received FOLFOX and then FOLFIRI. She had a history of soleal deep vein thrombosis but no history of rheumatological, connective tissue or peripheral vascular disease. She was a 15-pack/ year smoker but quit 2 years previously. Her medications included diltiazem, pravastatin, and aspirin. Upon admission, the patient's vital signs were within normal limits. Examination revealed bilateral necrotic index fingers (Fig. 1). Basic laboratories revealed leukocytosis, mild anemia, and hypoalbuminemia. The prothrombin time/international normalized ratio, partial thromboplastin time, lupus anticoagulant, anticardiolipin antibody, cryoglobulin S, C3/C4, and extractable nuclear antigen testing were unremarkable. Antinuclear antibody was positive with 1:320 titer and speckled pattern. Echocardiogram showed no evidence of intracardiac thrombi. CT angiography revealed no evidence of systemic emboli. Arterial Doppler studies showed immeasurable digit/arm indices (Fig. 2). Given the negative evaluation for competing etiologies, gemcitabineassociated DI was diagnosed. The patient was managed with gemcitabine discontinuation, along with initiation of calcium-channel blockade, nitrates, and opiates. 
Therapeutic enoxaparin was also initiated, but was subsequently discontinued because of gastrointestinal bleed. Despite treatment, the patient's symptoms progressed to dry gangrene of the bilateral index fingers. Discussion Gemcitabine has been associated with thrombotic vascular complications affecting various organ systems. 3 However, DI is a rare adverse effect of gemcitabine likely caused by drug-related microvascular endothelial damage and a relative hypercoagulable state, 3 with only case reports to aid clinicians in identifying risk factors, recognizing signs/symptoms, and guiding management decisions. A PubMed search using ''digital ischemia/necrosis'' and ''gemcitabine,'' selecting English language articles and also using relevant cases from those articles' references, revealed nine cases of gemcitabine-associated DI. In each case, gemcitabine was withdrawn. In this case, there was no underlying disease. Prior smoking was the only risk factor. The cumulative gemcitabine dose of 4160 mg/m 2 was lower than previous reports. Given the lack of risk factors and initial suspicion for competing etiologies, gemcitabine was not immediately discontinued. Despite eventual discontinuation of gemcitabine and initiation of vasodilator therapy, the patient suffered irreversible digital necrosis. Therefore, based on variability in risk factors and cumulative dose exposure, clinicians should maintain a high degree of suspicion for gemcitabine-associated DI. Conclusion DI is a rare side effect of gemcitabine, which may be attributable to endothelial damage and a hypercoagulable state. There is variability in risk factors, presenting signs/symptoms, and treatment response, creating a diagnostic and treatment dilemma for clinicians. Withdrawing gemcitabine and initiation of prostaglandins and/or alternative vasodilators may be an effective treatment modality.
/**
 * Dispose of the camera preview when the drawing surface goes away.
 *
 * @param holder the surface holder being destroyed (unused here)
 */
public void surfaceDestroyed(SurfaceHolder holder) {
    // Nothing to tear down if the camera was never acquired or already released.
    if (mCamera == null) {
        return;
    }
    mCamera.stopPreview();
}
/**
 * Appends decoy sequences to the given target database file.
 *
 * <p>The output file is created next to the input file, named after it with
 * the configured target-decoy suffix and a ".fasta" extension.
 *
 * @param fastaParameters the FASTA parsing parameters
 * @param waitingHandler the waiting handler
 *
 * @return the file created
 * @throws IOException exception thrown whenever an error happened while
 * reading or writing a FASTA file
 */
public File generateTargetDecoyDatabase(
        FastaParameters fastaParameters,
        WaitingHandler waitingHandler
) throws IOException {

    File targetFile = fastaCLIInputBean.getInputFile();

    // Derive the output name: <input-without-extension><suffix>.fasta
    String decoyFileName = IoUtil.removeExtension(targetFile.getName())
            + fastaParameters.getTargetDecoyFileNameSuffix()
            + ".fasta";
    File decoyFile = new File(targetFile.getParent(), decoyFileName);

    waitingHandler.setWaitingText("Appending Decoy Sequences. Please Wait...");

    DecoyConverter.appendDecoySequences(targetFile, decoyFile, fastaParameters, waitingHandler);

    return decoyFile;
}
Final Fantasy XV did the near impossible by finally releasing back in late November for PlayStation 4 and Xbox One after years of development. We’ve heard previous details of the number of units shipped at launch and since for the game, which have been pretty impressive, but now Square Enix has revealed just how successful the game was right out of the gate. We knew that the cost of a game like Final Fantasy XV that has been in development for so long would have to be rather high, but we’ve never really heard any exact totals. One report from game director Hajime Tabata stated that Square Enix needed to sell 10 million copies of the game to recoup the costs of the game, but he later stated that the 10 million was a high goal and not what was necessary to break even. While that did not turn out to be the case, we have now learned some interesting information on the topic. DualShockers was able to visit Square Enix’s Business Division 2 development floor in Tokyo, Japan where they were able to speak with Tabata. During this discussion, Tabata revealed that they had already broke even on Final Fantasy XV. In fact, he told them that they broke even on day one. This is very impressive, as they were able to have such a success with the game right out the gate by already recouping the game’s budget. It was previously announced that Square Enix shipped five million copies of the game on release day but we obviously have no official numbers of how many of those were actually sold. Final Fantasy XV still has plenty to go as well, with DLC still to come for the game moving forward. Some of that DLC was announced last week, which will be kicking off later in March.
use async_std::{
    net::{TcpListener, ToSocketAddrs},
    prelude::*,
};
use futures::FutureExt;
use kuska_ssb::keystore::OwnedIdentity;

use crate::broker::*;

use anyhow::Result;

/// TCP accept-loop actor for the sbot listener.
///
/// Binds to `addr`, then accepts incoming connections until either the
/// listener stream ends or the broker signals termination. Each accepted
/// connection is handed off to a freshly spawned peer actor.
pub async fn actor(server_id: OwnedIdentity, addr: impl ToSocketAddrs) -> Result<()> {
    // Register with the broker so we receive its termination signal.
    let broker = BROKER.lock().await.register("sbot-listener", false).await?;
    let mut ch_terminate = broker.ch_terminate.fuse();

    let listener = TcpListener::bind(addr).await?;
    let mut incoming = listener.incoming();
    loop {
        select_biased! {
            // Biased arm: shutdown takes priority over new connections.
            _ = ch_terminate => break,
            stream = incoming.next().fuse() => {
                if let Some(stream) = stream {
                    // Accept errors are silently skipped; keep listening.
                    if let Ok(stream) = stream {
                        // Each peer connection runs as its own spawned actor,
                        // driving the client side of the handshake.
                        Broker::spawn(super::peer::actor(server_id.clone(), super::peer::Connect::ClientStream{stream}));
                    }
                } else {
                    // Listener stream exhausted: stop accepting.
                    break;
                }
            },
        }
    }
    // Best-effort termination acknowledgement; send error deliberately ignored.
    let _ = broker.ch_terminated.send(Void {});
    Ok(())
}
def before_request(): is_known = not g.current_user.is_anonymous if not is_known: msg = "Account is unconfirmed." return forbidden(msg)
Regulations governing the EU wholesale electricity market have become so complex that the integration of the market is regressing instead of progressing, says Peter Styles, Chairman of the Electricity Committee of the European Federation of Energy Traders (EFET), in an interview with Energy Post. He notes TSOs (transmission system operators) on average now make less cross-border electricity transmission capacity available on the EU high voltage grid than in the late 1990s – before there had been any legislation forcing integration of national energy markets! Fortunately, the Clean Energy Package – the Commission’s proposed ambitious suite of revised legislation – can move the market back in the right direction, says Styles. If it is adopted with the right amendments … “The high point of a good EU approach to creating an integrated, open market in power was in 2003, with the adoption of the so-called 2nd IEM (Internal Energy Market) Legislative Package. All the pieces were put in place then to allow objective, transparent, non-discriminatory grid access on a cross-border as well as national basis, and to ensure that markets would remain open through regulatory oversight. But after that, especially with the advent of the Third Energy Package, which came into force after 2009, things went downhill. The market architecture and governance became overly complex. The EU legislation explicitly excluded renewable electricity generation output from the disciplines of the IEM. The EU emissions trading system (ETS) began to fail as a mechanism for pricing carbon emissions. Governments introduced new national measures in the name of security of supply, unchecked by EU oversight.” “With the Third Energy Package, the EU took a wrong turn” This sobering assessment comes from someone who is intimately familiar with the process of EU energy sector liberalisation and market integration. 
A soft-spoken Englishman, Peter Styles was, in the late 1990s, the EU representative of then-thriving Enron – the American company that became one of the largest energy trading houses in the world. After Enron collapsed into bankruptcy in late 2001, Styles was persuaded by fellow members of the Board of EFET, the association of European energy traders that he had helped found in 1999, to remain on the Board and become their retained electricity market specialist. EFET members include large and small generators, independent traders, banks, and large energy users from all over Europe, who have one common goal: to help create a competitive, transparent, integrated energy market in the EU, based on objective and non-discriminatory access to power and gas grids. They are convinced that a well-functioning EU wholesale energy market will not only lead to lower prices for consumers, but is also key to a successful low-carbon energy transition. Efficiently functioning markets in energy commodities, carbon emission allowances and renewable attributes will enable the most efficient production, storage and distribution of renewable energy across Europe in the future, Styles believes. Quasi-statutory role Indeed, EU energy policy is fully aligned with this goal – in theory at least. In practice, says Styles, things don’t look so good. Initially, in the early 2000s great strides were made in opening up and connecting markets, especially after the 2nd Electricity and Gas Directives were adopted in 2003. “For the first time, EU electricity market regulation was directly applicable in each member state, to govern cross-border grid access, thereby facilitating exports and imports of power”, says Styles. In those heady days, some new entrants and large industrial consumers were even dreaming of turning Europe into a “single copper plate”. At least there was real hope of replacing national boundaries to energy markets with enlarged bidding zones. 
But with the Third Energy Package, the EU took a wrong turn, according to Styles. One mistake, he says, was the conversion of ETSO, a voluntary association, into ENTSO-E, a compulsory grouping of European electricity TSOs (transmission system operators). Or rather, “the mistake was to give this organisation a quasi-statutory role. The European Commission entrusted the electricity TSOs jointly with the task to develop EU network codes setting out rules for the functioning of the market, or what we call market facing codes. In electricity, unlike in the gas sector, these draft codes were not a success at all.” “Five years ago, I would have been severely reprimanded by [green MEP] Claude Turmes for even suggesting such a prohibition, but now even the renewables organisations like WindEurope and Solar Europe accept it” The Commission was forced to acknowledge this, says Styles, and stopped short of adopting network codes directly applicable in the entire EU. Instead, the draft texts elaborated by ENTSO-E, but challenged by market participants and by ACER (the Agency for the Cooperation of Energy Regulators), were turned into binding Commission guidelines, to be implemented in effect in each member state. But as such, they are still meant to set market rules, notes Styles. “And they are terribly complex.” For example, there are guidelines by which TSOs calculate available cross-border transmission capacity and how they should cooperate on balancing their systems. The result, says Styles, is that “we as traders feel they are withholding transmission capacity from the market. ACER agrees with us. But the TSOs claim they are up against their security limits.” The situation is so bad, says Styles, that “back in the late 1990s, when there were no market integration rules and you had to make separate deals with each TSO, the allocation of capacity for cross-border trade was closer to the physical capacity of interconnection transmission lines than it is today”. 
RES collision course One reason the TSOs tend to withhold capacity from the market, Styles notes, is to allow for – often unpredictable – flows induced by solar and wind power generation, which in effect get “priority access” to the grid. On this point, EFET sympathizes with the TSOs. “This is another reason why I argue that EU legislation has taken a wrong turn”, says Styles. “Successive renewable energy directives have been developed in isolation from the internal energy market directives. I warned in 2008 at the Florence Forum [a high-level talking shop on the EU electricity market, ed.] that the second Renewable Energy Directive and the rest of the Third Energy Package were on a collision course. That’s how it has turned out to be.” All member states have programmes in place to support renewable energy. But “each scheme is different, each is contained within its national borders. They don’t take into account the needs and possibilities of cross-border transactions.” Renewables, if they need support, should be supported financially, not by giving them special privileges. What is more, adds Styles, “they rely not just on financial support, but also on regulations that give them priority on the grid. This has constrained and distorted the market.” These measures taken together even have the effect of totally blocking borders at times, for example between Germany and Poland and Germany and Denmark. The result has been a suboptimal system, says Styles. “This is not just bad for trade. It’s bad for consumers, because it drives up costs, and it’s bad for the energy transition. We could have a balanced, low-carbon European electricity mix, with hydropower in the Nordic countries and central Europe, wind power in North West Europe, solar power in Southern Europe and remaining nuclear, if we had open borders. But we have witnessed a closing of borders rather than further opening them.” Styles stresses that he is not against renewable energy. “I can’t be.
My members, including the large generators, are all getting into renewable power generation and supply. Even those who do not own renewable generation, are active in commercially managing renewable assets and doing the bidding for the owners of them.” But renewables, if they need support, should be supported financially, not by giving them special privileges. “The way to do it is to have a completely level playing field across the EU, and if renewables then still need support, they should receive that in monetised form, ideally based on an international, market based scheme.” The good news, says Styles, is that the Clean Energy Package – the legislative proposals presented by the European Commission on 30 November 2016 – is “going in the right direction. It is trying to re-orchestrate the provisions for renewable energy and the internal market, bringing them more into alignment with each other.” “If the DSOs need access to storage capacity, there is nothing to keep them from buying it – by procuring storage services in the market” For example, for new renewables projects, the proposed Electricity Market Regulation would prohibit priority dispatch. “This is a major victory for us,” says Styles. “Five years ago, I would have been severely reprimanded by [green MEP] Claude Turmes for even suggesting such a prohibition, but now even the renewables organisations like WindEurope and Solar Europe accept it.” New renewable generation units must also be “balance responsible”, if the Commission has its way. “Again, a major step forward”, notes Styles, although he is concerned that the new rules will not apply to smaller existing projects as things stand. “We believe there should be a phase-out period, so that within a few years all generation, no matter of what size or type or when installed, competes according to the same dispatch and imbalance regime. 
We are afraid otherwise there will be a rush to build new projects before 2019, when the new Regulation will probably take effect.” Storage facilities Meanwhile, another threat to the market is looming, in the view of EFET, namely the possibility that distribution system operators (DSOs) will be allowed to operate and own power storage facilities. Storage is more and more becoming a key asset in the emerging energy market, dominated by intermittent renewables. DSOs argue that they need storage to ensure the energy system will continue to function smoothly. But according to Styles, a core market principle is “strict unbundling” between network operators and market players. “Storage should be part of the market, just like generation. If the DSOs need access to storage capacity, there is nothing to keep them from buying it – by procuring storage services in the market.” “If we could start from scratch, I would tear up all the market facing electricity guidelines and re-create an EU legal framework based on principles rather than detailed rules” Styles notes that “many of our members are getting involved in decentralized services. They have affiliates which are active in aggregation, demand response and other small-scale services for customers. There is nothing to stop this from progressing as long as distribution tariffs are correctly structured and DSOs don’t steal their business.” He mentions the example of the Netherlands, where DSOs are taking more and more market activities for themselves, such as offering to manage charging and discharging of electric car batteries. Nevertheless, Styles is optimistic about the future. “We are learning the lessons of what went wrong with the Third Energy Package. I credit DG Energy with trying to get the renewable energy track aligned with the internal market track. 
Let’s keep going and let’s get it done.” He adds that “some of the best EU thinking on energy markets also takes place in DG COMP”, the EU’s competition directorate-general. “If it were up to me”, he says, “and we could start from scratch, I would tear up all the market facing electricity guidelines and re-create an EU legal framework based on principles rather than detailed rules.”
// Tideland Go Database Clients - CouchDB Client // // Copyright (C) 2016-2020 <NAME> / Tideland / Oldenburg / Germany // // All rights reserved. Use of this source code is governed // by the new BSD license. package couchdb // import "tideland.dev/go/db/couchdb" //-------------------- // IMPORTS //-------------------- import ( "fmt" ) //-------------------- // OPTIONS //-------------------- const ( defaultHost = "127.0.0.1:5984" defaultAddress = "127.0.0.1" defaultPort = 5984 defaultName = "default" defaultLogging = false ) // Options is returned when calling Options() on Database to // provide information about the database configuration. type Options struct { Host string Logging bool Name string } // Option defines a function setting an option. type Option func(db *Database) error // Host sets the network address and port of the CouchDB. func Host(address string, port int) Option { return func(db *Database) error { if address == "" { address = defaultAddress } if port <= 0 { port = defaultPort } db.host = fmt.Sprintf("%s:%d", address, port) return nil } } // Name sets the database name to use. func Name(name string) Option { return func(db *Database) error { if name == "" { name = defaultName } db.name = name return nil } } // Logging activates the logging. func Logging() Option { return func(db *Database) error { db.logging = true return nil } } // EOF
<filename>codegen/src/main/java/com/oes/openfmb/generation/Profiles.java // SPDX-FileCopyrightText: 2021 Open Energy Solutions Inc // // SPDX-License-Identifier: Apache-2.0 package com.oes.openfmb.generation; import com.google.protobuf.Descriptors; import openfmb.breakermodule.*; import openfmb.capbankmodule.*; import openfmb.essmodule.*; import openfmb.generationmodule.*; import openfmb.loadmodule.*; import openfmb.metermodule.*; import openfmb.reclosermodule.*; import openfmb.regulatormodule.*; import openfmb.resourcemodule.*; import openfmb.solarmodule.*; import openfmb.switchmodule.*; import java.util.*; public class Profiles { private Profiles() {} public static List<Descriptors.Descriptor> list = Collections.unmodifiableList(get()); private static List<Descriptors.Descriptor> get() { return Arrays.asList( // Breaker BreakerDiscreteControlProfile.getDescriptor(), BreakerEventProfile.getDescriptor(), BreakerReadingProfile.getDescriptor(), BreakerStatusProfile.getDescriptor(), // CapBank CapBankControlProfile.getDescriptor(), CapBankDiscreteControlProfile.getDescriptor(), CapBankEventProfile.getDescriptor(), CapBankReadingProfile.getDescriptor(), CapBankStatusProfile.getDescriptor(), // ESS ESSControlProfile.getDescriptor(), ESSEventProfile.getDescriptor(), ESSReadingProfile.getDescriptor(), ESSStatusProfile.getDescriptor(), // Generation GenerationControlProfile.getDescriptor(), GenerationDiscreteControlProfile.getDescriptor(), GenerationEventProfile.getDescriptor(), GenerationReadingProfile.getDescriptor(), GenerationStatusProfile.getDescriptor(), // Load LoadControlProfile.getDescriptor(), LoadEventProfile.getDescriptor(), LoadReadingProfile.getDescriptor(), LoadStatusProfile.getDescriptor(), // Meter MeterReadingProfile.getDescriptor(), // Recloser //RecloserControlProfile.getDescriptor(), RecloserDiscreteControlProfile.getDescriptor(), RecloserEventProfile.getDescriptor(), RecloserReadingProfile.getDescriptor(), RecloserStatusProfile.getDescriptor(), // 
Regulator RegulatorDiscreteControlProfile.getDescriptor(), RegulatorControlProfile.getDescriptor(), RegulatorEventProfile.getDescriptor(), RegulatorReadingProfile.getDescriptor(), RegulatorStatusProfile.getDescriptor(), // Resource ResourceDiscreteControlProfile.getDescriptor(), ResourceEventProfile.getDescriptor(), ResourceReadingProfile.getDescriptor(), ResourceStatusProfile.getDescriptor(), // Solar SolarControlProfile.getDescriptor(), SolarEventProfile.getDescriptor(), SolarReadingProfile.getDescriptor(), SolarStatusProfile.getDescriptor(), // Switch //SwitchControlProfile.getDescriptor(), SwitchDiscreteControlProfile.getDescriptor(), SwitchEventProfile.getDescriptor(), SwitchReadingProfile.getDescriptor(), SwitchStatusProfile.getDescriptor() ); } }
import React, { SVGProps } from 'react'; import generateIcon from '../../generateIcon'; const SolidGopuram = (props: SVGProps<SVGSVGElement>) => { return ( <svg viewBox="0 0 512 512" width="1em" height="1em" {...props}> <path d="M496 352h-16V240c0-8.8-7.2-16-16-16h-16v-80c0-8.8-7.2-16-16-16h-16V16c0-8.8-7.2-16-16-16s-16 7.2-16 16v16h-64V16c0-8.8-7.2-16-16-16s-16 7.2-16 16v16h-64V16c0-8.8-7.2-16-16-16s-16 7.2-16 16v16h-64V16c0-8.8-7.2-16-16-16S96 7.2 96 16v112H80c-8.8 0-16 7.2-16 16v80H48c-8.8 0-16 7.2-16 16v112H16c-8.8 0-16 7.2-16 16v128c0 8.8 7.2 16 16 16h80V352h32V224h32v-96h32v96h-32v128h-32v160h80v-80c0-8.8 7.2-16 16-16h64c8.8 0 16 7.2 16 16v80h80V352h-32V224h-32v-96h32v96h32v128h32v160h80c8.8 0 16-7.2 16-16V368c0-8.8-7.2-16-16-16zM232 176c0-8.8 7.2-16 16-16h16c8.8 0 16 7.2 16 16v48h-48zm56 176h-64v-64c0-8.8 7.2-16 16-16h32c8.8 0 16 7.2 16 16z" /> </svg> ); }; export default generateIcon(SolidGopuram);
/* tslint:disable:no-unused-variable */ import { BaseRequestOptions, Http, ResponseOptions, Response } from '@angular/http' import { TestBed, async, inject, fakeAsync, tick } from '@angular/core/testing'; import { MockBackend, MockConnection } from '@angular/http/testing'; import { ElastalertControlService } from './elastalert-control.service'; import { IJsendResponse } from '../../shared/base.service'; import { ELASTALERT_PATH } from '../../shared/api'; import { Observable } from 'rxjs'; describe('ElastalertControlService', () => { beforeEach(() => { TestBed.configureTestingModule({ providers: [ ElastalertControlService, BaseRequestOptions, MockBackend, { provide: Http, useFactory: function(backend, defaultOptions) { return new Http(backend, defaultOptions); }, deps: [MockBackend, BaseRequestOptions] } ] }); }); it('should inject GlobalConfigService', inject([ElastalertControlService], (service: ElastalertControlService) => { expect(service).toBeTruthy(); })); describe('restart method', () => { it('should post to API', inject([ElastalertControlService, MockBackend], fakeAsync((globalconfigService:ElastalertControlService, mockBackend:MockBackend) => { var result: number; mockBackend.connections.subscribe((c: MockConnection) => { expect(c.request.url).toBe(ELASTALERT_PATH + '/restart'); let mockResponseBody: IJsendResponse = { status: 'success', data: 123, message: '' }; let response = new ResponseOptions({body: JSON.stringify(mockResponseBody)}); c.mockRespond(new Response(response)); }); globalconfigService.restart().subscribe(response => { result = response; }); tick(); expect(result).toBe(123); }))) describe('start method', () => { it('should post to API', inject([ElastalertControlService, MockBackend], fakeAsync((globalconfigService:ElastalertControlService, mockBackend:MockBackend) => { var result: number; mockBackend.connections.subscribe((c: MockConnection) => { expect(c.request.url).toBe(ELASTALERT_PATH + '/start'); let mockResponseBody: IJsendResponse = { 
status: 'success', data: 123, message: '' }; let response = new ResponseOptions({body: JSON.stringify(mockResponseBody)}); c.mockRespond(new Response(response)); }); globalconfigService.start().subscribe(response => { result = response; }); tick(); expect(result).toBe(123); }))) }) describe('stop method', () => { it('should post to API', inject([ElastalertControlService, MockBackend], fakeAsync((globalconfigService:ElastalertControlService, mockBackend:MockBackend) => { var result: number; mockBackend.connections.subscribe((c: MockConnection) => { expect(c.request.url).toBe(ELASTALERT_PATH + '/stop'); let mockResponseBody: IJsendResponse = { status: 'success', data: 123, message: '' }; let response = new ResponseOptions({body: JSON.stringify(mockResponseBody)}); c.mockRespond(new Response(response)); }); globalconfigService.stop().subscribe(response => { result = response; }); tick(); expect(result).toBe(123); }))) }) }) });
/** * Follow a given path. * * <p> * Unlike the {@link Pathfinder#goToPosition(HeadingPoint)} method, this * does not attempt to optimize a path to as few targets as possible. * Rather, this method generates paths to each of the waypoints and merges * them all together - thus ensuring that these waypoints are still reached * and not entirely ignored. * </p> * * <p> * If the pathfinder fails to find/generate a path, nothing will happen. * No path will be followed. The robot won't move. The robot won't do * anything at all, actually. So if you're wondering why a path isn't * generating, well... now you know. * </p> * * <p> * Path and trajectory generations can be very expensive. The longer the * path you'd like to follow, the more paths, and thus the more * trajectories, that need to be generated. * </p> * * <p> * This method is used most effectively in conjunction with any type of * trajectory-based follower, such as the {@link SwerveFollower}. Although * it does work with any other type of follower, you don't get any of the * benefits of waypoint-based path generation by using a simpler follower * type, such as the {@link PidFollower}. * </p> * * @param points the points to be used as waypoints in path generation. * @return a chainable PromisedFinder object. * @see PathfinderManager#followPath(HeadingPoint...) */ @Async public PromisedFinder followPath(DynamicArray<HeadingPoint> points) { /* * As a wrapper class, Pathfinder provides very little functionality. * * The PathfinderManager class, then, does all of the heavy lifting. * The methods presented in the Pathfinder class are designed to * simplify the implementation of Pathfinder, not add to it. */ return getManager().followPath(points); }
def max_area(height: list) -> int: area = 0 front = 0 back = len(height) - 1 while front != back: area = max(area, min(height[front], height[back]) * (back - front)) if height[front] > height[back]: back -= 1 else: front += 1 return area
def as_html_in_list(self): return ''.join([ '<li class="list-group-item clearfix" style="font-size: 130%;">', ('<img src="%s" style="float:left; margin-right: 20px;">' % self.image.version_generate('thumbnail').url) if self.image else '', '%s<br>' % self.name, '<span style="font-family: courier, monospace; font-size: 70%">', '%s, Objekt-ID: %d</span></li>' % ( os.path.basename(self.doc.filename), self.id), ])
/* * Copyright 2010 Outerthought bvba * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.lilyproject.repository.impl.test; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.Arrays; import java.util.HashSet; import java.util.Random; import java.util.Set; import org.apache.commons.io.IOUtils; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTableInterface; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import org.lilyproject.repository.api.Blob; import org.lilyproject.repository.api.BlobException; import org.lilyproject.repository.api.BlobManager; import org.lilyproject.repository.api.BlobNotFoundException; import org.lilyproject.repository.api.BlobReference; import org.lilyproject.repository.api.FieldNotFoundException; import org.lilyproject.repository.api.FieldType; import org.lilyproject.repository.api.FieldTypeEntry; import org.lilyproject.repository.api.FieldTypeNotFoundException; import org.lilyproject.repository.api.HierarchyPath; import org.lilyproject.repository.api.InvalidRecordException; import org.lilyproject.repository.api.QName; import org.lilyproject.repository.api.Record; import org.lilyproject.repository.api.RecordId; import org.lilyproject.repository.api.RecordNotFoundException; import org.lilyproject.repository.api.RecordType; import 
org.lilyproject.repository.api.Repository; import org.lilyproject.repository.api.RepositoryException; import org.lilyproject.repository.api.Scope; import org.lilyproject.repository.api.TypeManager; import org.lilyproject.repository.api.ValueType; import org.lilyproject.repository.impl.BlobIncubatorMonitor; import org.lilyproject.repository.impl.BlobStoreAccessRegistry; import org.lilyproject.repository.impl.id.IdGeneratorImpl; import org.lilyproject.repotestfw.RepositorySetup; import org.lilyproject.util.hbase.LilyHBaseSchema; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public abstract class AbstractBlobStoreTest { private static String namespace = "test"; protected static final RepositorySetup repoSetup = new RepositorySetup(); static { repoSetup.setBlobLimits(50, 1024); } protected static Repository repository; protected static TypeManager typeManager; protected static Random random = new Random(); protected static BlobStoreAccessRegistry testBlobStoreAccessRegistry; protected static BlobManager blobManager; @Test public void testCreate() throws Exception { QName fieldName = new QName(namespace, "testCreate"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testCreateRT")); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = Bytes.toBytes("someBytes"); Blob blob = writeBlob(bytes, "aMediaType", "testCreate"); Record record = repository.newRecord(); record.setRecordType(recordType.getName()); record.setField(fieldName, blob); record = repository.create(record); byte[] readBytes = readBlob(record.getId(), fieldName); assertTrue(Arrays.equals(bytes, readBytes)); // Test the getInputStream with 
giving the record instead of the recordId InputStream inputStream = repository.getInputStream(record, fieldName); try { byte[] readBytes2 = IOUtils.toByteArray(inputStream); assertTrue(Arrays.equals(bytes, readBytes2)); } finally { IOUtils.closeQuietly(inputStream); } } @Test public void testThreeSizes() throws Exception { QName fieldName1 = new QName(namespace, "testThreeSizes1"); QName fieldName2 = new QName(namespace, "testThreeSizes2"); QName fieldName3 = new QName(namespace, "testThreeSizes3"); FieldType fieldType1 = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName1, Scope.NON_VERSIONED); fieldType1 = typeManager.createFieldType(fieldType1); FieldType fieldType2 = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName2, Scope.NON_VERSIONED); fieldType2 = typeManager.createFieldType(fieldType2); FieldType fieldType3 = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName3, Scope.NON_VERSIONED); fieldType3 = typeManager.createFieldType(fieldType3); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testThreeSizes")); recordType.addFieldTypeEntry(typeManager.newFieldTypeEntry(fieldType1.getId(), true)); recordType.addFieldTypeEntry(typeManager.newFieldTypeEntry(fieldType2.getId(), true)); recordType.addFieldTypeEntry(typeManager.newFieldTypeEntry(fieldType3.getId(), true)); recordType = typeManager.createRecordType(recordType); byte[] small = new byte[10]; random.nextBytes(small); byte[] medium = new byte[100]; random.nextBytes(medium); byte[] large = new byte[2048]; random.nextBytes(large); Blob smallBlob = writeBlob(small, "mime/small", "small"); Blob mediumBlob = writeBlob(medium, "mime/medium", "medium"); Blob largeBlob = writeBlob(large, "mime/large", "large"); Record record = repository.newRecord(); record.setRecordType(recordType.getName()); record.setField(fieldName1, smallBlob); record.setField(fieldName2, mediumBlob); record.setField(fieldName3, largeBlob); record = 
repository.create(record); byte[] readBytes = readBlob(record.getId(), fieldName1); assertTrue(Arrays.equals(small, readBytes)); readBytes = readBlob(record.getId(), fieldName2); assertTrue(Arrays.equals(medium, readBytes)); readBytes = readBlob(record.getId(), fieldName3); assertTrue(Arrays.equals(large, readBytes)); } /** * Test case to reproduce the 'Row key is invalid' problem reported here: * https://groups.google.com/forum/#!topic/lily-discuss/XiRxOxJTv70/discussion * * @throws Exception */ @Test public void testForceInline() throws Exception { QName fieldName = new QName(namespace, "testForceInline"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testForceInlineRT")); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); int size = 4096; Random rg = new Random(); byte[] bytes = new byte[size]; rg.nextBytes(bytes); // create BLOB object Blob blob = new Blob("application/pdf", 0L, "Document"); // create a stream to write the BLOB OutputStream bos = repository.getOutputStream(blob); // write the data bos.write(bytes); bos.close(); blob.setSize(5L); // create a new record ID RecordId rid = repository.getIdGenerator().newRecordId(); // create a new record Record record = repository.newRecord(rid); record.setRecordType(new QName(namespace, "testForceInlineRT")); // set the blob record.setField(fieldName, blob); // create the record record = repository.create(record); byte[] readBytes = readBlob(record.getId(), fieldName); assertTrue(Arrays.equals(bytes, readBytes)); // Test the getInputStream with giving the record instead of the recordId InputStream inputStream = repository.getInputStream(record, fieldName); try { byte[] readBytes2 = 
IOUtils.toByteArray(inputStream); assertTrue(Arrays.equals(bytes, readBytes2)); } finally { IOUtils.closeQuietly(inputStream); } } @Test public void testCreateTwoRecordsWithSameBlob() throws Exception { QName fieldName = new QName(namespace, "ablob2"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testCreateTwoRecordsWithSameBlobRT")); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = Bytes.toBytes("someBytes"); Blob blob = writeBlob(bytes, "aMediaType", "testCreate"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, blob); record = repository.create(record); Record record2 = repository.newRecord(); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, blob); record2 = repository.create(record2); // For an inline record this succeeds byte[] bytesLarge = new byte[3000]; random.nextBytes(bytesLarge); Blob largeBlob = writeBlob(bytesLarge, "largeBlob", "testCreate"); Record record3 = repository.newRecord(); record3.setRecordType(recordType.getName(), null); record3.setField(fieldName, largeBlob); record3 = repository.create(record3); Record record4 = repository.newRecord(); record4.setRecordType(recordType.getName(), null); record4.setField(fieldName, largeBlob); try { record4 = repository.create(record4); fail("Using the same blob in two records should not succeed"); } catch (InvalidRecordException expected) { } } @Test public void testUpdateNonVersionedBlobHDFS() throws Exception { testUpdateNonVersionedBlob(3000, true); } @Test public void testUpdateNonVersionedBlobHBase() throws Exception { testUpdateNonVersionedBlob(150, true); } @Test 
public void testUpdateNonVersionedBlobInline() throws Exception { testUpdateNonVersionedBlob(50, false); } private void testUpdateNonVersionedBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testUpdateNonVersionedBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testUpdateNonVersionedBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testUpdateNonVersionedBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes2); Blob blob2 = writeBlob(bytes2, "aMediaType", "testUpdateNonVersionedBlob2"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, blob); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, blob2); record = repository.update(record2); // Reading should return blob2 byte[] readBytes = readBlob(record.getId(), record.getVersion(), fieldName); assertTrue(Arrays.equals(bytes2, readBytes)); assertBlobDelete(expectDelete, blob); } @Test public void testDeleteNonVersionedBlobHDFS() throws Exception { testDeleteNonVersionedBlob(3000, true); } @Test public void testDeleteNonVersionedBlobHBase() throws Exception { testDeleteNonVersionedBlob(150, true); } @Test public void testDeleteNonVersionedBlobInline() throws Exception { testDeleteNonVersionedBlob(50, false); } private void testDeleteNonVersionedBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new 
QName(namespace, "testDeleteNonVersionedBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testDeleteNonVersionedBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), false); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testDeleteNonVersionedBlob"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, blob); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.addFieldsToDelete(Arrays.asList(fieldName)); record = repository.update(record2); assertBlobDelete(expectDelete, blob); } @Test public void testUpdateMutableBlobHDFS() throws Exception { testUpdateMutableBlob(3000, true); } @Test public void testUpdateMutableBlobHBase() throws Exception { testUpdateMutableBlob(150, true); } @Test public void testUpdateMutableBlobInline() throws Exception { testUpdateMutableBlob(50, false); } private void testUpdateMutableBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testUpdateMutableBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.VERSIONED_MUTABLE); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testUpdateMutableBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new 
byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testUpdateMutableBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes2); Blob blob2 = writeBlob(bytes2, "aMediaType", "testUpdateMutableBlob2"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, blob); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, blob2); record2.setVersion(record.getVersion()); record = repository.update(record2, true, false); // Blob2 should still exist byte[] readBytes = readBlob(record.getId(), record.getVersion(), fieldName); assertTrue(Arrays.equals(bytes2, readBytes)); assertBlobDelete(expectDelete, blob); } @Test public void testDeleteMutableBlobHDFS() throws Exception { testDeleteMutableBlob(3000, true); } @Test public void testDeleteMutableBlobHBase() throws Exception { testDeleteMutableBlob(150, true); } @Test public void testDeleteMutableBlobInline() throws Exception { testDeleteMutableBlob(50, false); } private void testDeleteMutableBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testDeleteMutableBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.VERSIONED_MUTABLE); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testDeleteMutableBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), false); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testDeleteMutableBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes2); Blob blob2 = writeBlob(bytes2, "aMediaType", 
"testDeleteMutableBlob2"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, blob); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, blob2); repository.update(record2, false, false); // Blob1 should still exist byte[] readBytes = readBlob(record.getId(), record.getVersion(), fieldName); assertTrue(Arrays.equals(bytes, readBytes)); // Blob2 should still exist readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName); assertTrue(Arrays.equals(bytes2, readBytes)); Record record3 = repository.newRecord(record.getId()); record3.setRecordType(recordType.getName(), null); record3.addFieldsToDelete(Arrays.asList(fieldName)); record3.setVersion(record.getVersion()); repository.update(record3, true, false); // Blob2 should still exist readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName); assertTrue(Arrays.equals(bytes2, readBytes)); assertBlobDelete(expectDelete, blob); } @Test public void testUpdateMutableMultivalueBlobHDFS() throws Exception { testUpdateMutableMultivalueBlob(3000, true); } @Test public void testUpdateMutableMultivalueBlobHBase() throws Exception { testUpdateMutableMultivalueBlob(150, true); } @Test public void testUpdateMutableMultivalueBlobInline() throws Exception { testUpdateMutableMultivalueBlob(50, false); } private void testUpdateMutableMultivalueBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testUpdateMutableMultivalueBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("LIST<BLOB>"), fieldName, Scope.VERSIONED_MUTABLE); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testUpdateMutableMultivalueBlobRT" + size)); FieldTypeEntry fieldTypeEntry = 
typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testUpdateMutableMultivalueBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes2); Blob blob2 = writeBlob(bytes2, "aMediaType", "testUpdateMutableMultivalueBlob2"); byte[] bytes3 = new byte[size]; random.nextBytes(bytes3); Blob blob3 = writeBlob(bytes3, "aMediaType", "testUpdateMutableMultivalueBlob3"); byte[] bytes4 = new byte[size]; random.nextBytes(bytes4); Blob blob4 = writeBlob(bytes4, "aMediaType", "testUpdateMutableMultivalueBlob4"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, Arrays.asList(blob, blob2)); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, Arrays.asList(blob2, blob3)); record2 = repository.update(record2, false, false); // Mutable update of first version Record record3 = repository.newRecord(record.getId()); record3.setVersion(record.getVersion()); record3.setRecordType(recordType.getName(), null); record3.setField(fieldName, Arrays.asList(blob4)); record3 = repository.update(record3, true, false); //Blob2 byte[] readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName, 0); assertTrue(Arrays.equals(bytes2, readBytes)); //Blob3 readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName, 1); assertTrue(Arrays.equals(bytes3, readBytes)); //Blob4 in version 1 readBytes = readBlob(record.getId(), record.getVersion(), fieldName, 0); assertTrue(Arrays.equals(bytes4, readBytes)); assertBlobDelete(expectDelete, blob); try { readBlob(record.getId(), record.getVersion(), fieldName); fail("BlobNotFoundException expected since index should not be null"); } catch 
(BlobNotFoundException expected) { } try { readBlob(record.getId(), record.getVersion(), fieldName, 1); fail("BlobNotFoundException expected since index is out of bounds"); } catch (BlobNotFoundException expected) { } } @Test public void testUpdateMutableHierarchyBlobHDFS() throws Exception { testUpdateMutableHierarchyBlob(3000, true); } @Test public void testUpdateMutableHierarchyBlobHBase() throws Exception { testUpdateMutableHierarchyBlob(150, true); } @Test public void testUpdateMutableHierarchyBlobInline() throws Exception { testUpdateMutableHierarchyBlob(50, false); } private void testUpdateMutableHierarchyBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testUpdateMutableHierarchyBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("PATH<BLOB>"), fieldName, Scope.VERSIONED_MUTABLE); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testUpdateMutableHierarchyBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testUpdateMutableHierarchyBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes2); Blob blob2 = writeBlob(bytes2, "aMediaType", "testUpdateMutableHierarchyBlob2"); byte[] bytes3 = new byte[size]; random.nextBytes(bytes3); Blob blob3 = writeBlob(bytes3, "aMediaType", "testUpdateMutableHierarchyBlob3"); byte[] bytes4 = new byte[size]; random.nextBytes(bytes4); Blob blob4 = writeBlob(bytes4, "aMediaType", "testUpdateMutableHierarchyBlob4"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, new HierarchyPath(blob, blob2)); record = repository.create(record); Record record2 = 
repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, new HierarchyPath(blob2, blob3, blob4)); record2 = repository.update(record2, false, false); // Mutable update of first version Record record3 = repository.newRecord(record.getId()); record3.setVersion(record.getVersion()); record3.setRecordType(recordType.getName(), null); record3.setField(fieldName, new HierarchyPath(blob4, blob4)); record3 = repository.update(record3, true, false); // Blob2 byte[] readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName, 0); assertTrue(Arrays.equals(bytes2, readBytes)); // Blob3 readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName, 1); assertTrue(Arrays.equals(bytes3, readBytes)); // Blob4 in version1 readBytes = readBlob(record.getId(), record.getVersion(), fieldName, 1); assertTrue(Arrays.equals(bytes4, readBytes)); assertBlobDelete(expectDelete, blob); try { readBlob(record.getId(), record.getVersion(), fieldName); fail("BlobNotFoundException expected since index should not be null"); } catch (BlobNotFoundException expected) { } try { readBlob(record.getId(), record.getVersion(), fieldName, 2); fail("BlobNotFoundException expected since index is out of bounds"); } catch (BlobNotFoundException expected) { } } @Test public void testUpdateMutableMultivalueHierarchyBlobHDFS() throws Exception { testUpdateMutableMultivalueHierarchyBlob(3000, true); } @Test public void testUpdateMutableMultivalueHierarchyBlobHBase() throws Exception { testUpdateMutableMultivalueHierarchyBlob(150, true); } @Test public void testUpdateMutableMultivalueHierarchyBlobInline() throws Exception { testUpdateMutableMultivalueHierarchyBlob(50, false); } private void testUpdateMutableMultivalueHierarchyBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testUpdateMutableMultivalueHierarchyBlob" + size); FieldType fieldType = 
typeManager.newFieldType(typeManager.getValueType("LIST<PATH<BLOB>>"), fieldName, Scope.VERSIONED_MUTABLE); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testUpdateMutableMultivalueHierarchyBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes); Blob blob2 = writeBlob(bytes2, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob2"); byte[] bytes3 = new byte[size]; random.nextBytes(bytes3); Blob blob3 = writeBlob(bytes3, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob3"); byte[] bytes4 = new byte[size]; random.nextBytes(bytes4); Blob blob4 = writeBlob(bytes4, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob4"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, Arrays.asList(new HierarchyPath(blob, blob2), new HierarchyPath(blob3))); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, Arrays.asList(new HierarchyPath(blob2), new HierarchyPath(blob3, blob4))); record2 = repository.update(record2, false, false); // Mutable update of first version Record record3 = repository.newRecord(record.getId()); record3.setVersion(record.getVersion()); record3.setRecordType(recordType.getName(), null); record3.setField(fieldName, Arrays.asList(new HierarchyPath(blob3, blob4), new HierarchyPath(blob4))); record3 = repository.update(record3, true, false); // Blob2 byte[] readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName, 0, 0); 
assertTrue(Arrays.equals(bytes2, readBytes)); // Blob3 readBytes = readBlob(record2.getId(), record2.getVersion(), fieldName, 1, 0); assertTrue(Arrays.equals(bytes3, readBytes)); // Blob4 in version1 readBytes = readBlob(record.getId(), record.getVersion(), fieldName, 0, 1); assertTrue(Arrays.equals(bytes4, readBytes)); assertBlobDelete(expectDelete, blob); try { readBlob(record.getId(), record.getVersion(), fieldName); fail("BlobNotFoundException expected since index should not be null"); } catch (BlobNotFoundException expected) { } try { readBlob(record.getId(), record.getVersion(), fieldName, 0); fail("BlobNotFoundException expected since index should not be null"); } catch (BlobNotFoundException expected) { } try { readBlob(record.getId(), record.getVersion(), fieldName, 2, 0); fail("BlobNotFoundException expected since index is out of bounds"); } catch (BlobNotFoundException expected) { } try { readBlob(record.getId(), record.getVersion(), fieldName, 1, 1); fail("BlobNotFoundException expected since index is out of bounds"); } catch (BlobNotFoundException expected) { } } @Test public void testDelete() throws Exception { QName fieldName = new QName(namespace, "testDelete"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testDeleteRT")); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[3000]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testCreate"); Record record = repository.newRecord(); record.setRecordType(recordType.getName()); record.setField(fieldName, blob); record = repository.create(record); repository.delete(record.getId()); assertBlobDelete(true, blob); } @Test public void 
testDeleteMultivalueHierarchyBlobSmall() throws Exception { testDeleteMultivalueHierarchyBlob(50, false); // An inputstream for the inline blob is created on the blobKey directly } @Test public void testDeleteMultivalueHierarchyBlobMedium() throws Exception { testDeleteMultivalueHierarchyBlob(150, true); } @Test public void testDeleteMultivalueHierarchyBlobLarge() throws Exception { testDeleteMultivalueHierarchyBlob(3000, true); } private void testDeleteMultivalueHierarchyBlob(int size, boolean expectDelete) throws Exception { QName fieldName = new QName(namespace, "testDeleteMultivalueHierarchyBlob" + size); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("LIST<PATH<BLOB>>"), fieldName, Scope.VERSIONED_MUTABLE); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testDeleteMultivalueHierarchyBlobRT" + size)); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); byte[] bytes = new byte[size]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob"); byte[] bytes2 = new byte[size]; random.nextBytes(bytes); Blob blob2 = writeBlob(bytes2, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob2"); byte[] bytes3 = new byte[size]; random.nextBytes(bytes3); Blob blob3 = writeBlob(bytes3, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob3"); byte[] bytes4 = new byte[size]; random.nextBytes(bytes4); Blob blob4 = writeBlob(bytes4, "aMediaType", "testUpdateMutableMultivalueHierarchyBlob4"); Record record = repository.newRecord(); record.setRecordType(recordType.getName(), null); record.setField(fieldName, Arrays.asList(new HierarchyPath(blob, blob2), new HierarchyPath(blob3))); record = repository.create(record); Record record2 = repository.newRecord(record.getId()); 
record2.setRecordType(recordType.getName(), null); record2.setField(fieldName, Arrays.asList(new HierarchyPath(blob2), new HierarchyPath(blob3, blob4))); record2 = repository.update(record2, false, false); repository.delete(record.getId()); assertBlobDelete(expectDelete, blob); assertBlobDelete(expectDelete, blob2); assertBlobDelete(expectDelete, blob3); assertBlobDelete(expectDelete, blob4); } @Test public void testBlobIncubatorMonitorUnusedBlob() throws Exception { QName fieldName = new QName(namespace, "testBlobIncubatorMonitorUnusedBlob"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testBlobIncubatorMonitorUnusedBlobRT")); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); // Incubate blob but never use it byte[] bytes = new byte[3000]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testCreate"); // Give time for the blob to expire Thread.sleep(60); BlobIncubatorMonitor monitor = new BlobIncubatorMonitor(repoSetup.getZk(), repoSetup.getHbaseTableFactory(), repoSetup.getTableManager(), blobManager, typeManager, 50, 0, 0); monitor.runMonitorOnce(); assertBlobDelete(true, blob); } @Test public void testBlobIncubatorMonitorFailureAfterReservation() throws Exception { QName fieldName = new QName(namespace, "testBlobIncubatorMonitorFailureAfterReservation"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testBlobIncubatorMonitorFailureAfterReservationRT")); FieldTypeEntry fieldTypeEntry = 
typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); // This is the failure scenario where creating the record fails after reserving the blob byte[] bytes = new byte[3000]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testCreate"); IdGeneratorImpl idGeneratorImpl = new IdGeneratorImpl(); RecordId recordId = idGeneratorImpl.newRecordId(); BlobReference blobReference = new BlobReference(blob, recordId, fieldType); Set<BlobReference> blobs = new HashSet<BlobReference>(); blobs.add(blobReference); blobManager.reserveBlobs(blobs); // Give time for the blob to expire Thread.sleep(60); BlobIncubatorMonitor monitor = new BlobIncubatorMonitor(repoSetup.getZk(), repoSetup.getHbaseTableFactory(), repoSetup.getTableManager(), blobManager, typeManager, 50, 0, 0); monitor.runMonitorOnce(); assertBlobDelete(true, blob); } @Test public void testBlobIncubatorMonitorFailureBeforeRemovingReservation() throws Exception { QName fieldName = new QName(namespace, "testBlobIncubatorMonitorFailureBeforeRemovingReservation"); FieldType fieldType = typeManager.newFieldType(typeManager.getValueType("BLOB"), fieldName, Scope.NON_VERSIONED); fieldType = typeManager.createFieldType(fieldType); RecordType recordType = typeManager.newRecordType(new QName(namespace, "testBlobIncubatorMonitorFailureBeforeRemovingReservation")); FieldTypeEntry fieldTypeEntry = typeManager.newFieldTypeEntry(fieldType.getId(), true); recordType.addFieldTypeEntry(fieldTypeEntry); recordType = typeManager.createRecordType(recordType); // This is the failure scenario where creating the record fails after reserving the blob byte[] bytes = new byte[3000]; random.nextBytes(bytes); Blob blob = writeBlob(bytes, "aMediaType", "testCreate"); IdGeneratorImpl idGeneratorImpl = new IdGeneratorImpl(); RecordId recordId = idGeneratorImpl.newRecordId(); BlobReference blobReference = new BlobReference(blob, 
recordId, fieldType); Set<BlobReference> blobs = new HashSet<BlobReference>(); blobs.add(blobReference); repository.newRecord(); Record record = repository.newRecord(); record.setRecordType(recordType.getName()); record.setField(fieldName, blob); record = repository.create(record); // Faking failure HTableInterface blobIncubatorTable = LilyHBaseSchema.getBlobIncubatorTable(repoSetup.getHbaseTableFactory(), true); Put put = new Put(blob.getValue()); put.add(LilyHBaseSchema.BlobIncubatorCf.REF.bytes, LilyHBaseSchema.BlobIncubatorColumn.RECORD.bytes, record.getId().toBytes()); put.add(LilyHBaseSchema.BlobIncubatorCf.REF.bytes, LilyHBaseSchema.BlobIncubatorColumn.FIELD.bytes, fieldType.getId().getBytes()); blobIncubatorTable.put(put); // Give time for the blob to expire Thread.sleep(60); BlobIncubatorMonitor monitor = new BlobIncubatorMonitor(repoSetup.getZk(), repoSetup.getHbaseTableFactory(), repoSetup.getTableManager(), blobManager, typeManager, 50, 0, 0); monitor.runMonitorOnce(); assertBlobDelete(false, blob); Get get = new Get(blob.getValue()); Result result = blobIncubatorTable.get(get); assertTrue(result == null || result.isEmpty()); } private void assertBlobDelete(boolean expectDelete, Blob blob) throws BlobNotFoundException, BlobException { if (expectDelete) { try { testBlobStoreAccessRegistry.getBlobAccess(blob).getInputStream(); fail("The blob " + blob + " should have been deleted."); } catch (BlobException expected) { } } else { testBlobStoreAccessRegistry.getBlobAccess(blob).getInputStream(); } } @Test public void testBadEncoding() throws Exception { Blob blob = new Blob("aMediaType", (long) 10, "aName"); blob.setValue(new byte[0]); try { testBlobStoreAccessRegistry.getBlobAccess(blob).getInputStream(); fail(); } catch (BlobException expected) { } } @Test public void testInvalidReadRequests() throws Exception { ValueType stringType = typeManager.getValueType("STRING"); ValueType blobType = typeManager.getValueType("BLOB"); FieldType nonBlobField = 
typeManager.newFieldType(stringType, new QName(namespace, "NonBlobField"), Scope.VERSIONED); nonBlobField = typeManager.createFieldType(nonBlobField); FieldType absentField = typeManager .newFieldType(blobType, new QName(namespace, "AbsentField"), Scope.VERSIONED); absentField = typeManager.createFieldType(absentField); RecordType rt = typeManager.newRecordType(new QName(namespace, "NoBlobsRT")); rt.addFieldTypeEntry(nonBlobField.getId(), false); rt = typeManager.createRecordType(rt); Record record = repository.newRecord(); record.setRecordType(rt.getName()); record.setField(nonBlobField.getName(), "This is not a blob"); record = repository.create(record); try { repository.getInputStream(record.getId(), record.getVersion(), nonBlobField.getName(), null, null); fail("Expected exception"); } catch (BlobException e) { // ok } try { repository.getInputStream(record.getId(), record.getVersion(), absentField.getName(), null, null); fail("Expected exception"); } catch (FieldNotFoundException e) { // ok } try { repository.getInputStream(record.getId(), record.getVersion(), new QName(namespace, "nonExistingFieldType"), null, null); fail("Expected exception"); } catch (FieldTypeNotFoundException e) { // ok } try { repository.getInputStream(record.getId(), record.getVersion(), null, null, null); fail("Expected exception"); } catch (IllegalArgumentException e) { // ok } try { repository.getInputStream(repoSetup.getIdGenerator().fromString("USER.nonexistingrecord"), null, absentField.getName()); fail("Expected exception"); } catch (RecordNotFoundException e) { // ok } } private Blob writeBlob(byte[] bytes, String mediaType, String name) throws RepositoryException, InterruptedException, IOException { return writeBlob(bytes, mediaType, name, bytes.length); } /** * @param length The blob site to be used when constructing the blob * (this can be used to control how the blob will be stored) */ private Blob writeBlob(byte[] bytes, String mediaType, String name, long length) throws 
RepositoryException, InterruptedException, IOException { Blob blob = new Blob(mediaType, length, name); OutputStream outputStream = repository.getOutputStream(blob); outputStream.write(bytes); outputStream.close(); return blob; } private byte[] readBlob(RecordId recordId, QName fieldName) throws RepositoryException, InterruptedException, IOException { return readBlob(recordId, null, fieldName); } private byte[] readBlob(RecordId recordId, Long version, QName fieldName, int...indexes) throws RepositoryException, InterruptedException, IOException { InputStream inputStream = repository.getInputStream(recordId, version, fieldName, indexes); try { return IOUtils.toByteArray(inputStream); } finally { IOUtils.closeQuietly(inputStream); } } }
/**
 * Configuration that registers a {@link CommonsRequestLoggingFilter} to log incoming
 * requests. The filter only emits output when its logger is set to DEBUG level, and
 * the bean itself is only created when that logging level is configured.
 */
@Configuration
@Deprecated
public class RequestLoggingFilterConfiguration {

    /**
     * Builds the request logging filter, capturing query string, headers and up to
     * 100000 bytes of the request payload.
     */
    @ConditionalOnProperty(name = "logging.level.org.springframework.web.filter.CommonsRequestLoggingFilter",
            havingValue = "DEBUG")
    @Bean
    public CommonsRequestLoggingFilter commonsRequestLoggingFilter() {
        CommonsRequestLoggingFilter loggingFilter = new CommonsRequestLoggingFilter();
        loggingFilter.setIncludePayload(true);
        loggingFilter.setIncludeHeaders(true);
        loggingFilter.setIncludeQueryString(true);
        loggingFilter.setMaxPayloadLength(100000);
        loggingFilter.setAfterMessagePrefix("REQUEST DATA : ");
        return loggingFilter;
    }
}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-retentionperiod.html

module Stratosphere.ResourceProperties.IoTAnalyticsDatasetRetentionPeriod where

import Stratosphere.ResourceImports

-- | Full data type definition for IoTAnalyticsDatasetRetentionPeriod. See
-- 'ioTAnalyticsDatasetRetentionPeriod' for a more convenient constructor.
data IoTAnalyticsDatasetRetentionPeriod =
  IoTAnalyticsDatasetRetentionPeriod
  { _ioTAnalyticsDatasetRetentionPeriodNumberOfDays :: Val Integer
  , _ioTAnalyticsDatasetRetentionPeriodUnlimited :: Val Bool
  } deriving (Show, Eq)

instance ToJSON IoTAnalyticsDatasetRetentionPeriod where
  toJSON IoTAnalyticsDatasetRetentionPeriod{..} =
    object $
    catMaybes
    [ Just ("NumberOfDays", toJSON (fmap Integer' _ioTAnalyticsDatasetRetentionPeriodNumberOfDays))
    , Just ("Unlimited", toJSON (fmap Bool' _ioTAnalyticsDatasetRetentionPeriodUnlimited))
    ]

instance FromJSON IoTAnalyticsDatasetRetentionPeriod where
  parseJSON (Object obj) =
    IoTAnalyticsDatasetRetentionPeriod
      <$> fmap (fmap unInteger') (obj .: "NumberOfDays")
      <*> fmap (fmap unBool') (obj .: "Unlimited")
  parseJSON _ = mempty

-- | Constructor for 'IoTAnalyticsDatasetRetentionPeriod' containing required
-- fields as arguments.
ioTAnalyticsDatasetRetentionPeriod
  :: Val Integer -- ^ 'itadsrpNumberOfDays'
  -> Val Bool -- ^ 'itadsrpUnlimited'
  -> IoTAnalyticsDatasetRetentionPeriod
ioTAnalyticsDatasetRetentionPeriod daysArg unlimitedArg =
  IoTAnalyticsDatasetRetentionPeriod
  { _ioTAnalyticsDatasetRetentionPeriodNumberOfDays = daysArg
  , _ioTAnalyticsDatasetRetentionPeriodUnlimited = unlimitedArg
  }

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-retentionperiod.html#cfn-iotanalytics-dataset-retentionperiod-numberofdays
itadsrpNumberOfDays :: Lens' IoTAnalyticsDatasetRetentionPeriod (Val Integer)
itadsrpNumberOfDays =
  lens _ioTAnalyticsDatasetRetentionPeriodNumberOfDays
       (\s a -> s { _ioTAnalyticsDatasetRetentionPeriodNumberOfDays = a })

-- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-retentionperiod.html#cfn-iotanalytics-dataset-retentionperiod-unlimited
itadsrpUnlimited :: Lens' IoTAnalyticsDatasetRetentionPeriod (Val Bool)
itadsrpUnlimited =
  lens _ioTAnalyticsDatasetRetentionPeriodUnlimited
       (\s a -> s { _ioTAnalyticsDatasetRetentionPeriodUnlimited = a })
package com.mjiayou.trecore.helper;

import com.mjiayou.trecore.base.TCApp;
import com.mjiayou.trecorelib.util.LogUtil;
import com.wanjian.sak.LayoutManager;

/**
 * Created by treason on 2016/12/20.
 *
 * Thin wrapper around the "Swiss Army Knife" (SAK) layout-inspection library.
 */
public class SwissArmyKnifeUtil {

    private static final String TAG = SwissArmyKnifeUtil.class.getSimpleName();

    /**
     * Initialization: logs the init call and hooks the SAK LayoutManager up
     * to the application instance. Intended to be called once at app start-up.
     */
    public static void init() {
        LogUtil.printInit(TAG);

        LayoutManager.init(TCApp.get());
    }
}
/*****************************************************************************
 *
 *  test_pair_ss_cut_ij.c
 *
 *  Unit tests for the type-pair (i,j) cut-and-shifted soft-sphere
 *  pair potential.
 *
 *  Edinburgh Soft Matter and Statistical Physics Group and
 *  Edinburgh Parallel Computing Centre
 *
 *  (c) 2022 The University of Edinburgh
 *
 *  Contributing authors:
 *  <NAME> (<EMAIL>)
 *
 *****************************************************************************/

#include <assert.h>
#include <float.h>
#include <math.h>

#include "pe.h"
#include "coords.h"
#include "colloids.h"
#include "pair_ss_cut_ij.h"
#include "tests.h"

int test_pair_ss_cut_ij_create(pe_t * pe, cs_t * cs);
int test_pair_ss_cut_ij_param_set(pe_t * pe, cs_t * cs);
int test_pair_ss_cut_ij_single(pe_t * pe, cs_t * cs);

/*****************************************************************************
 *
 *  test_pair_ss_cut_ij_suite
 *
 *  Driver: sets up the parallel environment and coordinate system,
 *  runs each test, and tears everything down again.
 *
 *****************************************************************************/

int test_pair_ss_cut_ij_suite(void) {

  pe_t * pe = NULL;
  cs_t * cs = NULL;

  pe_create(MPI_COMM_WORLD, PE_QUIET, &pe);
  cs_create(pe, &cs);
  cs_init(cs);

  test_pair_ss_cut_ij_create(pe, cs);
  test_pair_ss_cut_ij_param_set(pe, cs);
  test_pair_ss_cut_ij_single(pe, cs);

  cs_free(cs);
  pe_info(pe, "PASS ./unit/test_pair_ss_cut_ij\n");
  pe_free(pe);

  return 0;
}

/*****************************************************************************
 *
 *  test_pair_ss_cut_ij_create
 *
 *  Creation with two types and all-zero parameters: the object must be
 *  allocated with 2x2 parameter tables, all initialised to zero.
 *
 *****************************************************************************/

int test_pair_ss_cut_ij_create(pe_t * pe, cs_t * cs) {

  pair_ss_cut_ij_t * obj = NULL;

  double epsilon[2] = {0};
  double sigma[2] = {0};
  double nu[2] = {0};
  double hc[2] = {0};

  pair_ss_cut_ij_create(pe, cs, 2, epsilon, sigma, nu, hc, &obj);
  assert(obj);

  assert(obj->ntypes == 2);
  assert(obj->epsilon);
  assert(obj->sigma);
  assert(obj->nu);
  assert(obj->hc);

  /* Every (i,j) entry of every table must start out exactly zero. */
  for (int i = 0; i < obj->ntypes; i++) {
    for (int j = 0; j < obj->ntypes; j++) {
      assert(fabs(obj->epsilon[i][j] - 0.0) < DBL_EPSILON);
      assert(fabs(obj->sigma[i][j] - 0.0) < DBL_EPSILON);
      assert(fabs(obj->nu[i][j] - 0.0) < DBL_EPSILON);
      assert(fabs(obj->hc[i][j] - 0.0) < DBL_EPSILON);
    }
  }

  pair_ss_cut_ij_free(obj);

  return 0;
}

/*****************************************************************************
 *
 *  test_pair_ss_cut_ij_param_set
 *
 *  The flat parameter arrays are in upper-triangular order (11, 12, 22);
 *  setting them must populate a symmetric 2x2 table.
 *
 *****************************************************************************/

int test_pair_ss_cut_ij_param_set(pe_t * pe, cs_t * cs) {

  pair_ss_cut_ij_t * obj = NULL;

  double epsilon[3] = {1.0, 2.0, 3.0}; /* 11 12 22 */
  double sigma[3] = {4.0, 5.0, 6.0};
  double nu[3] = {0.5, 1.5, 2.5};
  double hc[3] = {7.0, 8.0, 9.0};

  assert(pe);
  assert(cs);

  pair_ss_cut_ij_create(pe, cs, 2, epsilon, sigma, nu, hc, &obj);

  {
    pair_ss_cut_ij_param_set(obj, epsilon, sigma, nu, hc);

    /* I'm going to write this out explicitly ... */
    /* Each table must be symmetric: [0][1] and [1][0] share the 12 value. */
    assert(fabs(obj->epsilon[0][0] - epsilon[0]) < DBL_EPSILON);
    assert(fabs(obj->epsilon[0][1] - epsilon[1]) < DBL_EPSILON);
    assert(fabs(obj->epsilon[1][0] - epsilon[1]) < DBL_EPSILON);
    assert(fabs(obj->epsilon[1][1] - epsilon[2]) < DBL_EPSILON);

    assert(fabs(obj->sigma[0][0] - sigma[0] ) < DBL_EPSILON);
    assert(fabs(obj->sigma[0][1] - sigma[1] ) < DBL_EPSILON);
    assert(fabs(obj->sigma[1][0] - sigma[1] ) < DBL_EPSILON);
    assert(fabs(obj->sigma[1][1] - sigma[2] ) < DBL_EPSILON);

    assert(fabs(obj->nu[0][0] - nu[0] ) < DBL_EPSILON);
    assert(fabs(obj->nu[0][1] - nu[1] ) < DBL_EPSILON);
    assert(fabs(obj->nu[1][0] - nu[1] ) < DBL_EPSILON);
    assert(fabs(obj->nu[1][1] - nu[2] ) < DBL_EPSILON);

    assert(fabs(obj->hc[0][0] - hc[0] ) < DBL_EPSILON);
    assert(fabs(obj->hc[0][1] - hc[1] ) < DBL_EPSILON);
    assert(fabs(obj->hc[1][0] - hc[1] ) < DBL_EPSILON);
    assert(fabs(obj->hc[1][1] - hc[2] ) < DBL_EPSILON);
  }

  pair_ss_cut_ij_free(obj);

  return 0;
}

/*****************************************************************************
 *
 *  test_pair_ss_cut_ij_single
 *
 *  With epsilon non-zero only for the 00 pair, a single evaluation at
 *  separation h = 1 must give the expected force/potential for (0,0)
 *  and exactly zero for every other pair.
 *
 *****************************************************************************/

int test_pair_ss_cut_ij_single(pe_t * pe, cs_t * cs) {

  pair_ss_cut_ij_t * obj = NULL;

  double epsilon[3] = {1.0, 0.0, 0.0}; /* 00 interactions only */
  double sigma[3] = {1.0, 1.0, 1.0};
  double nu[3] = {1.0, 1.0, 1.0};
  double hc[3] = {2.0, 2.0, 2.0};

  assert(pe);
  assert(cs);

  pair_ss_cut_ij_create(pe, cs, 2, epsilon, sigma, nu, hc, &obj);

  {
    double h = 1.0;
    double f = 0.0;
    double v = 0.0;

    /* Reference values for the cut-and-shifted potential at h = 1
     * with epsilon = sigma = nu = 1 and cut-off hc = 2. */
    pair_ss_cut_ij_single(obj, 0, 0, h, &v, &f);
    assert(fabs(f - 0.25) < DBL_EPSILON);
    assert(fabs(v - 0.75) < DBL_EPSILON);

    pair_ss_cut_ij_single(obj, 0, 1, h, &v, &f);
    assert(fabs(f - 0.00) < DBL_EPSILON);
    assert(fabs(v - 0.00) < DBL_EPSILON);

    pair_ss_cut_ij_single(obj, 1, 0, h, &v, &f);
    assert(fabs(f - 0.00) < DBL_EPSILON);
    assert(fabs(v - 0.00) < DBL_EPSILON);

    pair_ss_cut_ij_single(obj, 1, 1, h, &v, &f);
    assert(fabs(f - 0.00) < DBL_EPSILON);
    assert(fabs(v - 0.00) < DBL_EPSILON);
  }

  pair_ss_cut_ij_free(obj);

  return 0;
}
package org.mzw.rabbitmq.tutorials.sender;

import org.mzw.rabbitmq.tutorials.conf.RetryConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.core.MessageBuilder;
import org.springframework.amqp.core.MessageDeliveryMode;
import org.springframework.amqp.core.MessageProperties;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

/**
 * Publishes demo messages to the email exchange configured in
 * {@link RetryConfig}, for exercising the retry set-up.
 */
@Component
public class RetryPublisher {

    private static final Logger LOGGER = LoggerFactory
            .getLogger(RetryPublisher.class);

    @Autowired
    private RabbitTemplate rabbitTemplate;

    /**
     * Sends one persistent, timestamped message to
     * {@code RetryConfig.EMAIL_EXCHANGE} with routing key
     * {@code RetryConfig.EMAIL_ROUTING_KEY}.
     */
    public void send() {
        LOGGER.debug("send messages");
        String content = "retry message at " + System.currentTimeMillis();
        // Persistent delivery mode so the message survives a broker restart.
        Message message = MessageBuilder.withBody(content.getBytes())
                .setDeliveryMode(MessageDeliveryMode.PERSISTENT)
                .setContentType(MessageProperties.CONTENT_TYPE_BYTES).build();
        rabbitTemplate.convertAndSend(RetryConfig.EMAIL_EXCHANGE,
                RetryConfig.EMAIL_ROUTING_KEY, message);
    }
}
/* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
   This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
   http://www.cs.umass.edu/~mccallum/mallet
   This software is provided under the terms of the Common Public License,
   version 1.0, as published by http://www.opensource.org.  For further
   information, see the file `LICENSE' included with this distribution. */

package cc.mallet.pipe;

import cc.mallet.types.Alphabet;
import cc.mallet.types.AugmentableFeatureVector;
import cc.mallet.types.FeatureConjunction;
import cc.mallet.types.Instance;

import java.io.Serializable;

/**
 * Add specified conjunctions to each instance.
 *
 * A pipe that augments each instance's feature vector with the feature
 * conjunctions registered via {@link #addConjunction}.
 *
 * @author <NAME> <a href="mailto:<EMAIL>"><EMAIL></a>
 */
public class AugmentableFeatureVectorAddConjunctions extends Pipe implements Serializable {

    // Conjunctions registered so far; applied to every piped instance.
    FeatureConjunction.List conjunctions;

    public AugmentableFeatureVectorAddConjunctions () {
        conjunctions = new FeatureConjunction.List ();
    }

    /**
     * Registers one feature conjunction.
     *
     * @param name      name of the conjunction feature
     * @param v         alphabet the feature indices refer to
     * @param features  indices of the conjoined features
     * @param negations per-feature negation flags
     * @return this pipe, for call chaining
     */
    public AugmentableFeatureVectorAddConjunctions addConjunction (String name, Alphabet v,
                                                                   int[] features, boolean[] negations) {
        conjunctions.add (new FeatureConjunction (name, v, features, negations));
        return this;
    }

    /**
     * Adds all registered conjunction features (with weight 1.0) to the
     * instance's AugmentableFeatureVector and returns the same instance.
     */
    public Instance pipe (Instance carrier) {
        AugmentableFeatureVector afv = (AugmentableFeatureVector) carrier.getData();
        conjunctions.addTo (afv, 1.0);
        return carrier;
    }
}
def fit_transform(self, raw_documents: List[str]) -> List[List[int]]:
    """Learn the keyphrase vocabulary from the documents and return the
    document-keyphrase count matrix.

    NOTE(review): the annotated return type is List[List[int]], but
    CountVectorizer.fit_transform typically yields a sparse matrix —
    confirm which one callers expect.
    """
    # Call fit on KeyphraseCountVectorizer explicitly so the keyphrase
    # extraction step populates self.keyphrases before counting.
    KeyphraseCountVectorizer.fit(self=self, raw_documents=raw_documents)

    # Delegate the counting to a plain CountVectorizer restricted to the
    # learned keyphrase vocabulary, mirroring this vectorizer's settings.
    return CountVectorizer(vocabulary=self.keyphrases,
                           ngram_range=(self.min_n_gram_length, self.max_n_gram_length),
                           lowercase=self.lowercase, binary=self.binary,
                           dtype=self.dtype).fit_transform(
        raw_documents=raw_documents)
/**
 * Holds do not disturb change attributes.
 *
 * Immutable event object carrying the new state of the
 * "do not disturb" toggle.
 */
public class DNDChangeMessage {

    // New state of the toggle; set once at construction time.
    private final boolean checked;

    /**
     * @param checked whether "do not disturb" is now enabled
     */
    public DNDChangeMessage(boolean checked) {
        this.checked = checked;
    }

    /** @return the toggle state carried by this message */
    public boolean isChecked() {
        return checked;
    }
}
def capture_diff_traces(self, dtype=float, sweep_mode: str = 'SINGLE', big_endian: bool = True):
    """Capture all differential S-parameter (sdd) traces from the instrument.

    Triggers a sweep, then selects each 'sdd{i}{j}' trace in turn and reads
    its data, assembling a (num_points, pairs, pairs) array.

    :param dtype: float for scalar (FDATA) traces, complex for I/Q (SDATA)
    :param sweep_mode: sweep mode passed to set_sweep_mode (default 'SINGLE')
    :param big_endian: byte order used when the data format is binary
    :return: numpy array of shape (num_points, num_diff_pairs, num_diff_pairs)
    """
    # Differential pairs: two physical ports per pair.
    num_diff_pairs = self.num_ports // 2
    self.set_sweep_mode(sweep_mode)
    dformat = self.get_data_format()
    num_points = self.get_number_sweep_points()
    diff_data = np.zeros((num_points, num_diff_pairs, num_diff_pairs), dtype=dtype)
    # SDATA returns interleaved real/imag values; FDATA returns scalars.
    dtype_name = 'SDATA' if dtype == complex else 'FDATA'
    cmd = f'CALC1:DATA? {dtype_name}'
    for i in range(num_diff_pairs):
        for j in range(num_diff_pairs):
            # Select trace sdd{i+1}{j+1} before querying its data
            # (the instrument returns data for the selected trace).
            self.write(f'CALC1:PAR:SEL \'sdd{i+1}{j+1}\'')
            if is_binary_format(dformat=dformat):
                # NOTE(review): chunk size heuristic (100 bytes/point) —
                # presumably generous enough for all binary formats; confirm.
                chunk_size = num_points * 100
                data: np.array = self.query_binary_values(
                    message=cmd,
                    datatype=get_binary_datatype(dformat=dformat),
                    is_big_endian=big_endian,
                    container=np.array,
                    chunk_size=chunk_size
                )
            else:
                data: np.array = self.query_ascii_values(message=cmd, container=np.array)
            if dtype == complex:
                # De-interleave [re0, im0, re1, im1, ...] into complex values.
                data = data[0::2] + 1j * data[1::2]
            diff_data[:, i, j] = data
    return diff_data
import Privilege from "./Privilege";
import PrivilegeGroup from "./PrivilegeGroup";

import type * as UsersTypes from "..";

/**
 * Renders one category of privileges: a divider with the category's title
 * followed by each privilege entry (single privileges or grouped ones).
 */
export default class PrivilegeCategory {
  main: UsersTypes.default;
  name: UsersTypes.PrivilegeKeysType;
  $privilegesContainer: JQuery<HTMLElement>;
  privileges: (Privilege | PrivilegeGroup)[];
  data: UsersTypes.PrivilegeType;

  /**
   * @param main parent controller that owns the container and category data
   * @param name key of the category in main.privilegeListOrder
   */
  constructor(main, name: UsersTypes.PrivilegeKeysType) {
    this.main = main;
    this.name = name;
    this.$privilegesContainer = this.main.$privilegesContainer;
    this.privileges = [];
    // Category details (title, privilege keys) are looked up on the parent.
    this.data = this.main.privilegeListOrder[name];

    if (!this.data)
      throw Error(`Can't find the details of the "${name}" category`);

    this.RenderDivider();
    this.RenderPrivileges();
  }

  // Renders the divider headline for this category.
  RenderDivider() {
    this.main.RenderDivider(this.data.title);
  }

  // Renders every privilege entry declared for this category.
  RenderPrivileges() {
    this.data.privileges.forEach(this.RenderPrivilege.bind(this));
  }

  // A plain number becomes a single Privilege; an array becomes a group.
  RenderPrivilege(key: number | number[]) {
    let privilege;

    if (key instanceof Array) privilege = new PrivilegeGroup(this, key);
    else privilege = new Privilege(this, key);

    this.privileges.push(privilege);
  }
}
package lockotron

import (
	"sync"
)

// locker hands out at most one mutex per string key, so callers can
// serialize work on a per-key basis.
type locker struct {
	mutex        sync.Mutex                 // guards mutexesByKey
	mutexesByKey map[string]*sync.Mutex     // one mutex per active key
}

// newLocker returns a locker ready for use.
func newLocker() *locker {
	return &locker{mutexesByKey: map[string]*sync.Mutex{}}
}

// obtain returns the mutex associated with key, creating it on first use.
// Repeated calls with the same key return the same mutex until release.
func (l *locker) obtain(key string) *sync.Mutex {
	l.mutex.Lock()
	defer l.mutex.Unlock()

	if existing, found := l.mutexesByKey[key]; found {
		return existing
	}

	fresh := &sync.Mutex{}
	l.mutexesByKey[key] = fresh
	return fresh
}

// release forgets the mutex associated with key; a subsequent obtain for
// the same key will create a fresh one.
func (l *locker) release(key string) {
	l.mutex.Lock()
	defer l.mutex.Unlock()

	delete(l.mutexesByKey, key)
}
package com.github.chen0040.magento.models.category;

import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;

/**
 * Associates a product with a category in the Magento API.
 * Getters, setters, and constructors are generated by Lombok.
 */
@Getter
@Setter
@NoArgsConstructor
@AllArgsConstructor
public class ProductLink {
    // Product SKU being linked.
    private String sku;
    // Ordering position of the product within the category.
    private Integer position;
    // Id of the category (snake_case to match the Magento JSON field name).
    private String category_id;
}
<gh_stars>0 //! This module is supposed to define context-free grammars and their //! components. #[macro_use] pub mod symbol; use self::symbol::Symbol; use super::{Associativity, Operator, Parser, ParserAction}; use automata::{State, Transition}; use automata::dfa::DFA; use automata::nfa::NFA; use std::collections::HashMap; use std::collections::HashSet; use std::fmt::Debug; use std::hash::Hash; use problem_reporting::{InputPosition, Locatable}; /// Represents a production in the grammar. #[derive(Debug, Clone, PartialEq, Eq)] pub struct Production<Nonterminal, Terminal, AST> where Nonterminal: Clone + Debug + Eq + Hash, Terminal: Clone + Debug + Eq + Hash, AST: Clone + Debug + Eq + Default + From<(Terminal, InputPosition)> + Locatable { /// The non-terminal that uses this production. from: Symbol<Nonterminal, Terminal>, /// The symbols that the non-terminal gets converted to. to: Vec<Symbol<Nonterminal, Terminal>>, /// The action that is called during a reduce in the parser. /// /// This is what combines the sub syntax trees. /// /// The second parameter is a backup input position that can be used if the list /// is empty (for an empty production). reduce_action: fn(Vec<AST>, InputPosition) -> AST } /// This macro creates a production. /// /// First comes the non-terminal that is on the left-hand-side, then a comma-separated /// list of symbols on the right hand side, both inside of brackets. Then follows /// the action that is taken to combine the symbols. macro_rules! 
production { ([$nonterminal: expr => $($symbol: expr),*], $action: expr) => {{ use $crate::parser::grammar::Production; use $crate::parser::grammar::symbol::IntoSymbol; Production::new( $nonterminal.into_symbol(), vec![$($symbol.clone().into_symbol()),*], $action ) }} } impl<Nonterminal, Terminal, AST> Production<Nonterminal, Terminal, AST> where Nonterminal: Clone + Debug + Eq + Hash, Terminal: Clone + Debug + Eq + Hash, AST: Clone + Debug + Eq + Default + From<(Terminal, InputPosition)> + Locatable { /// Creates a new production. pub fn new(from: Symbol<Nonterminal, Terminal>, to: Vec<Symbol<Nonterminal, Terminal>>, reduce_action: fn(Vec<AST>, InputPosition) -> AST) -> Production<Nonterminal, Terminal, AST> { assert!(from.is_nonterminal()); Production { from, to, reduce_action } } /// Creates an NFA from this production. fn to_nfa_fragment(&self) -> NFAFragment<Nonterminal, Terminal> { let starting_state = State::new(); let mut last_state = starting_state; let mut transitions = Vec::new(); for symbol in &self.to { let new_state = State::new(); transitions.push(Transition::new(last_state, Some(symbol.clone()), new_state)); last_state = new_state; } NFAFragment { starting_state, transitions, accepting_state: last_state } } /// Returns the last operator in the right hand side of the production. fn last_operator<'a>(&'a self, operators: &'a Vec<Operator<Terminal>>) -> Option<&'a Operator<Terminal>> { let mut last_operator = None; for symbol in &self.to { match symbol { &Symbol::Terminal(ref terminal) => { for operator in operators { if &operator.terminal == terminal { last_operator = Some(operator); break; } } }, _ => (), } } last_operator } /// Reduces using this production, returning the result of the production. 
pub fn reduce(&self, stack: &mut Vec<(State, AST)>) -> (Symbol<Nonterminal, Terminal>, AST) { let nonterminal = self.from.clone(); let mut parameters = Vec::new(); for _ in 0..self.to.len() { if let Some((_, next_paramter)) = stack.pop() { parameters.insert(0, next_paramter); } else { panic!("Unexpected end of parse stack."); } } // The backup position is used if no other input position is available // (for example in the empty production) let backup_position = if stack.len() > 0 { let (_, ref top_ast) = stack[stack.len() - 1]; let mut position = top_ast.get_input_position().clone(); // The end of the previous position is taken as the beginning of the backup position. position.index = position.index + position.length; // The backup position doesn't have a meaningful length. position.length = 0; position } else { // In the case where there are parameters the backup position should not be necessary, // but it is still a parameter. So an arbitrary position is used as the backup // position. parameters[0].get_input_position().clone() }; (nonterminal, (self.reduce_action)(parameters, backup_position)) } } /// Represents an NFA fragment that is created from a production. struct NFAFragment<Nonterminal, Terminal> where Nonterminal: Clone + Debug + Eq + Hash, Terminal: Clone + Debug + Eq + Hash { /// The starting state of the NFA fragment. starting_state: State, /// The transitions of the NFA fragment. transitions: Vec<Transition<Symbol<Nonterminal, Terminal>>>, /// The accepting state of the NFA fragment. accepting_state: State } /// Represents a context-free grammar. #[derive(Debug)] pub struct Grammar<Nonterminal, Terminal, AST> where Nonterminal: Clone + Debug + Eq + Hash, Terminal: Clone + Debug + Eq + Hash, AST: Clone + Debug + Eq + Default + From<(Terminal, InputPosition)> + Locatable { /// The start symbol for the grammar. start_symbol: Symbol<Nonterminal, Terminal>, /// The productions for the grammar. 
productions: Vec<Production<Nonterminal, Terminal, AST>> } impl<Nonterminal, Terminal, AST> Grammar<Nonterminal, Terminal, AST> where Nonterminal: Clone + Debug + Eq + Hash, Terminal: Clone + Debug + Eq + Hash, AST: Clone + Debug + Eq + Default + From<(Terminal, InputPosition)> + Locatable { /// Creates a new grammar with the given start symbol and productions. pub fn new(start_symbol: Nonterminal, productions: Vec<Production<Nonterminal, Terminal, AST>>) -> Grammar<Nonterminal, Terminal, AST> { // Make sure no internal symbols are used. for production in &productions { for symbol in &production.to { assert!(symbol.is_matcher()); } } Grammar { start_symbol: Symbol::Nonterminal(start_symbol), productions } } /// Returns the set of nullable nonterminal symbols. fn nullable(&self) -> HashSet<&Symbol<Nonterminal, Terminal>> { let mut result = HashSet::new(); // Map right hand sides and nonterminals to their "nullable-ness". let mut right_hand_sides = HashMap::new(); let mut nonterminals = HashMap::new(); // Start with everything being non-nullable. for production in &self.productions { right_hand_sides.insert(&production.to, false); nonterminals.insert(&production.from, false); for symbol in &production.to { if symbol.is_nonterminal() { nonterminals.insert(&symbol, false); } } } // Iterate until no changes occur. let mut changed = true; while changed { changed = false; for (right_hand_side, value) in right_hand_sides.iter_mut() { if !*value { let mut nullable = true; for symbol in *right_hand_side { if symbol.is_nonterminal() { if !nonterminals.get(symbol).unwrap() { nullable = false; break; } } else { nullable = false; break; } } if nullable { *value = true; changed = true; } } } for (nonterminal, value) in nonterminals.iter_mut() { if !*value { for production in &self.productions { if &production.from == *nonterminal { if *right_hand_sides.get(&production.to).unwrap() { *value = true; changed = true; break; } } } } } } // Collect all nullable nonterminals. 
for (nonterminal, value) in nonterminals.iter() { if *value { result.insert(*nonterminal); } } result } /// Calculates the first set for each nonterminal. /// /// The first set contains all terminals that could be the first character /// of a derivation with the given nonterminal. fn first(&self) -> HashMap<&Symbol<Nonterminal, Terminal>, Vec<&Symbol<Nonterminal, Terminal>>> { let nullable = self.nullable(); // Map right hand sides and nonterminals to their first sets. let mut right_hand_sides = HashMap::new(); let mut nonterminals = HashMap::new(); // Start with empty sets. for production in &self.productions { right_hand_sides.insert(&production.to, Vec::new()); nonterminals.insert(&production.from, Vec::new()); for symbol in &production.to { if symbol.is_nonterminal() { nonterminals.insert(&symbol, Vec::new()); } } } // Iterate until no changes occur. let mut changed = true; while changed { changed = false; for (right_hand_side, value) in right_hand_sides.iter_mut() { for symbol in right_hand_side.iter() { // Iterate until we hit a non-nullable nonterminal or a terminal. if symbol.is_nonterminal() { for other_symbol in nonterminals.get(symbol).unwrap() { if !value.contains(other_symbol) { changed = true; value.push(*other_symbol); } } if !nullable.contains(&symbol) { break; } } else { if !value.contains(&symbol) { changed = true; value.push(symbol); } break; } } } for (nonterminal, value) in nonterminals.iter_mut() { for production in &self.productions { if &production.from == *nonterminal { let symbols = right_hand_sides.get(&production.to).unwrap(); for symbol in symbols { if !value.contains(symbol) { changed = true; value.push(symbol); } } } } } } nonterminals } /// Calculates the follow set for each nonterminal. /// /// The follow set contains all terminals that could follow an occurance of /// the nonterminal. 
pub fn follow (&self) -> HashMap<&Symbol<Nonterminal, Terminal>, Vec<Symbol<Nonterminal, Terminal>>> { let nullable = self.nullable(); let first = self.first(); // Map nonterminals to their follow sets. let mut nonterminals = HashMap::new(); let mut constraints = HashSet::new(); // Start with empty follow sets. for production in &self.productions { nonterminals.insert(&production.from, Vec::new()); for symbol in &production.to { if symbol.is_nonterminal() { nonterminals.insert(&symbol, Vec::new()); } } } // The start symbol may be followed by the end of input. nonterminals .get_mut(&self.start_symbol) .unwrap() .push(Symbol::EndOfInput); for (nonterminal, value) in nonterminals.iter_mut() { for production in &self.productions { let last_symbol = production.to.len(); for i in 0..last_symbol { if &production.to[i] == *nonterminal { let symbols = &production.to[i + 1..last_symbol]; let first_symbols = Grammar::<Nonterminal, Terminal, AST>::first_sequence(&nullable, &first, symbols); for symbol in first_symbols { if !value.contains(symbol) { value.push(symbol.clone()); } } if Grammar::<Nonterminal, Terminal, AST>::nullable_sequence(&nullable, symbols) { if &production.from != *nonterminal { constraints.insert((&production.from, nonterminal.clone())); } } } } } } let mut changed = true; while changed { changed = false; for &(subset, superset) in &constraints { nonterminals.get(subset).cloned(); for symbol in nonterminals.get(subset).unwrap().clone() { if !nonterminals.get(superset).unwrap().contains(&symbol) { changed = true; nonterminals .get_mut(superset) .unwrap() .push(symbol.clone()); } } } } nonterminals } /// Returns true if the given sequence is nullable. 
fn nullable_sequence(nullable: &HashSet<&Symbol<Nonterminal, Terminal>>, sequence: &[Symbol<Nonterminal, Terminal>]) -> bool { for symbol in sequence { if symbol.is_nonterminal() { if !nullable.contains(symbol) { return false; } } else { return false; } } true } /// Calculates the first set for a given sequence. fn first_sequence<'a>(nullable: &'a HashSet<&Symbol<Nonterminal, Terminal>>, first: &'a HashMap<&Symbol<Nonterminal, Terminal>, Vec<&Symbol<Nonterminal, Terminal>>>, sequence: &'a [Symbol<Nonterminal, Terminal>]) -> HashSet<&'a Symbol<Nonterminal, Terminal>> { let mut result = HashSet::new(); for symbol in sequence { if symbol.is_nonterminal() { result.extend(first.get(symbol).unwrap()); if !nullable.contains(symbol) { break; } } else { result.insert(symbol); break; } } result } /// Returns the corresponding operator to the terminal symbol, if it exists. fn find_operator<'a>(symbol: &'a Symbol<Nonterminal, Terminal>, operators: &'a Vec<Operator<Terminal>>) -> Option<&'a Operator<Terminal>> { match symbol { &Symbol::Terminal(ref terminal) => { for operator in operators { if &operator.terminal == terminal { return Some(operator); } } None }, _ => None, } } /// Creates a DFA from this grammar. fn to_dfa(&self) -> ((DFA<Production<Nonterminal, Terminal, AST>, Symbol<Nonterminal, Terminal>>, Vec<(HashSet<State>, State)>), HashMap<State, (u32, Production<Nonterminal, Terminal, AST>)>) { // Add a new starting production. let start_symbol: Symbol<Nonterminal, Terminal> = Symbol::InternalNonterminal(0); let start_production = Production::<Nonterminal, Terminal, AST>::new(start_symbol, vec![self.start_symbol.clone()], |_, _| AST::default()); // Calculate NFA fragments from the productions. let mut nfa_fragments = vec![(start_production.clone(), start_production.to_nfa_fragment())]; for production in &self.productions { nfa_fragments.push((production.clone(), production.to_nfa_fragment())); } // Combine the transitions. 
let mut transitions = Vec::new(); for &(_, ref fragment) in &nfa_fragments { transitions.extend(fragment.transitions.clone()); for transition in &fragment.transitions { // This is safe, because we know how the transitions are constructed. let transition_symbol = &transition.relevant_symbols()[0]; // If the transition is on a nonterminal, add transitions to that // nonterminals productions from the starting state. if transition_symbol.is_nonterminal() { let starting_state = transition.from(); for &(ref production, ref other_fragment) in &nfa_fragments { if &production.from == transition_symbol { transitions.push(Transition::new(starting_state, None, other_fragment.starting_state)); } } } } } // The starting state is the state of the added production. let starting_state = nfa_fragments[0].1.starting_state; // Build the accepting states containing the productions they used. let mut accepting_states = HashMap::new(); for (production, fragment) in nfa_fragments { accepting_states.insert(fragment.accepting_state, (0, production)); } (NFA::new(transitions, starting_state, accepting_states.clone()).to_dfa_with_map(), accepting_states) } /// Creates a parser from this grammar. pub fn to_parser(&self, operators: Vec<Operator<Terminal>>) -> Parser<Nonterminal, Terminal, AST> { let follow = self.follow(); let ((dfa, nfa_state_map), accepting_states) = self.to_dfa(); let mut table: HashMap<(State, Symbol<Nonterminal, Terminal>), ParserAction<Nonterminal, Terminal, AST>> = HashMap::new(); for &(_, state) in &nfa_state_map { for transition in dfa.transitions_from(state) { let symbol = transition.relevant_symbols()[0].clone(); if symbol.is_nonterminal() { table.insert((state, symbol.clone()), ParserAction::Go(dfa.transition(state, symbol).unwrap())); } else { table.insert((state, symbol.clone()), ParserAction::Shift(dfa.transition(state, symbol).unwrap())); } } } macro_rules! 
report_conflict { ($new_action: expr, $location: expr, $old_action: expr) => { panic!(concat!("Parser generation error: ", "Trying to add {:?} to the SLR table at {:?},", "but {:?} was already there."), $new_action, $location, $old_action); } } for &(ref nfa_states, state) in &nfa_state_map { for nfa_state in nfa_states { if let Some(production) = accepting_states.get(&nfa_state) { if production.1.from == Symbol::InternalNonterminal(0) { if let Some(conflicting_entry) = table.insert((state, Symbol::EndOfInput), ParserAction::Accept) { let accept: ParserAction<Nonterminal, Terminal, AST> = ParserAction::Accept; let end_of_input: Symbol<Nonterminal, Terminal> = Symbol::EndOfInput; report_conflict!(accept, (state, end_of_input), conflicting_entry); } } else { for symbol in follow.get(&production.1.from).unwrap() { // If the table already contains the key. if table.contains_key(&(state, symbol.clone())) { // Try to resolve the conflict using precendence. let new_operator = production.1.last_operator(&operators); let old_operator = Grammar::<Nonterminal, Terminal, AST>::find_operator(symbol, &operators); // Only resolve shift-reduce conflicts. if let Some(&ParserAction::Shift(_)) = table.get(&(state, symbol.clone())) { // Only resolve conflicts if both symbols involved are // operators. if let (Some(new_operator), Some(old_operator)) = (new_operator, old_operator) { // Pick the operator with the higher precedence. if new_operator.precedence < old_operator.precedence { table.insert((state, symbol.clone()), ParserAction::Reduce(production .1 .clone())); } else if old_operator.precedence == new_operator.precedence { // For equal precendence decide based on associativity. 
assert_eq!(old_operator.associativity, new_operator.associativity); match new_operator.associativity { Associativity::Left => table.insert((state, symbol.clone()), ParserAction::Reduce( production.1.clone())), Associativity::Right => None, Associativity::None => { table.remove(&(state, symbol.clone())) }, }; } } else { let conflicting_entry = table.get(&(state, symbol.clone())); report_conflict!(ParserAction::Reduce(production .1 .clone()), (state, symbol.clone()), conflicting_entry); } } else { let conflicting_entry = table.get(&(state, symbol.clone())); report_conflict!(ParserAction::Reduce(production.1.clone()), (state, symbol.clone()), conflicting_entry); } } else { // If there is no conflict, just add the action. table.insert((state, symbol.clone()), ParserAction::Reduce(production.1.clone())); } } } } } } Parser::new(table, dfa.start()) } }
import { Path } from '@serenity-js/core/lib/io';
import * as fs from 'fs';

import { GherkinDocument } from '../nodes';
import { UnableToParseFeatureFileError, UnableToReadFeatureFileError } from './errors';

/**
 * Reads a Gherkin `.feature` file from disk and parses it into a
 * GherkinDocument, translating I/O and parser failures into
 * domain-specific errors.
 *
 * @private
 */
export class FeatureFileParser {

    /**
     * @param gherkinParser - underlying parser turning feature-file text
     *   into a GherkinDocument
     */
    constructor(private readonly gherkinParser: { parse: (feature: string) => GherkinDocument }) {
    }

    /**
     * @param uri - path of the feature file to load
     * @returns a promise resolving with the parsed document; rejects with
     *   UnableToReadFeatureFileError when the file cannot be read, or
     *   UnableToParseFeatureFileError when its contents cannot be parsed
     */
    parse(uri: Path): Promise<GherkinDocument> {
        return new Promise((resolve, reject) => {
            fs.readFile(uri.value, (error: NodeJS.ErrnoException | undefined, data: Buffer) => {
                if (!! error) {
                    // Reading failed (missing file, permissions, ...).
                    return reject(
                        new UnableToReadFeatureFileError(`Could not read feature file at "${ uri.value }"`, error),
                    );
                }

                try {
                    // Feature files are decoded as UTF-8 before parsing.
                    return resolve(this.gherkinParser.parse(data.toString('utf8')));
                } catch (parseError) {
                    return reject(
                        new UnableToParseFeatureFileError(`Could not parse feature file at "${ uri.value }"`, parseError),
                    );
                }
            });
        });
    }
}
# Reads two charge levels and simulates play: each minute the lower-charged
# device is plugged in (+1%) while the other discharges (-2%). Counts how
# many whole minutes pass before a charge is exhausted.
a, b = map(int, input().split())
count,flag=0,0
# Special case: both start at 1% — no full minute can be played.
if(a==1 and b==1):
    flag=1
    print(0)
while(flag==0 and a > 0 and b > 0):
    count+=1
    # Charge whichever device currently has less (ties charge `a`).
    if(min(a,b)==a):
        a+=1
        b-=2
    else:
        b+=1
        a-=2
# NOTE(review): when both levels reach 1 mid-game, the loop still counts one
# more minute (one level goes negative before the loop exits) — confirm this
# matches the intended problem statement.
if(not flag):
    print(count)
"""BOJ 9012: Parentheses — decide whether each input line is a valid
parenthesis string (VPS) and print YES or NO accordingly."""


def is_vps(ps):
    """Return True if ps is a balanced parenthesis string.

    Uses a running depth counter instead of a stack of placeholder values:
    a ')' with nothing open, or any unclosed '(' at the end, makes the
    string invalid. Characters other than '(' and ')' are ignored, matching
    the original behavior.
    """
    depth = 0
    for p in ps:
        if p == '(':
            depth += 1
        elif p == ')':
            depth -= 1
            if depth < 0:
                # Closing bracket with no matching opener.
                return False
    return depth == 0


def print_yes_or_no(result):
    """Print YES for each truthy entry of result, NO otherwise."""
    for r in result:
        print('YES' if r else 'NO')


if __name__ == '__main__':
    # Guarding the I/O keeps the functions importable/testable while the
    # script behaves exactly as before when executed directly.
    test = int(input())
    result = [is_vps(input()) for _ in range(test)]
    print_yes_or_no(result)
/** * @file ZW_global_definitions.h * * This file is a helper file for including all globally required parameters * and definitions for ease of coding. * It is defined as a ZWave API module so that it also can be used by the * ZAF API if needed. * * @copyright 2020 Silicon Laboratories Inc. */ #ifndef __ZW_GLOBAL_DEFINITIONS_H__ #define __ZW_GLOBAL_DEFINITIONS_H__ /**************************************************************************** * INCLUDE FILES ***************************************************************************/ #include <stdbool.h> #include <ZW_typedefs.h> /**************************************************************************** * CONFIGURATIONS* ***************************************************************************/ /**************************************************************************** * DEFINITIONS, TYPEDEF and CONSTANTS ***************************************************************************/ /**************************************************************************** * MACROS* ***************************************************************************/ /** * This will calculate the distance between two index values in a circular buffer. (FILO) * * @param tailIndex The index at which items are taken off the circular buffer. * (Old items are removed from buffer at this index) * @param headIndex The index at which items are placed on the circular buffer. * (New items are added to buffer at this index) * @param maxValue The maximum index value, or the size of the buffer minus 1. */ #define CIRCULAR_BUFFER_DIFF(tailIndex, headIndex, maxValue) \ ((headIndex >= tailIndex) ? 
(headIndex - tailIndex) : ((maxValue - tailIndex + 1) + headIndex)) /**************************************************************************** * EXTERNAL VARIABLES* ***************************************************************************/ /**************************************************************************** * ENUMERATIONS ***************************************************************************/ /** * Universal return code used by all functions where ever applicable. * * ATTENTION: Use the enum for catching errors at runtime, since the order * of these definitions can change in future versions!! */ typedef enum { /* Meaning: SUCCESS */ Code_Success = 0, /* Meaning: Default failed code with no helping indication. */ Code_Fail_Unknown, /* Meaning: Failed due to lack of memory. */ Code_Fail_NoMemory, /* Meaning: A set limit is reached for this operation. */ Code_Fail_LimitReached, /* Meaning: A specific entity could not be found. */ Code_Fail_NotFound, /* Meaning: The operation or the entire module is globally * suspended and cannot be utilized at this point in time. */ Code_Fail_Suspended, /* Meaning: Not in an appropriate state for this call. */ Code_Fail_InvalidState, /* Meaning: This operation can not be allowed at this point. */ Code_Fail_InvalidOperation, /* Meaning: Wrong/invalid input parameters. */ Code_Fail_InvalidParameter, /* Meaning: A required resource or dependency that this operation relies * on was not setup or available. Perhaps wrong initialization order. */ Code_DependencyUnavailable, /* Meaning: A resource needed by this operation or call is busy. * Try again later. Potential racing condition or untimely action. */ Code_Fail_Busy, /* Meaning: Failed due to timeout. */ Code_Fail_Timeout, /* Meaning: Error occurred at the driver level. */ Code_Fail_Driver, /* Meaning: This operation is not supported due to limited * implementation or need. 
*/ Code_Fail_NotImplemented, /* Meaning: This operation cannot be performed, since it is * blocked or not allowed. */ Code_Fail_NotAllowed, } ReturnCode_t; /**************************************************************************** * STATIC CONTROLLING FUNCTIONS OF DUT ***************************************************************************/ /**************************************************************************** * API FUNCTIONS ***************************************************************************/ #endif // __ZW_GLOBAL_DEFINITIONS_H__
A couple of days ago, David asked me to add a new feature to the animation editor UI. He wanted the ability to have multiple timelines in the UI so he could mix animations. He drew me a picture to make it a bit more clear: It required a little bit of refactoring, but it was pretty straightforward to implement. Normally something like this would take a really long time, but thanks to our decision to standardize on HTML5 for our UI layer (via Webkit / Chrome / Awesomium), it took less than a day! Here's a video of the new UI coupled with soothing ambient music from Mikko Tarmia! Be sure to watch it in HD! I highly recommend watching this on YouTube or pressing the fullscreen button, so it is more legible. When we get a little further along, I am sure John will post another professional video showing how to use it in the game! In the meantime, I've posted the UI sources in the secret preorder forum to people who want to test it out.
/** * The main set of web services. */ @Named @Singleton public class TextelementController extends Controller { private final DocumentRepository documentRepository; // We are using constructor injection to receive a repository to support our desire for immutability. @Inject public TextelementController(DocumentRepository documentRepository) { this.documentRepository = documentRepository; } public Result newParagraph(Long id, Integer index) { Document doc = documentRepository.findOne(id); JpaFixer.removeDuplicatesWorkaround(doc); JsonNode json = request().body().asJson(); if (doc != null) { Paragraph para = new Paragraph(); para.text = "Neuer Paragraph"; if (json != null && StringUtils.isNotEmpty(json.asText())) { para.text = json.asText(); } doc.insertTextElement(para, index); documentRepository.save(doc); return ok(Json.toJson(para)); } else { return notFound(); } } public Result newHeadline(Long id, Integer index) { Document doc = documentRepository.findOne(id); JpaFixer.removeDuplicatesWorkaround(doc); if (doc != null) { Headline headline = new Headline(); headline.text = "Neue Headline"; headline.size = 3; doc.insertTextElement(headline, index); documentRepository.save(doc); return ok(Json.toJson(headline)); } else { return notFound(); } } public Result delete(Long docId, Long textelementId) { Document doc = documentRepository.findOne(docId); JpaFixer.removeDuplicatesWorkaround(doc); boolean removed = doc.removeTextelement(textelementId); documentRepository.save(doc); if (removed) { return ok(); } else { return notFound(); } } public Result getTypes() { return ok(Json.toJson(TextelementType.values())); } }
<reponame>mzy-ray/react-redux-ts import {createBrowserHistory, History} from 'history'; // subpath of the web service const defaultPath: string = process.env.PUBLIC_URL; const history: History = createBrowserHistory({basename: defaultPath}); export default history;
/* MIT License Copyright (c) 2022 Looker Data Sciences, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/
import React from 'react'
import ReactDOM from 'react-dom'
import { ExtensionProvider } from '@looker/extension-sdk-react'
import { ComponentsProvider, Spinner, Flex } from '@looker/components'
import { i18nResources } from '@looker/filter-components'
import { FilterDemo } from './FilterDemo'

// Returns the DOM node the extension renders into, creating a full-height
// <div id="extension-root"> on first call and reusing it afterwards.
const getRoot = () => {
  const existingRoot = document.getElementById('extension-root')
  if (existingRoot) return existingRoot
  const root = document.createElement('div')
  root.setAttribute('id', 'extension-root')
  root.style.height = '100%'
  document.body.appendChild(root)
  return root
}

// Mounts the given component inside the Looker component and extension
// providers. A centered spinner is shown while the extension SDK connects.
const render = (Component: typeof FilterDemo) => {
  const root = getRoot()
  const loading = (
    <Flex width="100%" height="90%" alignItems="center" justifyContent="center">
      <Spinner color="black" />
    </Flex>
  )
  ReactDOM.render(
    <ComponentsProvider resources={i18nResources}>
      <ExtensionProvider
        loadingComponent={loading}
        requiredLookerVersion=">=21.0"
      >
        <Component />
      </ExtensionProvider>
    </ComponentsProvider>,
    root
  )
}

// Defer the initial render until the document is ready.
window.addEventListener('DOMContentLoaded', async () => {
  render(FilterDemo)
})

// Allow hot module reload: re-render with the freshly loaded module whenever
// FilterDemo.tsx changes during development.
if (module.hot) {
  module.hot.accept('./FilterDemo.tsx', () => {
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    const NextFilterDemo = require('./FilterDemo.tsx').default
    render(NextFilterDemo)
  })
}
package com.works.vetrestapi.repositories;

import com.works.vetrestapi.entities.Depo;
import org.springframework.data.jpa.repository.JpaRepository;

/**
 * Spring Data JPA repository for {@link Depo} entities keyed by an Integer id.
 * All CRUD, paging and sorting operations are inherited from
 * {@link JpaRepository}; no custom queries are declared.
 */
public interface DepoRepository extends JpaRepository<Depo,Integer> {
}
import torch
import torch.utils.data
import torch.cuda
import torch.backends.cudnn
import random
import numpy as np

from typing import Optional


def fix(offset: int = 0, fix_cudnn: bool = True):
    """Seed every RNG (stdlib, torch CPU/CUDA, numpy) deterministically.

    The same fixed base constants are used each time, shifted by ``offset``
    so that distinct offsets give distinct-but-reproducible streams. With
    ``fix_cudnn`` set, cuDNN is additionally pinned to deterministic kernels.
    """
    base_random = 0x12345678
    base_torch = 0x0DABA52
    base_numpy = 0xC1CAFA52

    random.seed(base_random + offset)
    torch.manual_seed(base_torch + offset)
    torch.cuda.manual_seed(base_torch + 1 + offset)
    np.random.seed(base_numpy + offset)

    if fix_cudnn:
        # Trade autotuning speed for bitwise-reproducible convolutions.
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.benchmark = False


def get_randstate(seed: Optional[int] = None) -> np.random.RandomState:
    """Return a numpy RandomState, picking a sensible seed when none is given.

    Inside a DataLoader worker the worker's own seed is used, so each worker
    gets an independent stream; otherwise a fresh random seed is drawn.
    """
    if seed is not None:
        return np.random.RandomState(seed)

    worker_info = torch.utils.data.get_worker_info()
    chosen = worker_info.seed if worker_info is not None \
        else random.randint(0, 0x7FFFFFFF)
    return np.random.RandomState(chosen)
/*------------------------------------------------------------------------------
Copyright © 2016 by <NAME>

XLib is provided under the terms of The MIT License (MIT):

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------------------------------------------------------------------*/
/**
 * @author <NAME>
 *         Univerity of Verona, Dept. of Computer Science
 *         <EMAIL>
 */
// NOTE(review): std::iota (used in ToCSR) lives in <numeric>, which is not
// included here — presumably pulled in transitively; verify.
#include <algorithm>
#include <exception>
#include <chrono>
#include <random>
#if __linux__
    #include <stdio.h>
    #include <sys/types.h>
    #include <sys/mman.h>
    #include <fcntl.h>
    #include <sys/stat.h>
    #include <unistd.h>
#endif
#include "Graph/Host/GraphSTD.hpp"
#include "Base/Host/fUtil.hpp"
#include "Base/Host/file_util.hpp"
#include "Base/Host/print_ext.hpp"

namespace graph {

// Default constructor: all CSR/COO arrays start null; the actual storage is
// created later by Allocate().
template<typename node_t, typename edge_t, typename dist_t>
GraphSTD<node_t, edge_t, dist_t>::GraphSTD() :
    GraphBase<node_t, edge_t>(),
    OutOffset(nullptr), InOffset(nullptr),
    OutEdges(nullptr), InEdges(nullptr),
    OutDegrees(nullptr), InDegrees(nullptr),
    COO_Edges(nullptr), coo_edges(0), BFS(*this), SCC(*this) {}

// Constructor selecting directed vs. undirected edge handling up front.
template<typename node_t, typename edge_t, typename dist_t>
GraphSTD<node_t, edge_t, dist_t>::GraphSTD(EdgeType _edge_type) :
    GraphBase<node_t, edge_t>(_edge_type),
    OutOffset(nullptr), InOffset(nullptr),
    OutEdges(nullptr), InEdges(nullptr),
    OutDegrees(nullptr), InDegrees(nullptr),
    COO_Edges(nullptr), coo_edges(0), BFS(*this), SCC(*this) {}

// Allocates CSR offset/edge/degree arrays plus the COO edge list.
// For undirected graphs the In* members simply alias the Out* arrays (the
// destructor relies on this and skips deleting them in that case).
// NOTE(review): OutDegrees is allocated as node_t[] while InDegrees is
// degree_t[] — the types are presumably identical typedefs; confirm.
template<typename node_t, typename edge_t, typename dist_t>
void GraphSTD<node_t, edge_t, dist_t>::Allocate() {
    try {
        OutOffset = new edge_t[ V + 1 ];
        OutEdges = new node_t[ E ];
        OutDegrees = new node_t[ V ]();
        COO_Edges = new node_t2[ coo_edges ];
        if (Direction == EdgeType::UNDIRECTED) {
            InDegrees = OutDegrees;
            InOffset = OutOffset;
            InEdges = OutEdges;
            return;
        }
        InOffset = new edge_t[ V + 1 ];
        InEdges = new node_t[ E ];
        InDegrees = new degree_t[ V ]();
    } catch(std::bad_alloc& exc) {
        __ERROR("OUT OF MEMORY: Graph too Large !! V: " << V << " E: " << E);
    }
}

// Frees all owned arrays; for undirected graphs the In* pointers alias the
// Out* arrays and must not be freed twice, hence the early return.
template<typename node_t, typename edge_t, typename dist_t>
GraphSTD<node_t, edge_t, dist_t>::~GraphSTD() {
    if (OutOffset) delete[] OutOffset;
    if (OutEdges) delete[] OutEdges;
    if (OutDegrees) delete[] OutDegrees;
    if (COO_Edges) delete[] COO_Edges;
    if (Direction == EdgeType::UNDIRECTED)
        return;
    if (InOffset) delete[] InOffset;
    if (InEdges) delete[] InEdges;
    if (InDegrees) delete[] InDegrees;
}

// Converts the COO edge list into CSR form (degrees -> prefix sums ->
// scatter). With `randomize`, vertex ids are first remapped through a
// shuffled permutation so vertex order does not bias later traversals.
template<typename node_t, typename edge_t, typename dist_t>
void GraphSTD<node_t, edge_t, dist_t>::ToCSR(bool randomize) {
    if (randomize) {
        std::cout << std::endl << "Randomization..." << std::flush;
        auto seed = std::chrono::system_clock::now().time_since_epoch().count();
        std::default_random_engine generator (seed);
        node_t* randomize_array = new node_t[V];
        // Identity permutation, then shuffle to get a random relabeling.
        std::iota(randomize_array, randomize_array + V, 0);
        std::shuffle(randomize_array, randomize_array+ V,
                     std::default_random_engine(seed));
        for (edge_t i = 0; i < coo_edges; i++) {
            COO_Edges[i][0] = randomize_array[ COO_Edges[i][0] ];
            COO_Edges[i][1] = randomize_array[ COO_Edges[i][1] ];
        }
        delete[] randomize_array;
    }
    std::cout << std::endl << "COO To CSR...\t" << std::flush;

    // Pass 1: count degrees. An undirected edge contributes to the
    // out-degree of both endpoints; a directed one also feeds InDegrees.
    for (edge_t i = 0; i < coo_edges; i++) {
        const node_t source = COO_Edges[i][0];
        const node_t dest = COO_Edges[i][1];
        OutDegrees[source]++;
        if (Direction == EdgeType::UNDIRECTED)
            OutDegrees[dest]++;
        else if (Direction == EdgeType::DIRECTED)
            InDegrees[dest]++;
    }

    // Offsets are the exclusive prefix sum of the degrees.
    OutOffset[0] = 0;
    std::partial_sum(OutDegrees, OutDegrees + V, OutOffset + 1);

    // Pass 2: scatter edges; TMP tracks how many slots of each vertex's
    // adjacency range have been filled so far.
    degree_t* TMP = new degree_t[V]();
    for (edge_t i = 0; i < coo_edges; i++) {
        const node_t source = COO_Edges[i][0];
        const node_t dest = COO_Edges[i][1];
        OutEdges[ OutOffset[source] + TMP[source]++ ] = dest;
        if (Direction == EdgeType::UNDIRECTED)
            OutEdges[ OutOffset[dest] + TMP[dest]++ ] = source;
    }
    // Directed graphs additionally need the reverse (incoming) CSR.
    if (Direction == EdgeType::DIRECTED) {
        InOffset[0] = 0;
        std::partial_sum(InDegrees, InDegrees + V, InOffset + 1);
        std::fill(TMP, TMP + V, 0);
        for (edge_t i = 0; i < coo_edges; ++i) {
            const node_t dest = COO_Edges[i][1];
            InEdges[ InOffset[dest] + TMP[dest]++ ] = COO_Edges[i][0];
        }
    }
    delete[] TMP;
    std::cout << "Complete!" << std::endl << std::endl << std::flush;
}

#if __linux__
// Serializes the graph (header + both CSR representations) to a binary file
// via a memory-mapped write. Linux only.
template<typename node_t, typename edge_t, typename dist_t>
void GraphSTD<node_t, edge_t, dist_t>::toBinary(const char* File) {
    const int fd = ::open(File, O_RDWR | O_CREAT | O_TRUNC,
                          S_IRUSR | S_IWUSR);
    const size_t file_size = (3 + (V + 1) * 2 + V * 2 + E * 2) * sizeof(int);

    std::cout << "Graph To binary file: " << File
              << " (" << (static_cast<float>(file_size) / (1 << 20))
              << ") MB" << std::endl;

    // Grow the file to its final size by writing one byte at the end, then
    // map it and advise the kernel of the sequential access pattern.
    ::lseek(fd, static_cast<long int>(file_size - 1), SEEK_SET);
    long int r = ::write(fd, "", 1);
    if (r != 1) __ERROR("write error");

    void* memory_mapped = ::mmap(0, file_size, PROT_WRITE, MAP_SHARED, fd, 0);
    if (memory_mapped == MAP_FAILED) __ERROR("memory_mapped error");
    ::madvise(memory_mapped, file_size, MADV_SEQUENTIAL);

    // Header: V, E, direction — then the bulk arrays via Batch.
    reinterpret_cast<node_t*>(memory_mapped)[0] = V;
    memory_mapped = (char*) memory_mapped + sizeof(node_t);
    reinterpret_cast<edge_t*>(memory_mapped)[0] = E;
    memory_mapped = (char*) memory_mapped + sizeof(edge_t);
    reinterpret_cast<EdgeType*>(memory_mapped)[0] = Direction;
    memory_mapped = (char*) memory_mapped + sizeof(EdgeType);

    xlib::Batch batch(file_size - sizeof(node_t) - sizeof(edge_t)
                      - sizeof(EdgeType));
    batch.writeBinary(memory_mapped,
                      OutOffset, V + 1, InOffset, V + 1,
                      OutDegrees, V, InDegrees, V,
                      OutEdges, E, InEdges, E);

    ::munmap(memory_mapped, file_size);
    ::close(fd);
}
#endif

// Debug helper: dump every CSR array to stdout.
template<typename node_t, typename edge_t, typename dist_t>
void GraphSTD<node_t, edge_t, dist_t>::print() const {
    using namespace xlib;
    printArray(OutOffset, V + 1, "OutOffset\t");
    printArray(OutEdges, E, "OutEdges\t");
    printArray(OutDegrees, V, "OutDegrees\t");
    if (Direction == EdgeType::UNDIRECTED)
        return;
    printArray(InOffset, V + 1, "InOffset\t");
    printArray(InEdges, E, "InEdges\t\t");
    printArray(InDegrees, V, "InDegrees\t");
}

} //@graph

#include "GraphSTD_Read.cpp"
#include "GraphSTD_BFS.cpp"
#include "GraphSTD_SCC.cpp"

template class graph::GraphSTD<>;
ALBANY, NY – The Albany Devils will face the Utica Comets during the First Round of the 2016 Calder Cup Playoffs. The Devils, who finished second in the North Division with a 46-20-8-2 record, will have home-ice advantage during the best-of-five series. Game 1 is slated for Friday, April 22 at 7 pm at Times Union Center. The two clubs will play Game 2 on Saturday, April 23 at 5 pm at Times Union Center. The series shifts to the Utica Memorial Auditorium for Game 3 on Tuesday, April 26 at 7 pm. If necessary, Game 4 will be on Thursday, April 28 at 7 pm at the Auditorium and Game 5 will be on Saturday, April 30 at 5 pm at Times Union Center. Game Date Day Location Time 1 April 22 Friday Times Union Center 7 PM 2 April 23 Saturday Times Union Center 5 PM 3 April 26 Tuesday Utica Auditorium 7 PM 4* April 28 Thursday Utica Auditorium 7 PM 5* April 30 Saturday Times Union Center 5 PM Albany and Utica met eight times during the regular season and the Devils held the advantage, going 6-0-2-0. Jim O’Brien and Matt Lorito led the team against the Comets, producing 11 and seven points, respectively. O’Brien had seven tallies, four assists and a +10 rating, while Lorito produced three goals, four helpers and a +5 rating. Both Scott Wedgewood and Yann Danis went 3-0-1. Wedgewood had a 1.48 goals-against average and a .945 save percentage and Danis had a 2.21 goals-against average and a .909 save percentage. Jordan Subban led the Comets, which finished third in the division at 38-26-8-4, against the Devils with six points. He produced a tally and five assists during the eight games. In goal, Richard Bachman was 2-2-1 with a 2.77 goals-against average and a .889 save percentage. Joe Cannata went 0-1-1 with a 3.45 goals-against average and a .892 save percentage. Fans wishing to attend Games 1 and/or 2 can take advantage of the Devils First Round Presale, which is underway now. Tickets start at just $12 and can be purchased online at thealbanydevils.com/playoffs.
The presale is scheduled to end on Monday, April 18 at 9:59 am. Tickets go on sale at the Times Union Center Box Office on Monday, April 18 at 10 am. Fans can also purchase tickets at any Ticketmaster outlet, via charge-by-phone at 1-800-745-3000 or online at ticketmaster.com. Fans are encouraged to call the Devils’ Executive Office at 518-4-DEVILS for information on Calder Cup Ticket Packages and to save significantly off box office pricing. Individuals also have an opportunity to receive complimentary first round tickets and secure their seat locations throughout the playoffs.
/**
 * Here 2 frames slide around on an imaginary table top extending into the
 * distance. This creates a 3D-type effect where frames move in and out of the
 * users field of view.
 * <P>
 * (The best way to understand exactly what this does is to see it in action.)
 * <P>
 * More technically: this creates a PerspectiveTransform and projects two dots
 * into the plane stretching into the distance, and then this positions the two
 * frames based on those dots.
 *
 */
public abstract class AbstractPlanarTransition2D extends Transition2D {

	/** Color painted behind both frames and used to tint/shade them. */
	Color background;

	/** Creates a transition with a black background. */
	public AbstractPlanarTransition2D() {
		this(Color.black);
	}

	/**
	 * Creates a transition with the given background color.
	 *
	 * @param background the color rendered behind the frames
	 */
	public AbstractPlanarTransition2D(Color background) {
		this.background = background;
	}

	@Override
	public Transition2DInstruction[] getInstructions(float progress,
			Dimension size) {
		// Build the "table top": a perspective mapping from the unit square
		// onto a trapezoid whose far edge (upperY) is narrower than its near
		// edge (lowerY), giving the illusion of depth.
		PerspectiveTransform transform;
		double upperY, lowerY;
		upperY = size.height * 7 / 10;
		lowerY = size.height;
		double x = size.width * 5 / 20;
		transform = PerspectiveTransform.getQuadToQuad(0, 0, 1, 0, 0, 1, 1, 1,
				x, upperY, size.width - x, upperY, 0, lowerY, size.width,
				lowerY);

		Point2D p = new Point2D.Double(0, .5);
		transform.transform(p, p);

		// Ask the subclass where each frame sits on the stage at this moment,
		// then project those stage coordinates through the perspective.
		Point2D pA = getFrameALocation(progress);
		Point2D pB = getFrameBLocation(progress);
		transform.transform(pA, pA);
		transform.transform(pB, pB);

		// Size each frame proportionally to its projected depth: frames
		// further away (smaller height relative to lowerY) render smaller.
		double height, ratio, width;
		Rectangle2D r1 = new Rectangle2D.Double();
		Rectangle2D r2 = new Rectangle2D.Double();

		height = lowerY - (lowerY - pA.getY()) * 2;
		ratio = height / lowerY;
		width = size.getWidth() * ratio;
		r1.setFrame(pA.getX() - width / 2, pA.getY() - height, width, height);

		height = lowerY - (lowerY - pB.getY()) * 2;
		ratio = height / lowerY;
		width = size.getWidth() * ratio;
		r2.setFrame(pB.getX() - width / 2, pB.getY() - height, width, height);

		Rectangle big = new Rectangle(0, 0, size.width, size.height);

		AffineTransform transform1 = RectangularTransform.create(big, r1);
		AffineTransform transform2 = RectangularTransform.create(big, r2);

		float opacity1 = getFrameAOpacity(progress);
		float opacity2 = getFrameBOpacity(progress);

		// Each frame is drawn as the image plus a translucent shade rectangle
		// on top; the shade opacity is the complement of the frame opacity.
		ImageInstruction i1A = new ImageInstruction(true, transform1, null);
		ShapeInstruction i1B = new ShapeInstruction(r1, getShade(1 - opacity1));
		ImageInstruction i2A = new ImageInstruction(false, transform2, null);
		ShapeInstruction i2B = new ShapeInstruction(r2, getShade(1 - opacity2));

		// Mirror each frame vertically below itself to fake a reflection on
		// the table top (the *z transforms flip the image downward).
		AffineTransform transform1z = TransformUtils.createAffineTransform(0,
				0, big.getWidth(), 0, 0, big.getHeight(), r1.getX(),
				r1.getY() + r1.getHeight() * 2, r1.getX() + r1.getWidth(),
				r1.getY() + r1.getHeight() * 2, r1.getX(),
				r1.getY() + r1.getHeight() + 1);
		AffineTransform transform2z = TransformUtils.createAffineTransform(0,
				0, big.getWidth(), 0, 0, big.getHeight(), r2.getX(),
				r2.getY() + r2.getHeight() * 2, r2.getX() + r2.getWidth(),
				r2.getY() + r2.getHeight() * 2, r2.getX(),
				r2.getY() + r2.getHeight() + 1);
		Rectangle2D shadow1Rect = new Rectangle2D.Double(r1.getX(),
				r1.getY() + r1.getHeight() + 1, r1.getWidth(), r1.getHeight());
		Rectangle2D shadow2Rect = new Rectangle2D.Double(r2.getX(),
				r2.getY() + r2.getHeight() + 1, r2.getWidth(), r2.getHeight());
		ImageInstruction i1ShadowA = new ImageInstruction(true, transform1z,
				null);
		// Reflections are mostly washed out: only 30% of the frame opacity.
		ShapeInstruction i1ShadowB = new ShapeInstruction(shadow1Rect,
				getShade(1 - opacity1 * .3f));
		ImageInstruction i2ShadowA = new ImageInstruction(false, transform2z,
				null);
		ShapeInstruction i2ShadowB = new ShapeInstruction(shadow2Rect,
				getShade(1 - opacity2 * .3f));

		ShapeInstruction backgroundRect = new ShapeInstruction(new Rectangle(0,
				0, size.width, size.height), background, null, 0);

		// Paint the farther (smaller) frame first so the nearer one overlaps.
		if (r1.getHeight() > r2.getHeight()) {
			return new Transition2DInstruction[] { backgroundRect, i2A, i2B,
					i2ShadowA, i2ShadowB, i1A, i1B, i1ShadowA, i1ShadowB };
		} else {
			return new Transition2DInstruction[] { backgroundRect, i1A, i1B,
					i1ShadowA, i1ShadowB, i2A, i2B, i2ShadowA, i2ShadowB };
		}
	}

	/**
	 * Returns the background color at the given opacity (0 = transparent,
	 * 1 = fully opaque).
	 */
	private Color getShade(float opacity) {
		return new Color(background.getRed(), background.getGreen(),
				background.getBlue(), (int) (255 * opacity));
	}

	/**
	 * This should be a dot within the rectangle (0,0,1,1). Imagine the
	 * rectangle is a diagram of a stage (you know, a theatrical stage, facing
	 * an audience). When the point is (.5,1), this frame is exactly centered in
	 * the user's field of view. At (.5,0) this frame is centered, but small --
	 * as if in the distance. So the y-coordinate is used to represent depth,
	 * and the x-coordinate is used to represent horizontal movement.
	 */
	public abstract Point2D getFrameALocation(float p);

	/**
	 * This should be a dot within the rectangle (0,0,1,1). Imagine the
	 * rectangle is a diagram of a stage (you know, a theatrical stage, facing
	 * an audience). When the point is (.5,1), this frame is exactly centered in
	 * the user's field of view. At (.5,0) this frame is centered, but small --
	 * as if in the distance. So the y-coordinate is used to represent depth,
	 * and the x-coordinate is used to represent horizontal movement.
	 */
	public abstract Point2D getFrameBLocation(float p);

	/** The opacity of the first frame */
	public abstract float getFrameAOpacity(float p);

	/** The opacity of the second frame */
	public abstract float getFrameBOpacity(float p);

}
# O(N)
import unittest


def is_substring(string, sub):
    """Return True when `sub` occurs inside `string`."""
    return string.find(sub) != -1


def string_rotation(s1, s2):
    """Return True when s2 is a rotation of the non-empty string s1.

    A rotation of s1 is always a substring of s1 concatenated with itself.
    """
    if len(s1) != len(s2) or not s1:
        return False
    return is_substring(s1 + s1, s2)


class Test(unittest.TestCase):
    """Test Cases"""

    data = [
        ('waterbottle', 'erbottlewat', True),
        ('foo', 'bar', False),
        ('foo', 'foofoo', False)
    ]

    def test_string_rotation(self):
        for s1, s2, expected in self.data:
            with self.subTest(s1=s1, s2=s2):
                self.assertEqual(string_rotation(s1, s2), expected)


if __name__ == "__main__":
    unittest.main()
Muscular Activity and the Biomechanics of the Hip The interactions between the forces transmitted by the muscles and by the bones are central to the understanding of load transmission in the musculoskeletal system. A reasonable concept of the biomechanics of the hip can only be grasped when the activities of all the major muscle groups acting across the hip and proximal femur are considered.
""" Hubspot models """ from django.db import models from ecommerce.models import Line class HubspotErrorCheck(models.Model): """ Store the datetime of the most recent Hubspot API error check. """ checked_on = models.DateTimeField() class HubspotLineResync(models.Model): """ Indicates that hubspot tried to sync a line before it's order and needs to be resynced """ line = models.ForeignKey(Line, on_delete=models.CASCADE)
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.runtime.core;

import java.io.IOException;
import java.text.MessageFormat;

import com.asakusafw.runtime.core.api.ApiStub;
import com.asakusafw.runtime.core.api.ReportApi;
import com.asakusafw.runtime.core.legacy.LegacyReport;
import com.asakusafw.runtime.core.legacy.RuntimeResource;

/**
 * Report API entry class.
 * The Report API enables to notify some messages in operator methods, to the runtime reporting system
 * (e.g. logger, standard output, or etc.).
 * Generally, the Report API does not have any effect on the batch execution, for example, the batch execution will
 * continue even if {@link Report#error(String)} is invoked.
 * Clients should put <code>&#64;Sticky</code> annotation for operator methods using this API, otherwise the Asakusa
 * DSL compiler optimization may remove the target operator.
<pre><code>
&#64;Sticky
&#64;Update
public void updateWithReport(Hoge hoge) {
    if (hoge.getValue() &lt; 0) {
        Report.error("invalid value");
    } else {
        hoge.setValue(0);
    }
}
</code></pre>
 * @since 0.1.0
 * @version 0.9.0
 */
public final class Report {

    /**
     * The Hadoop property name of the custom implementation class name of {@link Report.Delegate}.
     * To use a default implementation, clients should set
     * {@code com.asakusafw.runtime.core.Report$Default} to it.
     */
    public static final String K_DELEGATE_CLASS = "com.asakusafw.runtime.core.Report.Delegate"; //$NON-NLS-1$

    // All public entry points forward to the currently installed ReportApi
    // through this stub; LegacyReport.API is the initial implementation.
    private static final ApiStub<ReportApi> STUB = new ApiStub<>(LegacyReport.API);

    private Report() {
        return;
    }

    /**
     * Reports an <em>informative</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @throws Report.FailedException if an error occurred while reporting the message
     * @see Report
     */
    public static void info(String message) {
        STUB.get().info(message);
    }

    /**
     * Reports an <em>informative</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if an error occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void info(String message, Throwable throwable) {
        STUB.get().info(message, throwable);
    }

    /**
     * Reports a <em>warning</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @throws Report.FailedException if an error occurred while reporting the message
     * @see Report
     */
    public static void warn(String message) {
        STUB.get().warn(message);
    }

    /**
     * Reports a <em>warning</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if an error occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void warn(String message, Throwable throwable) {
        STUB.get().warn(message, throwable);
    }

    /**
     * Reports an <em>error</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * Please be careful that this method will <em>NOT</em> shutdown the running batch.
     * To shutdown the batch, throw an exception ({@link RuntimeException}) in operator methods.
     * @param message the message
     * @throws Report.FailedException if an error occurred while reporting the message
     * @see Report
     */
    public static void error(String message) {
        STUB.get().error(message);
    }

    /**
     * Reports an <em>error</em> message.
     * Clients should put <code>&#64;Sticky</code> annotation to the operator method that using this.
     * Please be careful that this method will <em>NOT</em> shutdown the running batch.
     * To shutdown the batch, throw an exception ({@link RuntimeException}) in operator methods.
     * @param message the message
     * @param throwable the optional exception object (nullable)
     * @throws Report.FailedException if an error occurred while reporting the message
     * @see Report
     * @since 0.5.1
     */
    public static void error(String message, Throwable throwable) {
        STUB.get().error(message, throwable);
    }

    /**
     * Returns the API stub.
     * Application developer must not use this directly.
     * @return the API stub
     * @since 0.9.0
     */
    public static ApiStub<ReportApi> getStub() {
        return STUB;
    }

    /**
     * {@link FailedException} is thrown when an exception was occurred while processing messages in {@link Report}.
     */
    public static class FailedException extends RuntimeException {

        private static final long serialVersionUID = 1L;

        /**
         * Creates a new instance.
         */
        public FailedException() {
            super();
        }

        /**
         * Creates a new instance.
         * @param message the exception message (nullable)
         * @param cause the original cause (nullable)
         */
        public FailedException(String message, Throwable cause) {
            super(message, cause);
        }

        /**
         * Creates a new instance.
         * @param message the exception message (nullable)
         */
        public FailedException(String message) {
            super(message);
        }

        /**
         * Creates a new instance.
         * @param cause the original cause (nullable)
         */
        public FailedException(Throwable cause) {
            super(cause);
        }
    }

    /**
     * An abstract super class of delegation objects for {@link Report}.
     * Application developers can inherit this class, and set the fully qualified name to the property
     * {@link Report#K_DELEGATE_CLASS} to use the custom implementation for the Report API.
     * @since 0.1.0
     * @version 0.7.4
     */
    public abstract static class Delegate implements RuntimeResource {

        /**
         * Notifies a report.
         * @param level report level
         * @param message report message
         * @throws IOException if failed to notify this report by I/O error
         */
        public abstract void report(Level level, String message) throws IOException;

        /**
         * Notifies a report.
         * The default implementation discards the throwable and delegates to
         * {@link #report(Level, String)}.
         * @param level report level
         * @param message report message
         * @param throwable optional exception info (nullable)
         * @throws IOException if failed to notify this report by I/O error
         * @since 0.5.1
         */
        public void report(Level level, String message, Throwable throwable) throws IOException {
            report(level, message);
        }
    }

    /**
     * Represents levels of reporting.
     */
    public enum Level {

        /**
         * Informative level.
         */
        INFO,

        /**
         * Warning level.
         */
        WARN,

        /**
         * Erroneous level.
         */
        ERROR,
    }

    /**
     * A basic implementation of {@link Delegate}.
     * INFO goes to standard output; WARN and ERROR go to standard error.
     * @since 0.1.0
     * @version 0.5.1
     */
    public static class Default extends Delegate {

        @Override
        public void report(Level level, String message) {
            switch (level) {
            case INFO:
                System.out.println(message);
                break;
            case WARN:
                System.err.println(message);
                // A synthetic exception is printed to capture the call site.
                new Exception("Warning").printStackTrace();
                break;
            case ERROR:
                System.err.println(message);
                new Exception("Error").printStackTrace();
                break;
            default:
                throw new AssertionError(MessageFormat.format(
                        "[{0}] {1}", //$NON-NLS-1$
                        level,
                        message));
            }
        }

        @Override
        public void report(Level level, String message, Throwable throwable) {
            switch (level) {
            case INFO:
                System.out.println(message);
                if (throwable != null) {
                    throwable.printStackTrace(System.out);
                }
                break;
            case WARN:
            case ERROR:
                System.err.println(message);
                if (throwable != null) {
                    throwable.printStackTrace(System.err);
                }
                break;
            default:
                throw new AssertionError(MessageFormat.format(
                        "[{0}] {1}", //$NON-NLS-1$
                        level,
                        message));
            }
        }
    }
}
// // Creates a new instance of the media session. // HRESULT MFCameraPlayer::CreateSession() { HRESULT hr = S_OK; MF_TOPOSTATUS topoStatus = MF_TOPOSTATUS_INVALID; CComQIPtr<IMFMediaEvent> mfEvent; do { hr = CloseSession(); BREAK_ON_FAIL(hr); assert(state_ == PlayerState::Closed); hr = MFCreateMediaSession(NULL, &pSession_); LOG_TRACE("MFCreateMediaSession returned: " << hr); BREAK_ON_FAIL(hr); state_ = PlayerState::Ready; hr = pSession_->BeginGetEvent((IMFAsyncCallback*)this, NULL); LOG_TRACE("BeginGetEvent returned: " << hr); BREAK_ON_FAIL(hr); } while(false); LOG_TRACE("Returning error code: " << hr); return hr; }
Europe's largest manufacturer of electrical cables and wires Leoni AG has seen its shares fall by between 5-7% after reporting that an email phishing scam caused the company to lose €40m ($44.7m, £33.7m) overnight. Leoni AG is a German firm, but it has a factory located in Bistrita, a city in northern Romania. According to Romanian newspapers, on 12 August, the funds disappeared because the CFO of the Bistrita factory was tricked into transferring money into an unknown bank account because the email looked like it came from one of the manufacturer's top executives in Germany. Somehow, the hackers knew that the Bistrita factory was the only one in Romania out of four Leoni factories that was allowed to authorise and make money transfers. The incident is so severe that it has been escalated by Romanian police and is now being investigated by Romania's Directorate for Investigating Organised Crime and Terrorism, according to Softpedia. But this is not the first high profile case of an email phishing scam targeting corporate entities. In March, it was revealed that in April 2015, Mattel almost lost $3m in a similar fraud in China, but fortunately it was a Bank holiday when the transfer was made to the hackers' bank account, and Chinese police in the province of Wenzhou and the bank in question were able to freeze the funds in time. How corporate email phishing scams work There are lots of security vulnerabilities for hackers to exploit, but it is often difficult to get cyberattacks to translate into tangible monetary returns. Nevertheless, there is a growing trend of cybercriminals who have figured out a sophisticated multi-step strategy known as Business Email Compromise (BEC) to trick companies into giving up big bucks, and the bad news is that it is working. The idea is that the cybercriminals use a vulnerability that has not been patched to hack into a corporate network, and then the hackers sit quietly unnoticed on the network pretending to be just another user. 
The hackers infiltrate corporate employee email accounts, and read enough email threads so that they can figure out the chain of command in the company, and specifically, how certain important figures in the company converse. When they are poised to strike, the cybercriminals impersonate the CEO or another senior figure, and send an email to the person in charge of financing, such as the Chief Financial Officer (CFO), requesting for a payment to be made to a third party contractor by transferring funds to a certain bank account. The CEO is the CFO's boss, and what do you do when your boss tells you to do something? Usually, you do it, no questions asked, and this is how the hackers win – the poor CFO in question receives an email from the CEO, from the CEO's email address, composed in the style the CEO would write such a correspondence, thinks nothing of it, and complies, transferring the funds to the account number stated in the email. To prevent your company from suffering a similar fate, it is important for enterprises to set out a policy whereby several employees all have to provide active in-person authorisation where confirmation is provided in real life contexts before funds can be transferred outside the company, rather than permitting funds to be sent via email authorisation. And of course, it would make sense to ensure that your IT department is keeping up to date with patching all security vulnerabilities and beefing up network security too.
def find_product_in_list(alist, factors):
    """Return the largest product of any `factors` consecutive elements.

    Slides a window of length `factors` across `alist`, computing each
    window's product with `calculate_product` and keeping the maximum.
    Returns 0 when no full window fits (factors > len(alist)).
    """
    best = 0
    for start in range(len(alist) - factors + 1):
        window_product = calculate_product(alist[start:start + factors])
        best = max(best, window_product)
    return best
// Note unit struct (field-less struct) impl fmt::Display for InvalidBoardSize { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Board data is of invalid size. Has to contain {size} elements", size = BOARD_SIZE ) } }
def from_tuple(cls, data: Tuple[Tuple, Tuple, Tuple, int, bool]) -> "Candle":
    """Build a Candle from its 5-element tuple form.

    The tuple layout is: three Ohlc tuples, a raw timestamp int, and a
    boolean flag, in that order.
    """
    first, second, third, raw_time, flag = data
    return cls(
        Ohlc.from_tuple(first),
        Ohlc.from_tuple(second),
        Ohlc.from_tuple(third),
        TimeInt(raw_time),
        flag,
    )
def describe_xml(self):
    """Build the OWS XML element describing this allowed value.

    A plain VALUE becomes an <ows:Value>; anything else becomes an
    <ows:Range> with closure, min/max, and optional spacing children.
    """
    if self.allowed_type == ALLOWEDVALUETYPE.VALUE:
        return OWS.Value(str(self.value))

    rng = OWS.Range()
    rng.set('{%s}rangeClosure' % NAMESPACES['ows'], self.range_closure)
    rng.append(OWS.MinimumValue(str(self.minval)))
    rng.append(OWS.MaximumValue(str(self.maxval)))
    if self.spacing:
        rng.append(OWS.Spacing(str(self.spacing)))
    return rng
//Get handler to get a especific menu func Get(mEnv *config.Data) httprouter.Handle { return func(w http.ResponseWriter, r *http.Request, ps httprouter.Params) { id, _ := primitive.ObjectIDFromHex(ps.ByName("id")) menu := OneData(mEnv.CL, id) fmt.Fprintf(w, "%s, %s, %T\n", menu.ID, menu.Category, menu.Items) } }
//////////////////////////////////////////////////////////////////////// // vehicle_nissanleaf_car_on() // Takes care of setting all the state appropriate when the car is on // or off. Centralized so we can more easily make on and off mirror // images. // void vehicle_nissanleaf_car_on(bool isOn) { StandardMetrics.ms_v_env_on->SetValue(isOn); StandardMetrics.ms_v_env_awake->SetValue(isOn); StandardMetrics.ms_v_env_handbrake->SetValue(!isOn); }
Selma James’ Sex, Race and Class offers a way to grapple with many of the unanswered questions that are frustrating today’s movements for social justice. This new collection of pieces (James’ short pamphlet of the same title was first published in 1974) is the harvest of seven decades of grassroots organising, a lifetime spent listening to – and amplifying – unheard voices, and a bold political imagination that is not shy to break new ground. What else would you expect of a book that has the nerve to subtitle itself ‘the perspective of winning’? It also treats you to wit of a kind you see only when the absolute absurdity of capital’s logic meets someone who accepts not even one little one of its premises. A review can’t begin to do justice to an anthology of classics that ranges from art and literature to Marx and feminism, from breastfeeding to Zionism, from 1950s Los Angeles through an occupied church in 1980s London to present day Haiti. Instead, here are a few of the questions this anthology addresses. Diversity A question often raised in social justice groups is ‘Why are we all so white and middle class?’ even in groups where everyone is not. Sex, Race and Class has a different starting point: how have we – a different, more inclusive, ‘we’ – been divided, and how and on what terms can we manage to come together? Looking ‘from the bottom up’, it opens with ‘A Woman’s Place’ (1952), written when James was a young housewife and factory worker, and is then informed by people of colour in the global South: by West Indians, Tanzanians, Haitians, Venezuelans, the immigrant ‘south in the north’, and more broadly by ‘the wageless of the world’. For James, the ‘work . . . hard work’ of ‘confronting the power relations among ourselves and with others as they surface in the course of our campaigning, spelling out the varied, pernicious and subtle forms they take, and working out ways to organise against them . . . 
is not separate or apart from organising but central to it.’ ‘Freely associating with each other to reshape the world’ James names the liberation we feel when we begin to overcome the pervasive social hierarchy, the hierarchy of sex, race, class, and, particularly, education, which diverts organisers’ energy into ‘competition, antagonism and even violence among us’ – and movements into directions of academics’ choice. In contrast to many activists’ horror of anyone having ‘more say than anyone else’, she sees leadership as potentially a power for suppressed social layers, encouraging working class ‘self-activity’. Sadly, she largely bypasses the question of how to prevent leadership, and organisations’ self-determined hierarchies, undermining collectivity. Work Most people know the call for full employment is as absurd as it is unachievable. Yet we’re still supposed to dance to the tune of ‘Fight for Jobs’, while the Labour Party promises childcare for all, so every woman ‘can work’, ignoring the work that every mother is already doing. Those who dreamed that technology could end wage slavery cower in fear of offending ‘hard-working families’ – or the many parties who claim to represent them, at a time when, increasingly, waged jobs are hard won, easily lost, and the only “respectable” route to survival. In her 1972 text ‘The Power of Women and the Subversion of the Community’, James makes the bold assertion that ‘we have worked enough’. This was based not only on an intimate understanding of the many ways capital steals your time – ‘which happens to be your life’ – but on a startling extension of Marxist theory. In these pages you find the first exposition of women’s work as reproduction of labour power, the commodity without which no wheel can turn. ‘First it must be nine months in the womb, must be fed, clothed, and trained; then when it works its bed must be made, its floor swept, its lunchbox prepared, its sexuality not gratified but quietened . . 
.’ On the foundation of this worldwide work stands an entitlement. ‘There is no cake, there is no budget, there is only the wealth which we have created and which they have stolen.’ And welfare ‘should be afforded the dignity of being called a wage’. A positive vision Thrown on the defensive by capital’s hoovering up of the world’s resources, betrayed by what’s called itself ‘socialism’ and fighting tooth and nail against austerity and climate apocalypse, many in movements of resistance are looking to identify ‘something positive to fight for’. ‘Invest in Caring Not Killing’ is a positive, world-embracing vision that is grounded in the here and now and blunt about the nature of the enemy. It was developed in 2000 by the international network Selma James founded, the Global Women’s Strike. This idea inspires the later pieces in the anthology. The concept is in tune with ‘people not profit’ but digs far deeper. Prioritising caring is human, common sense – and the opposite of market economics. Women, children, people who are old or disabled are at its core. Directly confronting the world’s most lethal industry, this vision offers a way to interrogate other industries – and, crucially, to question our own movements, and even ourselves as individuals. In practice, in the Strike, it helps to draw “the dividing line between reformism and revolutionary politics”, to refuse co-optation, and to “connect struggles that may seem to be in competition or even in conflict”. This, then, is both a history and a handbook, offering real answers to your most urgent questions, an uncompromising way forward, when we need it most.
/**
 * Unit tests for {@code Event}.
 */
public class TestEvent {
    /** A null event source must be rejected by the constructor. */
    @Test(expected = IllegalArgumentException.class)
    public void testInitNoSource() {
        new Event(null, Event.ANY);
    }

    /** A null event type must be rejected by the constructor. */
    @Test(expected = IllegalArgumentException.class)
    public void testInitNoType() {
        new Event(this, null);
    }

    /** The string form should list both the source and the event type. */
    @Test
    public void testToString() {
        final Event ev = new Event(this, Event.ANY);
        final String expected =
                "Event [ source=" + this + " eventType=" + Event.ANY + " ]";
        assertEquals("Wrong string representation", expected, ev.toString());
    }
}
def index2sentence(generated_word_index, prob_logit, ixtoword):
    # Convert decoder output indices into a cleaned-up English sentence.
    #
    # generated_word_index: per-position word ids (mutated in place below)
    # prob_logit:           per-position logits over the vocabulary
    # ixtoword:             id -> word mapping
    #
    # First pass: ids <= 1 or == 3 appear to be special tokens
    # (pad/<bos>/<unk> — TODO confirm vocabulary layout against the
    # training code). Replace each with the highest-probability word
    # whose id is > 3.
    for i in range(len(generated_word_index)):
        if generated_word_index[i] == 3 or generated_word_index[i] <= 1:
            sort_prob_logit = sorted(prob_logit[i])
            # Start from the second-highest logit and walk downward until
            # a non-special id is found.
            curindex = np.where(prob_logit[i] == sort_prob_logit[-2])[0][0]
            count = 1
            while curindex <= 3:
                curindex = np.where(prob_logit[i] == sort_prob_logit[(-2)-count])[0][0]
                count += 1
            generated_word_index[i] = curindex
    # Map ids back to word strings.
    generated_words = []
    for ind in generated_word_index:
        generated_words.append(ixtoword[ind])
    # Truncate at the first '<eos>' (argmax returns the first True;
    # NOTE(review): if no '<eos>' is present this truncates to one word).
    punctuation = np.argmax(np.array(generated_words) == '<eos>') + 1
    generated_words = generated_words[:punctuation]
    generated_sentence = ' '.join(generated_words)
    # Strip remaining special tokens / artifacts.
    generated_sentence = generated_sentence.replace('<bos> ', '')
    generated_sentence = generated_sentence.replace('<eos>', '')
    generated_sentence = generated_sentence.replace('--', '')
    # Capitalize every word; multi-character words also get a trailing
    # period (this is the original formatting choice, preserved as-is).
    generated_sentence = generated_sentence.split(' ')
    for i in range(len(generated_sentence)):
        generated_sentence[i] = generated_sentence[i].strip()
        if len(generated_sentence[i]) > 1:
            generated_sentence[i] = generated_sentence[i][0].upper() + generated_sentence[i][1:] + '.'
        else:
            generated_sentence[i] = generated_sentence[i].upper()
    generated_sentence = ' '.join(generated_sentence)
    # Normalize common first-person forms.
    generated_sentence = generated_sentence.replace(' i ', ' I ')
    generated_sentence = generated_sentence.replace("i'm", "I'm")
    generated_sentence = generated_sentence.replace("i'd", "I'd")
    return generated_sentence
Clang-Tidy is a linter from the LLVM ecosystem. I wanted to try to run it on the Linux kernel to see what kind of bugs it would find. The false positive rate seems pretty high (a persistent bane to static analysis), but some patching in both the tooling and the source can likely help bring this rate down. The most straightforward way to invoke Clang-Tidy is with a compilation database, which is a json based file that for each translation unit records The source file of the translation unit. The top level directory of the source. The exact arguments passed to the compiler. The exact arguments are required because -D and -I flags are necessary to reproduce the exact Abstract Syntax Tree (AST) used to compile your code. Given a compilation database, it’s trivial to parse and recreate a build. For the kernel’s KBuild, it’s a lot like encoding the output of make V=1 . In order to generate a compilation database, we can use an awesome tool called BEAR. BEAR will hook calls to exec and family, then write out the compilation database (compile_commands.json). With BEAR installed, we can invoke the kernel’s build with bear make -j . 
When we’re done: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 ➜ linux git: ( nick ) ✗ du -h compile_commands.json 11M compile_commands.json ➜ linux git: ( nick ) ✗ wc -l compile_commands.json 330296 compile_commands.json ➜ linux git: ( nick ) ✗ head -n 26 compile_commands.json [ { "arguments" : [ "cc" , "-c" , "-Wp,-MD,arch/x86/boot/tools/.build.d" , "-Wall" , "-Wmissing-prototypes" , "-Wstrict-prototypes" , "-O2" , "-fomit-frame-pointer" , "-std=gnu89" , "-Wno-unused-value" , "-Wno-unused-parameter" , "-Wno-missing-field-initializers" , "-I./tools/include" , "-include" , "include/generated/autoconf.h" , "-D__EXPORTED_HEADERS__" , "-o" , "arch/x86/boot/tools/build" , "arch/x86/boot/tools/build.c" ] , "directory" : "/home/nick/linux" , "file" : "arch/x86/boot/tools/build.c" } , Now with Clang-Tidy (probably worthwhile to build from source, but it’s also available off apt ), we want to grab this helper script, run-clang-tidy.py to help analyze all this code. 1 curl -O https://raw.githubusercontent.com/llvm-mirror/clang-tools-extra/master/clang-tidy/tool/run-clang-tidy.py Then we can run it from the same directory as compile_commands.json: 1 2 3 python run-clang-tidy.py \ -clang-tidy-binary /usr/bin/clang-tidy-4.0 \ > clang_tidy_output.txt This took about 1hr12min on my box. 
Let’s see what the damage is: 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 ➜ linux git: ( nick ) ✗ cat clang_tidy_output.txt \ | grep warning: | grep -oE '[^ ]+$' | sort | uniq -c 76 [ clang-analyzer-core.CallAndMessage ] 15 [ clang-analyzer-core.DivideZero ] 1 [ clang-analyzer-core.NonNullParamChecker ] 316 [ clang-analyzer-core.NullDereference ] 90 [ clang-analyzer-core.UndefinedBinaryOperatorResult ] 1 [ clang-analyzer-core.uninitialized.ArraySubscript ] 1410 [ clang-analyzer-core.uninitialized.Assign ] 10 [ clang-analyzer-core.uninitialized.Branch ] 5 [ clang-analyzer-core.uninitialized.UndefReturn ] 11 [ clang-analyzer-cplusplus.NewDeleteLeaks ] 694 [ clang-analyzer-deadcode.DeadStores ] 342 [ clang-analyzer-security.insecureAPI.strcpy ] 2 [ clang-analyzer-unix.API ] 11 [ clang-analyzer-unix.Malloc ] 4 [ clang-diagnostic-address-of-packed-member ] 2 [ clang-diagnostic-duplicate-decl-specifier ] 98 [ clang-diagnostic-implicit-int ] Looking through the output, there’s seems to be almost nothing but false positives, but who knows, maybe there’s an actual bug or two in there. Likely possible patches to LLVM, its checkers, or the Linux kernel could lower the false positive ratio. If you’re interested in seeing the kinds of warnings/outputs, I’ve uploaded my results run on a 4.12-rc3 based kernel that may or may not have been compiled with Clang to my clang_tidy branch of the kernel on GitHub. As in my sorted output, I find it handy to grep for warning: . Maybe you can find yourself a good first bug to contribute a fix to the kernel? There’s likely also some checks that make sense to disable or enable. Clang-Tidy also allows you to write and use your own checkers. Who knows, someone may just end up writing static analyses tailored to the Linux kernel.
Aesthetics of Acquisition: Notes on the Transactional Life of Persons and Things in Gabon Abstract Based on a historical study of older and newer visual regimes in Gabon, Equatorial Africa, this paper examines spectacles as world-manufacturing processes that produce and circulate assets. Visual and aesthetic strategies have often been analyzed as technologies of the self that transform and manifest people's identities. I show here that they also work as a means to create resources and put them into motion. The notion of “aesthetics of acquisition” helps to capture the dynamic energy of visual events and reinsert them into the realms of economic production and material exchange. If spectacles allow people to acquire riches, produce new statuses, and circulate resources, I argue, the process through which this occurs cannot be analytically reduced to a mere commodification of the person. Instead, I explain how aesthetics of acquisition enable institutional and social actors to assume temporary commodity status, a moment and a strategy that I call “transactional life.”
// NewTranslate creates new struct. func NewTranslate() (*Translate, error) { sess, err := session.NewSession() if err != nil { return nil, fmt.Errorf("aws session error: %w", err) } return &Translate{ svc: translate.New(sess), }, nil }
// Barrel file: re-exports the utility type helpers so consumers can
// import them all from this single entry point.
export * from './FunctionProperties';
export * from './FunctionPropertyNames';
export * from './Head';
export * from './Next';
def _IsContentfulLine(self, filename, line, is_in_multiline_c_comment):
    """Return True iff `line` carries real content.

    A line is NOT contentful when it is blank, a C++ or C comment line,
    a header-guard line, or inside a multi-line C comment. The checks
    are evaluated in the same short-circuit order as before.
    """
    is_ignorable = (
        _WHITESPACE_LINE_RE.search(line)
        or _CXX_COMMENT_LINE.search(line)
        or _C_COMMENT_LINE.search(line)
        or self._IsHeaderGuardLine(filename, line)
        or self._IsInACComment(line, is_in_multiline_c_comment)
    )
    return not is_ignorable
<filename>archive/errorAndLoading.ts<gh_stars>1-10 import { ReduxStory, ReduxReducer, ReduxStoryLine, ErrorState, LoadingState } from "./baseTypes"; import { unpackStoriesIntoStoryLines } from "./util"; export function createPendingFlagReducer(reduxes) { const initialState = reduxes.reduce((acc, cur) => { return { ...acc, [cur.actions.BASE_NAME]: true } }, {}); return function (state = initialState, action) { const { type } = action; const matches = /(.*)_(REQUEST|SUCCESS|FAILURE|RESET)/.exec(type); if (!matches) return state; const [, requestName, requestState] = matches; return { ...state, [requestName]: !(requestState === 'REQUEST'), } } } /** * This is a bit hacky for now. */ const CLEAR_ALL_ERRORS_SUCCESS = "CLEAR_ALL_ERRORS_SUCCESS"; const CLEAR_SPECIFIC_ERROR = "CLEAR_SPECIFIC_ERROR"; export function createClearSpecificError(baseName: string) { return { type: CLEAR_SPECIFIC_ERROR, payload: baseName } } export function requestClearAllErrors() { return { type: CLEAR_ALL_ERRORS_SUCCESS, payload: null, }; } export function selectAllErrors(state) { return Object.values(state.error).filter(v => !!v); } export const RESET_LOADING_ACTION = 'RESET_LOADING_ACTION'; /** * The error reducer * Only needs to be called once * * For the given list of reduxes, it will instantiate them to null * @param {*} reduxes */ export function createErrorReducer(reduxes : ReduxStoryLine<any, any, any, any>[]) : ReduxReducer<ErrorState, any> { const initialState : ErrorState = reduxes.reduce((acc, cur) => { return { ...acc, [cur.actions.BASE_NAME]: null } }, {}); return function (state = initialState, action) { const { type, payload } = action; if (type === CLEAR_ALL_ERRORS_SUCCESS) { return initialState } if (type === CLEAR_SPECIFIC_ERROR && state[payload]) { return { ...state, [payload]: null, } } const matches = /(.*)_(REQUEST|FAILURE)/.exec(type); if (!matches) return state; const [, requestName, requestState] = matches; return { ...state, [requestName]: requestState === 'FAILURE' ? 
payload : null, }; } } /** * The loading flag reducer * Only needs to be called once * * For the given list of reduxes, it will instantiate them with false. * @param {*} reduxes */ export function createLoadingFlagReducer(reduxes : ReduxStoryLine<any, any, any, any>[]) : ReduxReducer<LoadingState, any> { console.log(reduxes); const initialState = reduxes.reduce((acc, cur) => { console.log(cur); return { ...acc, [cur.actions.BASE_NAME]: false } }, {}); return function (state = initialState, action) { const { type } = action; if (type === RESET_LOADING_ACTION) { return initialState; } const matches = /(.*)_(REQUEST|SUCCESS|FAILURE)/.exec(type); if (!matches) return state; const [, requestName, requestState] = matches; return { ...state, [requestName]: requestState === 'REQUEST', } } }; export function clearAllLoading() { return { type: RESET_LOADING_ACTION, payload: null, } } export function createLoadingFlagReducerFromStories(stories: ReduxStory<any, object>[]) : ReduxReducer<LoadingState, any>{ return createLoadingFlagReducer(unpackStoriesIntoStoryLines(stories)); } export function createErrorReducerFromStories(stories: ReduxStory<any, object>[]) : ReduxReducer<ErrorState, any> { return createErrorReducer(unpackStoriesIntoStoryLines(stories)); }
<reponame>snolflake/graphs-and-tracks<filename>src/app/settings/challenge-messages.ts import { TutorialStep, Message, UI_CONTROL } from '../shared/types' export const TUTORIAL_STEPS: TutorialStep[] = [ { title: 'Welcome to Graphs and Tracks!', content: ` Your mission is to discover the motion of a rolling ball by using the information provided in graphs of position, velocity and acceleration. ` }, { title: 'Motion graphs', content: ` Try to recreate the motion by setting the initial conditions and adjusting the tracks. <br> When you are successful, your graph will match the challenge graph. <br> <br> Click on the <b class="b">ROLL BALL</b> button. `, requires: [UI_CONTROL.ROLL_BUTTON], triggers: [UI_CONTROL.POSITION_GRAPH] }, { title: 'Motion graphs', content: ` Your graphs, with <span class="solid">solid lines</span>, should match the challenge graphs, the <span class="dashed">dashed lines</span>. `, requires: [], triggers: [UI_CONTROL.POSITION_GRAPH] }, { title: 'Position graph', content: ` The vertical axis of the <b class="s">POSITION</b> vs. time graph corresponds to the horizontal position of the ball. `, requires: [], triggers: [] }, { title: 'Finding the right place', content: ` Note the scale of <b class="s">Initial Position</b> directly below the tracks. <br> <br> You can set the starting position of the ball by clicking on new position values, dragging the ball, or dragging the scale pointers. Try it! `, requires: [UI_CONTROL.POSITION_SCALE], triggers: [UI_CONTROL.TUTORIAL_NEXT] }, { title: 'Finding the right place', content: ` The position vs. time graph shows what direction the ball travels. An upward line indicates motion to the right; a dopwnward line shows motion to the left. <br> <br> When ready, click on <b class="v">VELOCITY</b> to see a graph of velocity vs. time. 
`, requires: [UI_CONTROL.VELOCITY_GRAPH], triggers: [UI_CONTROL.TUTORIAL_NEXT] }, { title: 'Velocity graph', content: ` Here is the <b class="v">VELOCITY</b> graph for this first example of motion. <br><br> Can you see places where the speed changes? Or stays the same for a period of time? `, requires: [], triggers: [] }, { title: 'Initial speed and direction', content: ` In some challenge motions, the ball may already be moving at time t=0. <br> <br> Its initial velocity may be positive <span>(moving to the right)</span> or negative <span>(moving to the left)</span>, or zero <span>(at rest)</span>. <br> <br> Use the <b class="v">Initial Velocity</b> scale to set the ball’s starting velocity. `, requires: [UI_CONTROL.VELOCITY_SCALE], triggers: [UI_CONTROL.TUTORIAL_NEXT] }, { title: 'Initial speed and direction', content: ` Try different initial velocities (positive and negative) and see what happens. <br> <br> When ready, click on <b class="a">ACCELERATION</b>. `, requires: [UI_CONTROL.ACCELERATION_GRAPH], triggers: [UI_CONTROL.TUTORIAL_NEXT] }, { title: 'Acceleration graph', content: ` The acceleration vs. time graph shows when the acceleration of the ball is positive (to the right), negative (to the left) or zero (constant velocity). `, requires: [], triggers: [] }, { title: 'Change the track setup', content: ` Tap just above or below the top of a <b>post</b> to raise or lower it. <br> <br> Or drag the top up or down to increase or decrease its <b>height</b>. <br> <br> The <b>height</b> of each <b>post</b> is displayed at its base. <br> <br> `, requires: [UI_CONTROL.TRACK_POST_ANY], triggers: [] }, { title: 'Test your new setup', content: ` Select the <b class="a">ACCELERATION</b> graph. <br> <br> Then click <b class="b">ROLL BALL</b> to see how the acceleration of the ball changes. <br> <br> Acceleration graphs will help you for find out how steep to make your ramps. 
`, requires: [UI_CONTROL.ROLL_BUTTON], triggers: [UI_CONTROL.ACCELERATION_GRAPH] }, { title: 'One step at a time', content: ` The problem can be simplified by focusing on short segments. <br> <br> To see the motion for only one sloping section at a time, click and <b>hold down</b> the <b class="b">ROLL BALL</b> button. `, requires: [UI_CONTROL.ROLL_BUTTON_HOLD], triggers: [] }, { title: 'Tutorial complete', content: ` That’s it. Feel free to experiment in the <a [routerLink]="['/challenges/practice']">Practice</a> section, or go directly to the <a [routerLink]="['/challenges']">Challenges</a> list to pick your first one. `, requires: [] }, ] export const HINT_MESSAGES: { [name: string]: Message } = { intro: { // NOTE: titles and messages can be arrays, they will be randomly selected title: ['Hints enabled'], content: ` With hints enabled, we will provide guidance on how to improve your current setup. <br> <br> You can select the <b>tutorial</b> button for instructions on how to use the program. ` }, position: { title: 'Starting position', content: ` Your ball starts at the wrong place. <br> <br> Examine the <b class="s">position graph</b> and read off the <b class="s">initial position</b> <span>(at Time t=0)</span>. ` }, velocity: { title: 'Initial velocity', content: ` Use the <b class="v">VELOCITY</b> graph to read off the initial velocity. <br> If the initial velocity is positive, this means the ball is initially rolling to the right; if it’s negative the ball rolls to the left. ` }, posts: { title: 'Accelerations on ramps', content: ` The highlighted ramp has the wrong inclination. <br><br> Take a careful look at the <b class="a">acceleration graph</b> to see whether the acceleration on this ramp should be <b>positive</b> <span>(ramp slopes downward to the right)</span>, <b>negative</b> <span>(slopes upward to the right)</span> or <b>zero</b> (ramp is level). 
<br> <br> Holding down the <b class="b">ROLL BALL</b> button will show the motion one segment at a time. ` } } const KUDOS_TITLES: string[] = [ 'Congratulations!', 'Excellent!', 'That’s it!', 'Very good!', ] const KUDOS_INTROS: string[] = [ 'You have successfully recreated the motion described in the challenge graphs.', 'All three graphs match. You have found the correct motion.', 'You have reproduced the challenge motion.', 'The graphs for your motion match the graphs of the challenge.', ] const KUDOS_ICONS: { [key: string]: string } = { normal: 'sentiment_satisfied', good: 'mood', awesome: 'sentiment_very_satisfied', } const KUDOS_H0N0 = 'Without using any hints, you discovered the motion on your first try.' const KUDOS_H0N1 = 'Without using any hints, you found the solution after %N% tries.' const KUDOS_H1N0 = 'You found the correct arrangement on your first try. Now try another challenge without using any hints.' const KUDOS_H1N1 = 'You found the correct arrangement after %N% tries. Try another. See if you can do it without hints.' const KUDOS_SS = `Note that with this track arrangement and the initial position and initial velocity values as shown on the scales, the graphs match. <br><br> When ready, try a challenge without viewing the solution.` export const KUDOS = { titles: KUDOS_TITLES, intros: KUDOS_INTROS, icons: KUDOS_ICONS, h0n0: KUDOS_H0N0, h0n1: KUDOS_H0N1, h1n0: KUDOS_H1N0, h1n1: KUDOS_H1N1, ss: KUDOS_SS }
<filename>src/global.d.ts<gh_stars>0 declare global { function setInterval(handler: TimerHandler, timeout?: number, ...arguments: any[]): number; }
/* Free a set of linked memory blocks.
 *
 * Iterative rewrite of the original recursive version: one stack frame
 * per node could overflow the stack on long chains. For normal builds
 * the observable behavior is identical; with _DELETE defined the trace
 * now prints head-to-tail instead of tail-to-head.
 */
static void freeMemBlock(mem_blk_set_t* head)
{
    while (head) {
        mem_blk_set_t* next = head->next;
#ifdef _DELETE
        /* %p is the correct conversion for a pointer (%x on a pointer is
         * undefined behavior). */
        printf("Freeing mem block = %p\n", (void*)head);
#endif
        free(head);
        head = next;
    }
}
Myelodysplastic syndromes: the pediatric point of view. Myelodysplastic syndromes (MDS) are clonal disorders of the multipotent hematopoietic stem cell characterized by ineffective hematopoiesis and associated with marrow hypercellularity, increased intramedullary cell death and peripheral cytopenias of varying severity. Patients with myelodysplasia have a propensity (20% to 30% of cases) to undergo transformation into acute myeloid leukemia (AML), and a large body of evidence indicates that MDS represent steps in the multiphasic evolution of AML. Progression of the disease is characterized by expansion of the abnormal clone and inhibition of normal hematopoiesis leading to deterioration of the blood cell count and/or development of AML. MDS are relatively unusual in childhood, representing only 3% of pediatric hematological malignancies, although it has been reported that up to 17% of pediatric AML cases may have a previous myelodysplastic phase. The first systematic attempt at morphological classification of MDS was provided by the French-American-British (FAB) group. However, the FAB classification of MDS is only partially applicable in children. Some variants are extremely rare or absent (refractory anemia with ring sideroblasts and chronic myelomonocytic leukemia), and other peculiar pediatric disorders, represented by juvenile chronic myelogenous leukemia (JCML) and the monosomy 7 syndrome, are not included. Moreover, since there is a partial overlap between pediatric MDS and myeloproliferative disorders and the variants occurring in young children have rather specific features, some confusion still surrounds the nosographical definition of childhood MDS, so that none of the proposed classifications are widely accepted and used. Characteristically, some genetic conditions such as Fanconi's anemia, Shwachman's and Down's syndromes predispose to the development of MDS in childhood. 
The most common variants of childhood MDS are represented by JCML and the monosomy 7 syndrome, both disorders typically occurring in young children. JCML is characterized by a spontaneous growth of granulocyte-macrophage progenitors that show a striking hypersensitivity to granulocyte-macrophage colony-stimulating factor. Clinical presentation resembles that of some myeloproliferative disorders, with massive organomegaly usually not observed in the classically reported variants of MDS. Clinical features of the monosomy 7 syndrome resemble those observed in JCML and a differential diagnosis between these two entities relies upon the higher percentage of fetal hemoglobin, the more pronounced decrease in platelet count and, in some cases, the lack of the peculiar cytogenetic abnormality in the latter. With the number of children being cured of cancer constantly rising, a significant increase in secondary or chemotherapy-related myelodysplasia is being observed, and these disorders represent a formidable challenge for pediatric hematologists due to their poor response to chemotherapy.(ABSTRACT TRUNCATED AT 400 WORDS)
<reponame>mdmuidulalam/muskdailyapi package data import ( "context" "go.mongodb.org/mongo-driver/bson/primitive" model "muskdaily.com/model" ) type AccountData struct { Data } func (this AccountData) InsertAccount(account model.Account) bool { collection := this.client.Database("muskdaily").Collection("accounts") _, err := collection.InsertOne(context.TODO(), account) if err != nil { panic(err) } return true } func (this AccountData) SelectAccounts(filter primitive.D) []*model.Account { var results []*model.Account collection := this.client.Database("muskdaily").Collection("accounts") cur, err := collection.Find(context.TODO(), filter) if err != nil { panic(err) } for cur.Next(context.TODO()) { var account model.Account err := cur.Decode(&account) if err != nil { panic(err) } results = append(results, &account) } return results } func (this AccountData) UpdateAccounts(filter, update primitive.D) (int64, int64) { collection := this.client.Database("muskdaily").Collection("accounts") updateResult, err := collection.UpdateMany(context.TODO(), filter, update) if err != nil { panic(err) } return updateResult.MatchedCount, updateResult.ModifiedCount }
#include <iostream>
#include <string>
#include <sstream>
#include <ctype.h>
using namespace std;

// Checks whether a string is a palindrome, considering only
// alphanumeric characters and ignoring case.
class Solution {
public:
    // In-place two-pointer scan. Replaces the original version that
    // built two intermediate strings via a stringstream: same results
    // (whitespace is non-alphanumeric and was skipped either way), but
    // O(1) extra memory instead of O(n).
    bool isPalindrome(string s) {
        int i = 0, j = static_cast<int>(s.size()) - 1;
        while (i < j) {
            if (!isalnum(static_cast<unsigned char>(s[i]))) {
                // Skip non-alphanumeric characters from the left.
                i++;
            } else if (!isalnum(static_cast<unsigned char>(s[j]))) {
                // Skip non-alphanumeric characters from the right.
                j--;
            } else if (tolower(static_cast<unsigned char>(s[i++])) !=
                       tolower(static_cast<unsigned char>(s[j--]))) {
                // Casts to unsigned char avoid UB in the ctype functions
                // when char is signed and holds a negative value.
                return false;
            }
        }
        return true;
    }
};
def merge_lmdbs():
    """Merge every '*sexp_cache' LMDB found under data/ into ./sexp_cache.

    Keys already present in the destination are kept as-is
    (overwrite=False). Each source database directory is removed from
    disk after it has been merged; unreadable sources are skipped.
    """
    import shutil  # local import: only needed for the post-merge cleanup

    dst_path = 'sexp_cache'
    # map_size must be an integer number of bytes (~100 GB reservation).
    dst_db = lmdb.open(dst_path, map_size=int(1e11), writemap=True,
                       readahead=False)
    files = glob('data/**/*sexp_cache', recursive=True)
    bar = ProgressBar(max_value=len(files))
    for i, src_path in enumerate(files):  # don't shadow the dst path name
        print(src_path)
        try:
            src_db = lmdb.open(src_path, map_size=int(1e11), readonly=True,
                               readahead=True, lock=False)
        except lmdb.Error as ex:
            # Best-effort merge: skip corrupt/unreadable sources.
            print(ex)
            continue
        # One write transaction per source DB. The previous code opened a
        # fresh write transaction per key, forcing a commit/sync for every
        # single put — dramatically slower with an identical end result.
        with src_db.begin() as src_txn, dst_db.begin(write=True) as dst_txn:
            for key, value in src_txn.cursor():
                # overwrite=False keeps the first value seen for each key.
                dst_txn.put(key, value, dupdata=False, overwrite=False)
        src_db.close()
        # shutil.rmtree instead of os.system('rm -r ...'): no shell, so no
        # quoting/injection issues with unusual path names.
        shutil.rmtree(src_path)
        bar.update(i)
    dst_db.close()
/**
 * Parses an AdapterService annotation and records it as an AdapterService
 * metadata entry. (The earlier doc said "AspectService" — a copy/paste
 * slip: this method writes an {@link EntryType#AdapterService} entry.)
 *
 * Records the implementation class, the optional adaptee filter
 * (validated first), the adaptee service class, the provided interfaces
 * and the common component attributes.
 *
 * @param annotation the AdapterService annotation being parsed
 */
private void parseAdapterService(Annotation annotation)
{
    EntryWriter writer = new EntryWriter(EntryType.AdapterService);
    m_writers.add(writer);

    // The annotated class itself is the adapter implementation.
    writer.put(EntryParam.impl, m_componentClassName);

    String adapteeFilter = annotation.get(EntryParam.adapteeFilter.toString());
    if (adapteeFilter != null)
    {
        // Fail fast on a syntactically invalid filter before recording it.
        Verifier.verifyFilter(adapteeFilter, 0);
        writer.put(EntryParam.adapteeFilter, adapteeFilter);
    }

    writer.putClass(annotation, EntryParam.adapteeService);
    parseCommonComponentAttributes(annotation, writer);

    // putClassArray returns the number of provided interfaces written;
    // when none are provided, verify that registered/unregistered
    // callbacks are absent (see checkRegisteredUnregisteredNotPresent).
    if (writer.putClassArray(annotation, EntryParam.provides, m_interfaces, m_exportService) == 0)
    {
        checkRegisteredUnregisteredNotPresent();
    }

    writer.putString(annotation, EntryParam.factoryMethod, null);
    writer.putString(annotation, EntryParam.propagate, null);
    parseAspectOrAdapterCallbackMethods(annotation, writer);
}
/**
 * Writes updated details for a movie that is already stored locally.
 * A movie that does not exist in the database is left untouched; when
 * the details yield no changed columns, no database call is made at all.
 */
public void updateMovie(MovieDetails details, int tmdbId) {
    ContentValues changedColumns = details.toContentValuesUpdate();
    if (changedColumns.size() == 0) {
        return; // nothing to write
    }
    // Stamp the row so callers can tell when it was last refreshed.
    changedColumns.put(SeriesGuideContract.Movies.LAST_UPDATED,
            System.currentTimeMillis());
    context.getContentResolver()
            .update(SeriesGuideContract.Movies.buildMovieUri(tmdbId),
                    changedColumns, null, null);
}
<reponame>alexa/apl-core-library /** * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0/ * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #include "../testeventloop.h" #include "apl/touch/pointerevent.h" #include "apl/animation/coreeasing.h" using namespace apl; class NativeGesturesScrollableTest : public DocumentWrapper { public: NativeGesturesScrollableTest() : DocumentWrapper() { config->set({ {RootProperty::kTapOrScrollTimeout, 5}, {RootProperty::kPointerInactivityTimeout, 250}, {RootProperty::kPointerSlopThreshold, 10}, {RootProperty::kUEScrollerDeceleration, 0.2}, {RootProperty::kUEScrollerVelocityEasing, "linear"}, {RootProperty::kScrollFlingVelocityLimitEasingVertical, CoreEasing::bezier(0,1,0,1)}, {RootProperty::kScrollFlingVelocityLimitEasingHorizontal, CoreEasing::bezier(0,1,0,1)}, }); } }; TEST_F(NativeGesturesScrollableTest, Configuration) { ASSERT_EQ(Object(5), config->getProperty(RootProperty::kTapOrScrollTimeout)); ASSERT_EQ(Object(0.5), config->getProperty(RootProperty::kSwipeAwayFulfillDistancePercentageThreshold)); ASSERT_EQ(Object(CoreEasing::bezier(0,0,0.58,1)), config->getProperty(RootProperty::kSwipeAwayAnimationEasing)); ASSERT_EQ(Object(500), config->getProperty(RootProperty::kSwipeVelocityThreshold)); ASSERT_EQ(Object(2000), config->getProperty(RootProperty::kSwipeMaxVelocity)); ASSERT_EQ(Object(200), config->getProperty(RootProperty::kDefaultSwipeAnimationDuration)); ASSERT_EQ(Object(400), config->getProperty(RootProperty::kMaxSwipeAnimationDuration)); ASSERT_EQ(Object(50), 
config->getProperty(RootProperty::kMinimumFlingVelocity)); ASSERT_EQ(Object(1200), config->getProperty(RootProperty::kMaximumFlingVelocity)); ASSERT_EQ(Object(250), config->getProperty(RootProperty::kPointerInactivityTimeout)); ASSERT_EQ(Object(10), config->getProperty(RootProperty::kPointerSlopThreshold)); ASSERT_EQ(Object(1000), config->getProperty(RootProperty::kScrollCommandDuration)); ASSERT_EQ(Object(500), config->getProperty(RootProperty::kScrollSnapDuration)); ASSERT_NEAR(1.48, config->getProperty(RootProperty::kScrollAngleSlopeVertical).getDouble(), 0.01); ASSERT_NEAR(0.64, config->getProperty(RootProperty::kScrollAngleSlopeHorizontal).getDouble(), 0.01); ASSERT_NEAR(0.84, config->getProperty(RootProperty::kSwipeAngleTolerance).getDouble(), 0.01); ASSERT_EQ(Object(CoreEasing::linear()), config->getProperty(RootProperty::kUEScrollerVelocityEasing)); ASSERT_EQ(Object(CoreEasing::bezier(.65,0,.35,1)), config->getProperty(RootProperty::kUEScrollerDurationEasing)); ASSERT_EQ(Object(3000), config->getProperty(RootProperty::kUEScrollerMaxDuration)); ASSERT_EQ(Object(0.2), config->getProperty(RootProperty::kUEScrollerDeceleration)); } static const char *SCROLL_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "id": "scrollings", "width": 200, "height": 300, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 }, "onDown": { "type": "SendEvent", "sequencer": "MAIN", "arguments": [ "onDown:${event.source.id}" ] }, "onMove": { "type": "SendEvent", "sequencer": "MAIN", "arguments": [ "onMove:${event.source.id}" ] }, "onUp": { "type": "SendEvent", "sequencer": "MAIN", "arguments": [ "onUp:${event.source.id}" ] }, "onCancel": { "type": "SendEvent", "sequencer": "MAIN", "arguments": [ 
"onCancel:${event.source.id}" ] }, "onPress": { "type": "SendEvent", "arguments": [ "onPress:${event.source.id}" ] } } ] } } })"; TEST_F(NativeGesturesScrollableTest, Scroll) { loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true, "onMove:green1")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:green1")); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(2600); ASSERT_EQ(Point(0, 725), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), false)); // Scroll back up ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:yellow8")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,150), true, "onMove:yellow8")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:yellow8")); ASSERT_EQ(Point(0, 675), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,200), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,200), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 625), component->scrollPosition()); advanceTime(2600); ASSERT_EQ(Point(0, 0), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollRotated) { loadDocument(SCROLL_TEST); TransformComponent(root, 
"scrollings", "rotate", 90); ASSERT_TRUE(CheckDirty(component, kPropertyTransform)); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:yellow2")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,100), true, "onMove:yellow2")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:yellow2")); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,100), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,100), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(2600); ASSERT_EQ(Point(0, 725), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollScaled) { loadDocument(SCROLL_TEST); TransformComponent(root, "scrollings", "scale", 2); ASSERT_TRUE(CheckDirty(component, kPropertyTransform)); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true, "onMove:green1")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:green1")); ASSERT_EQ(Point(0, 25), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(2600); ASSERT_EQ(Point(0, 362.5), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollThresholdsRemainInGlobalCoordinateDimensions) { 
loadDocument(SCROLL_TEST); TransformComponent(root, "scrollings", "scale", 2); ASSERT_TRUE(CheckDirty(component, kPropertyTransform)); ASSERT_EQ(Point(), component->scrollPosition()); // Pointer slop threshold not met ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,95), false, "onMove:green1")); ASSERT_EQ(Point(0, 0), component->scrollPosition()); advanceTime(300); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,90), true, "onUp:green1")); ASSERT_FALSE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_TRUE(CheckSendEvent(root, "onPress:green1")); // Min velocity not met advanceTime(600); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(800); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,90), false, "onMove:green1")); ASSERT_EQ(Point(0, 0), component->scrollPosition()); advanceTime(400); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,90), true, "onUp:green1")); ASSERT_EQ(Point(0, 0), component->scrollPosition()); ASSERT_FALSE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_TRUE(CheckSendEvent(root, "onPress:green1")); // Min velocity and pointer slop thresholds met advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,88), true, "onMove:green1")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:green1")); ASSERT_EQ(Point(0, 6), component->scrollPosition()); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, 
Point(0,88), true)); advanceTime(2900); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(0, component->scrollPosition().getX()); ASSERT_FLOAT_EQ(156, component->scrollPosition().getY()); } TEST_F(NativeGesturesScrollableTest, ScrollSingularity) { loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true, "onMove:green1")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:green1")); ASSERT_EQ(Point(0, 50), component->scrollPosition()); TransformComponent(root, "scrollings", "scale", 0); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_FALSE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_TRUE(session->checkAndClear()); } TEST_F(NativeGesturesScrollableTest, ScrollHover) { loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,200), false, "onDown:yellow2")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,150), true, "onMove:yellow2")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:yellow2")); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, 
Point(0,0), false)); advanceTime(2600); ASSERT_EQ(Point(0, 725), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollTerminate) { loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false, "onDown:green1")); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true, "onMove:green1")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:green1")); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(1600); // Interrupted here. ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), true, "onDown:red6")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:red6")); advanceTime(1000); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true)); auto currentPosition = component->scrollPosition(); advanceTime(500); ASSERT_EQ(currentPosition, component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollTapOrScrollTimeout) { config->set(RootProperty::kTapOrScrollTimeout, 60); loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(1,100), false, "onDown:green1")); // Under the timeout is not recognized as move that can trigger the gesture advanceTime(50); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(1,75), false, "onMove:green1")); // After actually triggers advanceTime(50); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(1,50), true, 
"onMove:green1")); ASSERT_TRUE(CheckSendEvent(root, "onCancel:green1")); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(1,50), true)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(2900); ASSERT_EQ(Point(0, 900), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollCommand) { loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); auto ptr = executeCommand("Scroll", {{"componentId", "scrollings"}, {"distance", 1}}, false); loop->advanceToEnd(); root->clearPending(); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(300, component->scrollPosition().getY()); } TEST_F(NativeGesturesScrollableTest, ScrollToCommand) { loadDocument(SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); auto ptr = executeCommand("ScrollToIndex", {{"componentId", "scrollings"}, {"index", 4}}, false); loop->advanceToEnd(); root->clearPending(); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(200, component->scrollPosition().getY()); } static const char *LIVE_SCROLL_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "id": "scrollings", "width": 200, "height": 500, "data": "${TestArray}", "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })"; TEST_F(NativeGesturesScrollableTest, LiveScroll) { config->pointerInactivityTimeout(100); auto myArray = LiveArray::create(ObjectArray{"red", "green", "yellow", "blue", "purple"}); config->liveData("TestArray", myArray); loadDocument(LIVE_SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,250), false)); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, 
Point(0,200), true)); // No update happened as not enough children to scroll ASSERT_EQ(Point(0, 0), component->scrollPosition()); advanceTime(100); // LiveArray got more items here. auto extender = ObjectArray{"red", "green", "yellow", "blue", "purple"}; myArray->insert(myArray->size(), extender.begin(), extender.end()); root->clearPending(); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true)); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, LiveScrollBackwards) { config->pointerInactivityTimeout(100); auto myArray = LiveArray::create(ObjectArray{"red", "green", "yellow", "blue", "purple"}); config->liveData("TestArray", myArray); loadDocument(LIVE_SCROLL_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,150), false)); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,200), true)); // No update happened as not enough children to scroll ASSERT_EQ(Point(0, 0), component->scrollPosition()); advanceTime(100); // LiveArray got more items here. 
auto extender = ObjectArray{"red", "green", "yellow", "blue", "purple"}; myArray->insert(0, extender.begin(), extender.end()); root->clearPending(); ASSERT_EQ(Point(0, 500), component->scrollPosition()); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,300), true)); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,300), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 400), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, LiveFling) { auto myArray = LiveArray::create(ObjectArray{"red", "green", "yellow", "blue", "purple"}); config->liveData("TestArray", myArray); loadDocument(LIVE_SCROLL_TEST); advanceTime(10); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,200), false)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,150), true)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true)); ASSERT_EQ(Point(), component->scrollPosition()); // LiveArray got more items here. 
auto extender = ObjectArray{"red", "green", "yellow", "blue", "purple"}; myArray->insert(0, extender.begin(), extender.end()); myArray->insert(myArray->size(), extender.begin(), extender.end()); myArray->insert(myArray->size(), extender.begin(), extender.end()); root->clearPending(); ASSERT_EQ(Point(0, 500), component->scrollPosition()); advanceTime(100); myArray->insert(0, extender.begin(), extender.end()); root->clearPending(); advanceTime(100); advanceTime(2400); ASSERT_EQ(Point(0, 1225), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, LiveFlingBackwards) { auto myArray = LiveArray::create(ObjectArray{"red", "green", "yellow", "blue", "purple"}); config->liveData("TestArray", myArray); loadDocument(LIVE_SCROLL_TEST); ASSERT_TRUE(CheckChildrenLaidOut(component, {0, 4}, true)); // Give ability to scroll backwards auto extender = ObjectArray{"red", "green", "yellow", "blue", "purple"}; myArray->insert(0, extender.begin(), extender.end()); root->clearPending(); ASSERT_TRUE(CheckChildrenLaidOut(component, {0, 9}, true)); ASSERT_EQ(Point(0, 500), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,150), true)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,200), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,200), true)); ASSERT_EQ(Point(0, 400), component->scrollPosition()); // LiveArray got more items here. 
myArray->insert(0, extender.begin(), extender.end()); myArray->insert(myArray->size(), extender.begin(), extender.end()); myArray->insert(myArray->size(), extender.begin(), extender.end()); root->clearPending(); ASSERT_TRUE(CheckChildrenLaidOut(component, {0, 2}, false)); ASSERT_TRUE(CheckChildrenLaidOut(component, {3, 19}, true)); ASSERT_TRUE(CheckChildrenLaidOut(component, {20, 24}, false)); ASSERT_EQ(Point(0, 600), component->scrollPosition()); advanceTime(100); ASSERT_EQ(Point(0, 675), component->scrollPosition()); ASSERT_TRUE(CheckChildrenLaidOut(component, {0, 1}, false)); ASSERT_TRUE(CheckChildrenLaidOut(component, {2, 19}, true)); ASSERT_TRUE(CheckChildrenLaidOut(component, {20, 24}, false)); advanceTime(100); ASSERT_EQ(Point(0, 650), component->scrollPosition()); ASSERT_TRUE(CheckChildrenLaidOut(component, {0, 1}, false)); ASSERT_TRUE(CheckChildrenLaidOut(component, {2, 19}, true)); ASSERT_TRUE(CheckChildrenLaidOut(component, {20, 24}, false)); advanceTime(2400); ASSERT_EQ(Point(0, 275), component->scrollPosition()); } static const char *LIVE_SCROLL_SPACED_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "id": "scrollings", "width": 200, "height": 500, "data": "${TestArray}", "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "spacing": 20, "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })"; TEST_F(NativeGesturesScrollableTest, LiveScrollBackwardsSpaced) { config->pointerInactivityTimeout(100); auto myArray = LiveArray::create(ObjectArray{"red", "green", "yellow", "blue", "purple"}); config->liveData("TestArray", myArray); loadDocument(LIVE_SCROLL_SPACED_TEST); advanceTime(10); auto extender = ObjectArray{"red", "green", "yellow", "blue", "purple"}; myArray->insert(0, extender.begin(), extender.end()); root->clearPending(); ASSERT_EQ(Point(0, 600), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, 
PointerEventType::kPointerDown, Point(0,150), false)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,200), true)); // No update happened as not enough children to scroll ASSERT_EQ(Point(0, 550), component->scrollPosition()); advanceTime(100); // LiveArray got even more items here. myArray->insert(0, extender.begin(), extender.end()); root->clearPending(); ASSERT_EQ(Point(0, 690), component->scrollPosition()); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,300), true)); ASSERT_EQ(Point(0, 710), component->scrollPosition()); advanceTime(100); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,300), true)); ASSERT_EQ(Point(0, 710), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, LiveFlingBackwardsSpaced) { auto myArray = LiveArray::create(ObjectArray{"red", "green", "yellow", "blue", "purple"}); config->liveData("TestArray", myArray); loadDocument(LIVE_SCROLL_SPACED_TEST); advanceTime(10); // Give ability to scroll backwards auto extender = ObjectArray{"red", "green", "yellow", "blue", "purple"}; myArray->insert(0, extender.begin(), extender.end()); root->clearPending(); ASSERT_EQ(Point(0, 600), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,150), true)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,200), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,200), true)); ASSERT_EQ(Point(0, 500), component->scrollPosition()); // LiveArray got more items here. 
myArray->insert(0, extender.begin(), extender.end()); myArray->insert(myArray->size(), extender.begin(), extender.end()); myArray->insert(myArray->size(), extender.begin(), extender.end()); root->clearPending(); ASSERT_EQ(Point(0, 640), component->scrollPosition()); advanceTime(100); advanceTime(100); advanceTime(2400); ASSERT_EQ(Point(0, 475), component->scrollPosition()); } static const char *SCROLL_SNAP_START_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "start", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })"; TEST_F(NativeGesturesScrollableTest, ScrollSnapStart) { loadDocument(SCROLL_SNAP_START_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(2600); ASSERT_EQ(Point(0, 725), component->scrollPosition()); advanceTime(500); ASSERT_EQ(Point(0, 700), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollSnapStartLimit) { loadDocument(SCROLL_SNAP_START_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); 
advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(980); advanceTime(1000); // Should be at the end limit, and not snap to item. ASSERT_EQ(Point(0, 950), component->scrollPosition()); // Go to start ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false)); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_EQ(Point(0, 900), component->scrollPosition()); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 850), component->scrollPosition()); advanceTime(980); advanceTime(1000); // Should be at the end limit, and not snap to item. 
ASSERT_EQ(Point(0, 850), component->scrollPosition()); }

// Horizontal Sequence (250x250 viewport, twelve 100x100 TouchWrapper children)
// with snap="start".
static const char *HORIZONTAL_SCROLL_SNAP_START_TEST = R"({ "type": "APL", "version": "1.7", "mainTemplate": { "items": { "type": "Sequence", "scrollDirection": "horizontal", "snap": "start", "width": 250, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 100, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 100, "height": 100 } } ] } } })";

// Fling left across a horizontal snap="start" sequence: the fling decelerates
// (position 725 after 2600ms) and then settles on an item start edge (700).
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapStart) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_START_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(50, 0), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(100, 0), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(725, 0), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(700, 0), component->scrollPosition());
}

// Mirror of HorizontalScrollSnapStart in RTL layout: gesture direction is
// reversed and scroll positions are negative.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapStartRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_START_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(-725, 0), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(-700, 0), component->scrollPosition());
}

// Fast drag to the (RTL) end of the list: at the scroll limit the position is
// clamped and no snap to an item edge occurs.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapStartLimitRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_START_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(1000);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-900, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    // NOTE(review): this repeats the previous kPointerMove; every sibling test
    // sends kPointerUp at this step. As written the pointer is never released,
    // so no fling occurs and the position stays at -850 -- confirm intent.
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
}

// Vertical Sequence (200x250 viewport, twelve 200x100 children) with
// snap="forceStart": always snaps to an item start, regardless of velocity.
static const char *SCROLL_SNAP_FORCE_START_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "forceStart", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })";

// forceStart snaps to an item start edge even when the release velocity is low
// (slow 800ms drag before the release).
TEST_F(NativeGesturesScrollableTest, ScrollSnapForceStartLowVelocity) {
    loadDocument(SCROLL_SNAP_FORCE_START_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,150), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true));
    ASSERT_EQ(Point(0, 50), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition,
kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 150), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(0, 100), component->scrollPosition());
}

// A fast fling to the end of a forceStart list must clamp at the limit (950)
// and not be forcefully snapped off it; flinging back to the start behaves the
// same way at position 0 (which happens to coincide with an item start).
TEST_F(NativeGesturesScrollableTest, ScrollSnapForceStartLimit) {
    loadDocument(SCROLL_SNAP_FORCE_START_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 50), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 100), component->scrollPosition());
    advanceTime(1000);
    advanceTime(1000);
    advanceTime(1000);
    // Should not forcefully snap if scrolled to end of list
    ASSERT_EQ(Point(0, 950), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 900), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 850), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit (which is accidentally snap).
    ASSERT_EQ(Point(), component->scrollPosition());
}

// Horizontal variant of the forceStart document (250x250 viewport, twelve
// 100x100 children).
static const char *HORIZONTAL_SCROLL_SNAP_FORCE_START_TEST = R"({ "type": "APL", "version": "1.7", "mainTemplate": { "items": { "type": "Sequence", "scrollDirection": "horizontal", "snap": "forceStart", "width": 250, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 100, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 100, "height": 100 } } ] } } })";

// Horizontal forceStart snaps to an item start even on a low-velocity release.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceStartLowVelocity) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_START_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(150,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_EQ(Point(50, 0), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(150, 0), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(100, 0), component->scrollPosition());
}

// RTL mirror of the low-velocity forceStart test.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceStartLowVelocityRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_START_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(150,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(150,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-150, 0), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
}

// RTL forceStart: a fast fling clamps at the end limit (-950) without a forced
// snap; flinging back clamps at the start.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceStartLimitRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_START_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    advanceTime(1000);
    // Should not forcefully snap if scrolled to end of list
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-900, 0),
component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
    advanceTime(980);
    ASSERT_EQ(Point(), component->scrollPosition());
    advanceTime(1000);
    // Should not forcefully snap if scrolled to start of list
    ASSERT_EQ(Point(0, 0), component->scrollPosition());
}

// Vertical Sequence (200x250 viewport, twelve 200x100 children) with
// snap="center": items align to the center of the viewport.
static const char *SCROLL_SNAP_CENTER_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "center", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })";

// Fling over a snap="center" list: after the fling decelerates (785) the
// position settles so that an item is centered in the viewport (825).
TEST_F(NativeGesturesScrollableTest, ScrollSnapCenter) {
    loadDocument(SCROLL_SNAP_CENTER_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,110), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 60), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 110), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(0, 785), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(0, 825), component->scrollPosition());
}

// snap="center" at the scroll limits: the position clamps (950 / 0) and is not
// snapped off the limit.
TEST_F(NativeGesturesScrollableTest, ScrollSnapCenterLimit) {
    loadDocument(SCROLL_SNAP_CENTER_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false));
    // root->updateTime(10);
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 50), component->scrollPosition());
    // root->updateTime(20);
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 100), component->scrollPosition());
    // root->updateTime(1000);
    advanceTime(980);
    // root->updateTime(2000);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(0, 950), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    // root->updateTime(2010);
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 900), component->scrollPosition());
    // root->updateTime(20);
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 850), component->scrollPosition());
    // root->updateTime(3000);
    advanceTime(980);
    // root->updateTime(4000);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(), component->scrollPosition());
}

// Horizontal variant of the snap="center" document.
static const char *HORIZONTAL_SCROLL_SNAP_CENTER_TEST = R"({ "type": "APL", "version": "1.7", "mainTemplate": { "items": { "type": "Sequence", "scrollDirection": "horizontal", "snap": "center", "width": 250, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 100, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 100, "height": 100 } } ] } } })";

// Horizontal fling with snap="center": decelerates to 785 then centers an
// item at 825.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapCenter) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_CENTER_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(110,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(60, 0), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(110, 0), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(785, 0), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(825, 0), component->scrollPosition());
}

// RTL mirror of HorizontalScrollSnapCenter.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapCenterRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_CENTER_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(60,0), true));
    ASSERT_EQ(Point(-60, 0), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(110,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(110,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-110, 0), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(-785, 0), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(-825, 0), component->scrollPosition());
}

// RTL snap="center" at the scroll limits: clamps at -950 / 0 without snapping.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapCenterLimitRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_CENTER_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    // NOTE(review): the release position (10,0) does not match the preceding
    // move at (100,0); sibling tests release at the move position -- presumably
    // a typo for Point(100,0). Confirm before changing, since the following
    // assertion (-100) currently holds as written.
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(10,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-900, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(), component->scrollPosition());
}

// Vertical Sequence (200x250 viewport, twelve 200x100 children) with
// snap="forceCenter": always centers an item, regardless of velocity.
static const char *SCROLL_SNAP_FORCE_CENTER_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "forceCenter", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })";

// forceCenter snaps so an item is centered (125) even on a low-velocity
// release.
TEST_F(NativeGesturesScrollableTest, ScrollSnapForceCenterLowVelocity) {
    loadDocument(SCROLL_SNAP_FORCE_CENTER_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,150), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true));
    ASSERT_EQ(Point(0, 50), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition,
kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 150), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(0, 125), component->scrollPosition());
}

// forceCenter at the scroll limits: a fast fling clamps at 950 / 0 and is not
// forcefully re-centered off the limit.
TEST_F(NativeGesturesScrollableTest, ScrollSnapForceCenterLimit) {
    loadDocument(SCROLL_SNAP_FORCE_CENTER_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false));
    advanceTime(5);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 50), component->scrollPosition());
    advanceTime(5);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 100), component->scrollPosition());
    advanceTime(1490);
    ASSERT_EQ(Point(0, 950), component->scrollPosition());
    advanceTime(1000);
    advanceTime(500);
    // Should not forcefully snap if scrolled to end of list
    ASSERT_EQ(Point(0, 950), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 900), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 850), component->scrollPosition());
    advanceTime(980);
    ASSERT_EQ(Point(), component->scrollPosition());
    advanceTime(1000);
    // Should not forcefully snap if scrolled to start of list
    ASSERT_EQ(Point(0, 0), component->scrollPosition());
}

// Horizontal variant of the forceCenter document.
static const char *HORIZONTAL_SCROLL_SNAP_FORCE_CENTER_TEST = R"({ "type": "APL", "version": "1.7", "mainTemplate": { "items": { "type": "Sequence", "scrollDirection": "horizontal", "snap": "forceCenter", "width": 250, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 100, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 100, "height": 100 } } ] } } })";

// Horizontal forceCenter centers an item (125) even on a low-velocity release.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceCenterLowVelocity) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_CENTER_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(150,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_EQ(Point(50, 0), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(150, 0), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(125, 0), component->scrollPosition());
}

// RTL mirror of the low-velocity forceCenter test.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceCenterLowVelocityRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_CENTER_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(150,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(150,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-150, 0), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(-125, 0), component->scrollPosition());
}

// RTL forceCenter at the scroll limits: clamps at -950 / 0 without a forced
// snap.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceCenterLimitRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_CENTER_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(5);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(5);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(1490);
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());
    advanceTime(1000);
    advanceTime(500);
    // Should not forcefully snap if scrolled to end of list
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-900, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
    advanceTime(980);
    ASSERT_EQ(Point(), component->scrollPosition());
    advanceTime(1000);
    // Should not forcefully snap if scrolled to start of list
    ASSERT_EQ(Point(0, 0), component->scrollPosition());
}

// Vertical Sequence (200x250 viewport, twelve 200x100 children) with
// snap="end": items align to the end edge of the viewport.
static const char *SCROLL_SNAP_END_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "end", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })";

// Fling over a snap="end" list: decelerates to 785 and then settles with an
// item end edge aligned to the viewport end (750).
TEST_F(NativeGesturesScrollableTest, ScrollSnapEnd) {
    loadDocument(SCROLL_SNAP_END_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,110), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 60), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 110), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(0, 785), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(0, 750), component->scrollPosition());
}

// snap="end" at the scroll limits: the position clamps (950 / 0) and does not
// snap off the limit.
TEST_F(NativeGesturesScrollableTest, ScrollSnapEndLimit) {
    loadDocument(SCROLL_SNAP_END_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 50), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 100), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(0, 950), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true));
    ASSERT_EQ(Point(0, 900), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(0, 850), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(), component->scrollPosition());
}

// Horizontal variant of the snap="end" document.
static const char *HORIZONTAL_SCROLL_SNAP_END_TEST = R"({ "type": "APL", "version": "1.7", "mainTemplate": { "items": { "type": "Sequence", "scrollDirection": "horizontal", "snap": "end", "width": 250, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 100, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 100, "height": 100 } } ] } } })";

// Horizontal fling with snap="end": decelerates to 785 and settles at 750.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapEnd) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_END_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(110,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(60, 0), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(110, 0), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(785, 0), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(750, 0), component->scrollPosition());
}

// RTL mirror of HorizontalScrollSnapEnd.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapEndRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_END_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(60,0), true));
    ASSERT_EQ(Point(-60, 0), component->scrollPosition());
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(110,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(110,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-110, 0), component->scrollPosition());
    advanceTime(2600);
    ASSERT_EQ(Point(-785, 0), component->scrollPosition());
    advanceTime(500);
    ASSERT_EQ(Point(-750, 0), component->scrollPosition());
}

// RTL snap="end" at the scroll limits: clamps at -950 / 0 without snapping.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapEndLimitRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_END_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-900, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
    // NOTE(review): siblings use advanceTime(980) here; 1980 may be a typo,
    // though it only lengthens the settle window -- confirm.
    advanceTime(1980);
    advanceTime(1000);
    // Should be at the end limit, and not snap to item.
    ASSERT_EQ(Point(), component->scrollPosition());
}

// Horizontal Sequence (250x250 viewport, twelve 100x100 children) with
// snap="forceEnd": always snaps to an item end edge, regardless of velocity.
static const char *HORIZONTAL_SCROLL_SNAP_FORCE_END_TEST = R"({ "type": "APL", "version": "1.7", "mainTemplate": { "items": { "type": "Sequence", "scrollDirection": "horizontal", "snap": "forceEnd", "width": 250, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 100, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 100, "height": 100 } } ] } } })";

// forceEnd snaps forward to an item end edge (150) even on a low-velocity
// release.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceEndLowVelocity) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_END_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(50, 0), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(100, 0), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(150, 0), component->scrollPosition());
}

// RTL mirror of the low-velocity forceEnd test.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceEndLowVelocityRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_END_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(200);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(800);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(1000);
    ASSERT_EQ(Point(-150, 0), component->scrollPosition());
}

// RTL forceEnd at the scroll limits: clamps at -950 / 0 and is not forcefully
// snapped off the limit.
TEST_F(NativeGesturesScrollableTest, HorizontalScrollSnapForceEndLimitRTL) {
    loadDocument(HORIZONTAL_SCROLL_SNAP_FORCE_END_TEST);
    component->setProperty(kPropertyLayoutDirectionAssigned, "RTL");
    root->clearPending();
    ASSERT_TRUE(CheckDirty(component, kPropertyLayoutDirection, kPropertyNotifyChildrenChanged, kPropertyScrollPosition, kPropertyVisualHash));
    ASSERT_EQ(Point(), component->scrollPosition());

    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-50, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(100,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(100,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-100, 0), component->scrollPosition());
    advanceTime(1480);
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());
    advanceTime(500);
    // Should forcefully snap
    ASSERT_EQ(Point(-950, 0), component->scrollPosition());

    // Go to start
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(100,0), false));
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(50,0), true));
    ASSERT_EQ(Point(-900, 0), component->scrollPosition());
    advanceTime(10);
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true));
    ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true));
    ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged));
    ASSERT_EQ(Point(-850, 0), component->scrollPosition());
    advanceTime(1980);
    ASSERT_EQ(Point(), component->scrollPosition());
    advanceTime(1000);
    // Should not forcefully snap if scrolled to end of list
    ASSERT_EQ(Point(0, 0), component->scrollPosition());
}

// Vertical Sequence (200x250 viewport, twelve 200x100 children) with
// snap="forceEnd".
static const char *SCROLL_SNAP_FORCE_END_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "forceEnd", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })";

// Vertical forceEnd, low-velocity release (body continues past this chunk).
TEST_F(NativeGesturesScrollableTest, ScrollSnapForceEndLowVelocity) {
    loadDocument(SCROLL_SNAP_FORCE_END_TEST);
    ASSERT_EQ(Point(), component->scrollPosition());
ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); advanceTime(200); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(800); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(1000); ASSERT_EQ(Point(0, 150), component->scrollPosition()); } TEST_F(NativeGesturesScrollableTest, ScrollSnapForceEndLimit) { loadDocument(SCROLL_SNAP_FORCE_END_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,0), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,0), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 100), component->scrollPosition()); advanceTime(1480); ASSERT_EQ(Point(0, 950), component->scrollPosition()); advanceTime(500); // Should forcefully snap ASSERT_EQ(Point(0, 950), component->scrollPosition()); // Go to start ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,0), false)); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_EQ(Point(0, 900), component->scrollPosition()); advanceTime(10); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,100), true)); 
ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,100), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 850), component->scrollPosition()); advanceTime(1980); ASSERT_EQ(Point(), component->scrollPosition()); advanceTime(1000); // Should not forcefully snap if scrolled to limit ASSERT_EQ(Point(0, 0), component->scrollPosition()); } static const char *SCROLL_SNAP_SPACED_CENTER_TEST = R"({ "type": "APL", "version": "1.4", "mainTemplate": { "items": { "type": "Sequence", "snap": "center", "width": 200, "height": 250, "data": ["red", "green", "yellow", "blue", "purple", "gray", "red", "green", "yellow", "blue", "purple", "gray"], "items": [ { "type": "TouchWrapper", "id": "${data}${index}", "spacing": 50, "width": 200, "height": 100, "item": { "type": "Frame", "backgroundColor": "${data}", "width": 200, "height": 100 } } ] } } })"; TEST_F(NativeGesturesScrollableTest, ScrollSnapSpacedCenter) { config->pointerInactivityTimeout(600); loadDocument(SCROLL_SNAP_SPACED_CENTER_TEST); ASSERT_EQ(Point(), component->scrollPosition()); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(0,100), false)); advanceTime(500); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerMove, Point(0,50), true)); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerUp, Point(0,50), true)); ASSERT_TRUE(CheckDirty(component, kPropertyScrollPosition, kPropertyNotifyChildrenChanged)); ASSERT_EQ(Point(0, 50), component->scrollPosition()); advanceTime(2500); ASSERT_EQ(Point(0, 300), component->scrollPosition()); advanceTime(1); advanceTime(1000); ASSERT_EQ(Point(0, 225), component->scrollPosition()); } static const char *SCROLL_TRIGGERS_SCROLL = R"apl( { "type": "APL", "version": "1.5", "mainTemplate": { "item": { "type": "ScrollView", "id": "SCROLLER", "width": 200, "height": 200, "item": { "type": "Frame", "width": 100, "height": 600 }, "onScroll": { "when": 
"${event.source.position > 0.5}", "type": "Scroll", "distance": 0.5, "sequencer": "OTHER" } } } } )apl"; // Execute a "Scroll" command, which will trigger a _second_ "Scroll" command. TEST_F(NativeGesturesScrollableTest, ScrollTriggersScroll) { metrics.size(200,200); loadDocument(SCROLL_TRIGGERS_SCROLL); ASSERT_TRUE(component); ASSERT_EQ(Point(0,0), component->scrollPosition()); auto action = executeCommand("Scroll", {{"componentId", "SCROLLER"}, {"distance", 1}}, false); ASSERT_TRUE(action); // Skip ahead TWO scroll delays. The first scroll command will complete in a single step and trigger // the second scroll command, which will ALSO complete in a single step. The second scroll command // will trigger a THIRD scroll command. auto delta = config->getScrollCommandDuration(); // How long the scroll command should take advanceTime(delta * 2); ASSERT_EQ(Point(0,300), component->scrollPosition()); // distance = 100% + 50% = 300 dp ASSERT_FALSE(action->isPending()); // The THIRD scroll command will complete within this time. It will try to trigger a FOURTH scroll command, // but that will be dropped because the scroll view is already at the maximum scroll position advanceTime(delta * 2); ASSERT_EQ(Point(0,400), component->scrollPosition()); } // When native scrolling (using touch), once we trigger the "Scroll" command the touch interaction terminates. TEST_F(NativeGesturesScrollableTest, ScrollViewCancelNativeScrolling) { metrics.size(200,200); loadDocument(SCROLL_TRIGGERS_SCROLL); ASSERT_FALSE(root->handlePointerEvent({PointerEventType::kPointerDown, Point(10,190)})); // Scroll up 90 units advanceTime(100); ASSERT_TRUE(root->handlePointerEvent({PointerEventType::kPointerMove, Point(10,100)})); ASSERT_EQ(Point(0,90), component->scrollPosition()); // Scroll up another 50 units. 
The Scroll method should execute and cancel the manual scrolling advanceTime(100); ASSERT_TRUE(root->handlePointerEvent({PointerEventType::kPointerMove, Point(10,50)})); ASSERT_EQ(Point(0,140), component->scrollPosition()); // Keep scrolling - but the gesture should be cancelled now, so nothing happens ASSERT_TRUE(root->handlePointerEvent({PointerEventType::kPointerMove, Point(10,10)})); ASSERT_EQ(Point(0,140), component->scrollPosition()); // Now delay until the Scroll command has finished auto delta = config->getScrollCommandDuration(); // How long the scroll command should take advanceTime(delta); ASSERT_EQ(Point(0,240), component->scrollPosition()); // Releasing the pointer should not do anything ASSERT_TRUE(root->handlePointerEvent({PointerEventType::kPointerUp, Point(10,0)})); ASSERT_EQ(Point(0,240), component->scrollPosition()); } static const char *EDIT_TEXT_IN_TAP_TOUCHABLE = R"apl({ "type": "APL", "version": "1.6", "theme": "dark", "mainTemplate": { "items": [ { "type": "Sequence", "width": "100%", "height": "100%", "alignItems": "center", "justifyContent": "spaceAround", "data": [{"color": "blue", "text": "Magic"}], "items": [ { "type": "Frame", "backgroundColor": "white", "items": [ { "type": "TouchWrapper", "width": 500, "item": { "type": "Frame", "backgroundColor": "${data.color}", "height": 200, "items": { "type": "EditText", "id": "targetEdit", "text": "${data.text}", "width": 500, "height": 100, "fontSize": 60 } }, "onDown": { "type": "SendEvent", "arguments": "onDown", "sequencer": "MAIN" }, "onUp": { "type": "SendEvent", "arguments": "onUp", "sequencer": "MAIN" } } ] } ] } ] } })apl"; TEST_F(NativeGesturesScrollableTest, WrappedEditTextTap) { config->enableExperimentalFeature(apl::RootConfig::kExperimentalFeatureRequestKeyboard); loadDocument(EDIT_TEXT_IN_TAP_TOUCHABLE); ASSERT_TRUE(HandlePointerEvent(root, PointerEventType::kPointerDown, Point(400,50), false, "onDown")); advanceTime(20); 
ASSERT_TRUE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerUp, Point(400,50)))); ASSERT_TRUE(root->hasEvent()); auto event = root->popEvent(); ASSERT_EQ(apl::kEventTypeOpenKeyboard, event.getType()); ASSERT_TRUE(CheckSendEvent(root, "onUp")); } static const char *EDIT_TEXT_IN_UP_TOUCHABLE = R"apl({ "type": "APL", "version": "1.6", "theme": "dark", "mainTemplate": { "items": [ { "type": "Sequence", "width": "100%", "height": "100%", "alignItems": "center", "justifyContent": "spaceAround", "data": [{"color": "blue", "text": "Magic"}], "items": [ { "type": "Frame", "backgroundColor": "white", "items": [ { "type": "TouchWrapper", "width": 500, "item": { "type": "Frame", "backgroundColor": "${data.color}", "height": 200, "items": { "type": "EditText", "id": "targetEdit", "text": "${data.text}", "width": 500, "height": 100, "fontSize": 60 } }, "onUp": { "type": "SendEvent", "arguments": "onUp", "sequencer": "MAIN" } } ] } ] } ] } })apl"; TEST_F(NativeGesturesScrollableTest, WrappedEditTextUp) { config->enableExperimentalFeature(apl::RootConfig::kExperimentalFeatureRequestKeyboard); loadDocument(EDIT_TEXT_IN_UP_TOUCHABLE); ASSERT_FALSE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerDown, Point(400,50)))); advanceTime(20); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerUp, Point(400,50)))); ASSERT_TRUE(root->hasEvent()); auto event = root->popEvent(); ASSERT_EQ(apl::kEventTypeOpenKeyboard, event.getType()); ASSERT_TRUE(CheckSendEvent(root, "onUp")); } static const char *EDIT_TEXT_IN_NESTED_TOUCHABLES = R"apl({ "type": "APL", "version": "1.6", "theme": "dark", "mainTemplate": { "items": [ { "type": "Sequence", "width": "100%", "height": "100%", "alignItems": "center", "justifyContent": "spaceAround", "data": [{"color": "blue", "text": "Magic"}], "items": [ { "type": "Frame", "backgroundColor": "white", "items": [ { "type": "TouchWrapper", "width": 500, "item": { "type": "Frame", "backgroundColor": 
"${data.color}", "height": 200, "items": { "type": "TouchWrapper", "item": { "type": "EditText", "id": "targetEdit", "text": "${data.text}", "width": 500, "height": 100, "fontSize": 60 }, "onUp": { "type": "SendEvent", "arguments": "onUpInner", "sequencer": "MAIN" } } }, "onUp": { "type": "SendEvent", "arguments": "onUpOuter", "sequencer": "MAIN" } } ] } ] } ] } })apl"; TEST_F(NativeGesturesScrollableTest, WrappedEditTextNestedTouchWrappers) { config->enableExperimentalFeature(apl::RootConfig::kExperimentalFeatureRequestKeyboard); loadDocument(EDIT_TEXT_IN_NESTED_TOUCHABLES); ASSERT_FALSE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerDown, Point(400,50)))); advanceTime(20); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerUp, Point(400,50)))); ASSERT_TRUE(root->hasEvent()); auto event = root->popEvent(); ASSERT_EQ(apl::kEventTypeOpenKeyboard, event.getType()); ASSERT_TRUE(CheckSendEvent(root, "onUpInner")); ASSERT_FALSE(root->hasEvent()); } static const char *EDIT_TEXT_IN_SWIPE_TOUCHABLE = R"apl({ "type": "APL", "version": "1.6", "theme": "dark", "mainTemplate": { "items": [ { "type": "Sequence", "width": "100%", "height": "100%", "alignItems": "center", "justifyContent": "spaceAround", "data": [{"color": "blue", "text": "Magic"}], "items": [ { "type": "Frame", "backgroundColor": "white", "items": [ { "type": "TouchWrapper", "width": 500, "item": { "type": "Frame", "backgroundColor": "${data.color}", "height": 200, "items": { "type": "EditText", "id": "targetEdit", "text": "${data.text}", "width": 500, "height": 100, "fontSize": 60 } }, "gestures": [ { "type": "SwipeAway", "direction": "left", "action": "reveal", "items": { "type": "Frame", "backgroundColor": "purple", "width": "100%", "items": { "type": "Frame", "width": "50%", "backgroundColor": "red", "items": { "type": "Text", "text": "You've swiped", "fontSize": 60, "fontColor": "white" } } }, "onSwipeDone": { "type": "SendEvent", "arguments": ["delete", "${index}"] 
} } ] } ] } ] } ] } })apl"; TEST_F(NativeGesturesScrollableTest, WrappedEditTextSwipe) { config->enableExperimentalFeature(apl::RootConfig::kExperimentalFeatureRequestKeyboard); loadDocument(EDIT_TEXT_IN_SWIPE_TOUCHABLE); ASSERT_FALSE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerDown, Point(400,50)))); advanceTime(2000); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerMove, Point(50,50)))); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(PointerEventType::kPointerUp, Point(50,50)))); advanceTime(2000); ASSERT_TRUE(root->hasEvent()); auto event = root->popEvent(); ASSERT_EQ(kEventTypeSendEvent, event.getType()); } static const char *EDITTEXT = R"({ "type": "APL", "version": "1.6", "theme": "dark", "mainTemplate": { "item": { "type": "EditText", "height": 100, "hint": "Example EditText", "hintWeight": "100", "hintColor": "grey" } } })"; TEST_F(NativeGesturesScrollableTest, KeyboardRequestedOnTap) { config->enableExperimentalFeature(RootConfig::kExperimentalFeatureRequestKeyboard); loadDocument(EDITTEXT); ASSERT_FALSE(root->handlePointerEvent(PointerEvent(apl::kPointerDown, Point(10, 10)))); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(apl::kPointerUp, Point(10, 10)))); ASSERT_TRUE(root->hasEvent()); auto event = root->popEvent(); ASSERT_EQ(apl::kEventTypeOpenKeyboard, event.getType()); } static const char *DISPLAY_CONDITIONAL = R"({ "type": "APL", "version": "1.7", "layouts": { "AlexaTextListItem": { "parameters": [ { "name": "primaryText", "type": "string" }, { "name": "secondaryText", "type": "string" }, { "name": "primaryAction", "type": "any" } ], "items": [ { "type": "TouchWrapper", "width": "100%", "height": 150, "onPress": "${primaryAction}", "item": { "type": "Container", "width": "100%", "inheritParentState": true, "items": [ { "type": "Container", "grow": 1, "shrink": 1, "width": "100%", "items": [ { "type": "Text", "text": "${primaryText}", "fontSize": 80 }, { "type": "Text", "text": "${secondaryText}", 
"fontSize": 50 } ] } ] } } ] } }, "mainTemplate": { "items": [ { "type": "Container", "height": "100%", "width": "100%", "items": [ { "type": "Text", "text": "Recently Played", "fontSize": "25", "paddingLeft": 20, "paddingBottom": 50, "paddingTop": 20 }, { "type": "Sequence", "id": "scrollable", "height": "100%", "shrink": 1, "data": [ "I am string One", "I am string Two", "I am string Three", "I am string Four", "I am string Five", "I am string Six", "I am string Seven", "I am string Eight", "I am string Nine" ], "scrollDirection": "vertical", "items": [ { "type": "AlexaTextListItem", "display": "${index <= 5 ? 'normal' : 'none'}", "primaryText": "${data}", "secondaryText": "${index}", "primaryAction": { "type": "SendEvent", "arguments": ["${index}"] } } ] } ] } ] } })"; TEST_F(NativeGesturesScrollableTest, DisplayConditional) { metrics.size(1280, 800); loadDocument(DISPLAY_CONDITIONAL); auto scrollable = component->getCoreChildAt(1); ASSERT_EQ(9, scrollable->getChildCount()); ASSERT_EQ(Point(0,0), scrollable->scrollPosition()); ASSERT_EQ(kDisplayNormal, scrollable->getCoreChildAt(0)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNormal, scrollable->getCoreChildAt(1)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNormal, scrollable->getCoreChildAt(2)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNormal, scrollable->getCoreChildAt(3)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNormal, scrollable->getCoreChildAt(4)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNormal, scrollable->getCoreChildAt(5)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNone, scrollable->getCoreChildAt(6)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNone, scrollable->getCoreChildAt(7)->getCalculated(kPropertyDisplay).getInteger()); ASSERT_EQ(kDisplayNone, scrollable->getCoreChildAt(8)->getCalculated(kPropertyDisplay).getInteger()); 
ASSERT_FALSE(root->handlePointerEvent(PointerEvent(apl::kPointerDown, Point(10, 400)))); advanceTime(50); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(apl::kPointerMove, Point(10, 100)))); advanceTime(500); ASSERT_TRUE(root->handlePointerEvent(PointerEvent(apl::kPointerUp, Point(10, 100)))); advanceTime(50); ASSERT_EQ(Point(0,180), scrollable->scrollPosition()); }
def Vegas(integrand, ndim, userdata=NULL, epsrel=EPSREL, epsabs=EPSABS, flags=0,
          ncomp=1, seed=None, mineval=MINEVAL, maxeval=MAXEVAL, nstart=NSTART,
          nincrease=NINCREASE, nbatch=NBATCH, gridno=GRIDNO, statefile=NULL, nvec=1):
    """Integrate `integrand` with the Cuba library's Vegas algorithm.

    The parameters mirror the C `Vegas` routine: `ndim` is the number of
    integration dimensions, `ncomp` the number of vector components of the
    integrand, `epsrel`/`epsabs` the requested relative/absolute accuracies,
    and the remaining arguments tune Vegas' grid and sampling behaviour
    (see the Cuba manual).  `seed=None` is mapped to 0, which selects
    Cuba's default quasi-random (Sobol) sequence.

    Returns a dict with keys `neval` (evaluations actually used), `fail`
    (0 on success), `comp`, and `results` -- a list of `ncomp` dicts, each
    holding `integral`, `error` and `prob` for one component.
    """
    # Output slots filled in by the C library.
    neval = c_int()
    fail = c_int()
    # NOTE(review): `comp` is never handed to lib.Vegas, so the 'comp' entry
    # of the returned dict is always 0; kept as-is for backward compatibility.
    comp = c_int()
    ARR = c_double * ncomp
    integral = ARR()
    error = ARR()
    prob = ARR()
    if seed is None:
        seed = 0  # 0 -> Cuba's default (Sobol) random-number source
    # NOTE(review): `spin` is not defined in this function; it is presumably a
    # module-level NULL-like pointer for Cuba's "spinning cores" argument --
    # TODO confirm against the module preamble.
    lib.Vegas(ndim, ncomp, wrap_integrand_vegas(integrand), userdata,
              c_int(nvec), c_double(epsrel), c_double(epsabs), flags, seed,
              mineval, maxeval, nstart, nincrease, nbatch, gridno, statefile,
              spin, byref(neval), byref(fail), integral, error, prob)
    return dict(neval=neval.value, fail=fail.value, comp=comp.value,
                results=[{
                    'integral': integral[comp],
                    'error': error[comp],
                    'prob': prob[comp],
                } for comp in range(ncomp)])
<reponame>aeriksson/MeiliSearch
// Facet filter parsing and facet-key (de)serialization.
//
// A facet filter is given by the user as a JSON string such as
// `[["color:red", "color:blue"], "size:40"]`; inner arrays are OR groups
// and the outer array ANDs them together.  `FacetKey` is the normalized
// (field id, lowercased value) pair used as the LMDB key for facet data.

use std::borrow::Cow;
use std::collections::HashMap;
use std::hash::Hash;
use std::ops::Deref;

use cow_utils::CowUtils;
use either::Either;
use heed::types::{Str, OwnedType};
use indexmap::IndexMap;
use serde_json::Value;

use meilisearch_schema::{FieldId, Schema};
use meilisearch_types::DocumentId;

use crate::database::MainT;
use crate::error::{FacetError, MResult};
use crate::store::BEU16;

/// Data structure used to represent a boolean expression in the form of nested arrays.
/// Values in the outer array are and-ed together, values in the inner arrays are or-ed together.
#[derive(Debug, PartialEq)]
pub struct FacetFilter(Vec<Either<Vec<FacetKey>, FacetKey>>);

impl Deref for FacetFilter {
    type Target = Vec<Either<Vec<FacetKey>, FacetKey>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl FacetFilter {
    /// Parses `s` (a JSON array of "attr:value" strings and/or one-level-deep
    /// arrays of such strings) into a `FacetFilter`.
    ///
    /// Errors when no attributes are registered for faceting, when the JSON is
    /// invalid, when an array (outer or inner) is empty, when nesting is deeper
    /// than two levels, or when a facet key is malformed / not set for faceting.
    pub fn from_str(
        s: &str,
        schema: &Schema,
        attributes_for_faceting: &[FieldId],
    ) -> MResult<FacetFilter> {
        if attributes_for_faceting.is_empty() {
            return Err(FacetError::NoAttributesForFaceting.into());
        }
        let parsed = serde_json::from_str::<Value>(s).map_err(|e| FacetError::ParsingError(e.to_string()))?;
        let mut filter = Vec::new();
        match parsed {
            Value::Array(and_exprs) => {
                if and_exprs.is_empty() {
                    return Err(FacetError::EmptyArray.into());
                }
                for expr in and_exprs {
                    match expr {
                        // A bare string at the top level is a single AND-ed condition.
                        Value::String(s) => {
                            let key = FacetKey::from_str(
                                &s,
                                schema,
                                attributes_for_faceting)?;
                            filter.push(Either::Right(key));
                        }
                        // An inner array is a group of OR-ed conditions;
                        // only strings are allowed inside (no deeper nesting).
                        Value::Array(or_exprs) => {
                            if or_exprs.is_empty() {
                                return Err(FacetError::EmptyArray.into());
                            }
                            let mut inner = Vec::new();
                            for expr in or_exprs {
                                match expr {
                                    Value::String(s) => {
                                        let key = FacetKey::from_str(
                                            &s,
                                            schema,
                                            attributes_for_faceting)?;
                                        inner.push(key);
                                    }
                                    bad_value => return Err(FacetError::unexpected_token(&["String"], bad_value).into()),
                                }
                            }
                            filter.push(Either::Left(inner));
                        }
                        bad_value => return Err(FacetError::unexpected_token(&["Array", "String"], bad_value).into()),
                    }
                }
                Ok(Self(filter))
            }
            bad_value => Err(FacetError::unexpected_token(&["Array"], bad_value).into()),
        }
    }
}

/// A (field id, value) pair identifying one facet value.
/// The value is always stored lowercased (see `FacetKey::new`).
#[derive(Debug, Eq, PartialEq, Hash)]
#[repr(C)]
pub struct FacetKey(FieldId, String);

impl FacetKey {
    /// Builds a key, lowercasing `value`; the original `String` is reused
    /// when it is already lowercase (no reallocation in that case).
    pub fn new(field_id: FieldId, value: String) -> Self {
        let value = match value.cow_to_lowercase() {
            Cow::Borrowed(_) => value,
            Cow::Owned(s) => s,
        };
        Self(field_id, value)
    }

    /// The facet's field id.
    pub fn key(&self) -> FieldId {
        self.0
    }

    /// The (lowercased) facet value.
    pub fn value(&self) -> &str {
        &self.1
    }

    // TODO improve parser
    /// Parses a single `"attr:value"` condition.  Splits on the FIRST ':'
    /// only (so the value may itself contain colons), trims both halves,
    /// strips one pair of surrounding single or double quotes from the value
    /// if present, and validates that `attr` exists in the schema and is
    /// registered for faceting.
    fn from_str(
        s: &str,
        schema: &Schema,
        attributes_for_faceting: &[FieldId],
    ) -> Result<Self, FacetError> {
        let mut split = s.splitn(2, ':');
        let key = split
            .next()
            .ok_or_else(|| FacetError::InvalidFormat(s.to_string()))?
            .trim();
        let field_id = schema
            .id(key)
            .ok_or_else(|| FacetError::AttributeNotFound(key.to_string()))?;
        if !attributes_for_faceting.contains(&field_id) {
            // Report the attributes that ARE faceted, to help the user.
            return Err(FacetError::attribute_not_set(
                attributes_for_faceting
                    .iter()
                    .filter_map(|&id| schema.name(id))
                    .map(str::to_string)
                    .collect::<Vec<_>>(),
                key))
        }
        let value = split
            .next()
            .ok_or_else(|| FacetError::InvalidFormat(s.to_string()))?
            .trim();
        // unquoting the string if need be:
        // only a value of at least two chars with MATCHING quote characters
        // at both ends is unquoted; a lone quote is kept verbatim.
        let mut indices = value.char_indices();
        let value = match (indices.next(), indices.last()) {
            (Some((s, '\'')), Some((e, '\''))) | (Some((s, '\"')), Some((e, '\"'))) => value[s + 1..e].to_string(),
            _ => value.to_string(),
        };
        Ok(Self::new(field_id, value))
    }
}

// LMDB key codec: big-endian u16 field id (2 bytes) followed by the raw
// UTF-8 value bytes.  Big-endian keeps keys sorted by field id.
impl<'a> heed::BytesEncode<'a> for FacetKey {
    type EItem = FacetKey;

    fn bytes_encode(item: &'a Self::EItem) -> Option<Cow<'a, [u8]>> {
        let mut buffer = Vec::with_capacity(2 + item.1.len());
        let id = BEU16::new(item.key().into());
        let id_bytes = OwnedType::bytes_encode(&id)?;
        let value_bytes = Str::bytes_encode(item.value())?;
        buffer.extend_from_slice(id_bytes.as_ref());
        buffer.extend_from_slice(value_bytes.as_ref());
        Some(Cow::Owned(buffer))
    }
}

impl<'a> heed::BytesDecode<'a> for FacetKey {
    type DItem = FacetKey;

    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
        // Mirror of bytes_encode: first 2 bytes are the BEU16 field id.
        let (id_bytes, value_bytes) = bytes.split_at(2);
        let id = OwnedType::<BEU16>::bytes_decode(id_bytes)?;
        let id = id.get().into();
        let string = Str::bytes_decode(&value_bytes)?;
        Some(FacetKey(id, string.to_string()))
    }
}

/// Records that `document_id` has facet value `value` for `field_id`.
/// String values are indexed, nulls are silently skipped, and any other JSON
/// type is rejected as an invalid facet attribute.
pub fn add_to_facet_map(
    facet_map: &mut HashMap<FacetKey, Vec<DocumentId>>,
    field_id: FieldId,
    value: Value,
    document_id: DocumentId,
) -> Result<(), FacetError> {
    let value = match value {
        Value::String(s) => s,
        // ignore null
        Value::Null => return Ok(()),
        value => return Err(FacetError::InvalidDocumentAttribute(value.to_string())),
    };
    let key = FacetKey::new(field_id, value);
    facet_map.entry(key).or_insert_with(Vec::new).push(document_id);
    Ok(())
}

/// Builds the facet-value -> document-ids map for the given documents by
/// reading their stored fields from the index (inside read txn `rtxn`).
/// Array-valued fields contribute one entry per element.
pub fn facet_map_from_docids(
    rtxn: &heed::RoTxn<MainT>,
    index: &crate::Index,
    document_ids: &[DocumentId],
    attributes_for_facetting: &[FieldId],
) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
    let mut facet_map = HashMap::new();
    for document_id in document_ids {
        for result in index
            .documents_fields
            .document_fields(rtxn, *document_id)?
        {
            let (field_id, bytes) = result?;
            if attributes_for_facetting.contains(&field_id) {
                match serde_json::from_slice(bytes)? {
                    Value::Array(values) => {
                        for v in values {
                            add_to_facet_map(&mut facet_map, field_id, v, *document_id)?;
                        }
                    }
                    v => add_to_facet_map(&mut facet_map, field_id, v, *document_id)?,
                };
            }
        }
    }
    Ok(facet_map)
}

/// Same as `facet_map_from_docids`, but sourced from in-memory documents
/// (e.g. during an addition/update) instead of the LMDB store.
pub fn facet_map_from_docs(
    schema: &Schema,
    documents: &HashMap<DocumentId, IndexMap<String, Value>>,
    attributes_for_facetting: &[FieldId],
) -> MResult<HashMap<FacetKey, Vec<DocumentId>>> {
    let mut facet_map = HashMap::new();
    // Resolve each faceted field id to its attribute name up front.
    let attributes_for_facetting = attributes_for_facetting
        .iter()
        .filter_map(|&id| schema.name(id).map(|name| (id, name)))
        .collect::<Vec<_>>();

    for (id, document) in documents {
        for (field_id, name) in &attributes_for_facetting {
            if let Some(value) = document.get(*name) {
                match value {
                    Value::Array(values) => {
                        for v in values {
                            add_to_facet_map(&mut facet_map, *field_id, v.clone(), *id)?;
                        }
                    }
                    v => add_to_facet_map(&mut facet_map, *field_id, v.clone(), *id)?,
                }
            }
        }
    }
    Ok(facet_map)
}

#[cfg(test)]
mod test {
    use super::*;
    use meilisearch_schema::Schema;

    #[test]
    fn test_facet_key() {
        let mut schema = Schema::new();
        let id = schema.insert_and_index("hello").unwrap();
        let facet_list = [schema.id("hello").unwrap()];
        assert_eq!(
            FacetKey::from_str("hello:12", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "12".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:\"foo bar\"", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "foo bar".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:'foo bar'", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "foo bar".to_string())
        );
        // weird case
        assert_eq!(
            FacetKey::from_str("hello:blabla:machin", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "blabla:machin".to_string())
        );

        assert_eq!(
            FacetKey::from_str("hello:\"\"", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "".to_string())
        );

        assert_eq!(
            FacetKey::from_str("hello:'", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "'".to_string())
        );
        assert_eq!(
            FacetKey::from_str("hello:''", &schema, &facet_list).unwrap(),
            FacetKey::new(id, "".to_string())
        );
        assert!(FacetKey::from_str("hello", &schema, &facet_list).is_err());
        assert!(FacetKey::from_str("toto:12", &schema, &facet_list).is_err());
    }

    #[test]
    fn test_parse_facet_array() {
        use either::Either::{Left, Right};
        let mut schema = Schema::new();
        let _id = schema.insert_and_index("hello").unwrap();
        let facet_list = [schema.id("hello").unwrap()];
        assert_eq!(
            FacetFilter::from_str("[[\"hello:12\"]]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Left(vec![FacetKey(FieldId(0), "12".to_string())])])
        );
        assert_eq!(
            FacetFilter::from_str("[\"hello:12\"]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Right(FacetKey(FieldId(0), "12".to_string()))])
        );
        assert_eq!(
            FacetFilter::from_str("[\"hello:12\", \"hello:13\"]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![
                Right(FacetKey(FieldId(0), "12".to_string())),
                Right(FacetKey(FieldId(0), "13".to_string()))
            ])
        );
        assert_eq!(
            FacetFilter::from_str("[[\"hello:12\", \"hello:13\"]]", &schema, &facet_list).unwrap(),
            FacetFilter(vec![Left(vec![
                FacetKey(FieldId(0), "12".to_string()),
                FacetKey(FieldId(0), "13".to_string())
            ])])
        );
        assert_eq!(
            FacetFilter::from_str(
                "[[\"hello:12\", \"hello:13\"], \"hello:14\"]",
                &schema,
                &facet_list
            )
            .unwrap(),
            FacetFilter(vec![
                Left(vec![
                    FacetKey(FieldId(0), "12".to_string()),
                    FacetKey(FieldId(0), "13".to_string())
                ]),
                Right(FacetKey(FieldId(0), "14".to_string()))
            ])
        );

        // invalid array depths
        assert!(FacetFilter::from_str(
            "[[[\"hello:12\", \"hello:13\"], \"hello:14\"]]",
            &schema,
            &facet_list
        )
        .is_err());
        assert!(FacetFilter::from_str(
            "[[[\"hello:12\", \"hello:13\"]], \"hello:14\"]]",
            &schema,
            &facet_list
        )
        .is_err());
        assert!(FacetFilter::from_str("\"hello:14\"", &schema, &facet_list).is_err());

        // unexisting key
        assert!(FacetFilter::from_str("[\"foo:12\"]", &schema, &facet_list).is_err());

        // invalid facet key
        assert!(FacetFilter::from_str("[\"foo=12\"]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"foo12\"]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"\"]", &schema, &facet_list).is_err());

        // empty array error
        assert!(FacetFilter::from_str("[]", &schema, &facet_list).is_err());
        assert!(FacetFilter::from_str("[\"hello:12\", []]", &schema, &facet_list).is_err());
    }
}
ETS-1-mediated Transcriptional Up-regulation of CD44 Is Required for Sphingosine-1-phosphate Receptor Subtype 3-stimulated Chemotaxis* Background: S1P3-mediated chemotaxis plays a pivotal role in various physiological and pathophysiological activities. Results: S1P/S1P3 signaling activates ROCK/JNK/ETS-1/CD44 pathway, and inhibition of this pathway abrogates S1P3-stimulated chemotaxis. Conclusion: ETS-1/CD44 signaling mediates S1P/S1P3-regulated chemotaxis. Significance: Therapeutically manipulating S1P3-mediated chemotaxis requires a molecular understanding of its regulated signaling pathway. Sphingosine-1-phosphate (S1P)-regulated chemotaxis plays critical roles in various physiological and pathophysiological conditions. S1P-regulated chemotaxis is mediated by the S1P family of G-protein-coupled receptors. However, molecular details of the S1P-regulated chemotaxis are incompletely understood. Cultured human lung adenocarcinoma cell lines abundantly express S1P receptor subtype 3 (S1P3), thus providing a tractable in vitro system to characterize molecular mechanism(s) underlying the S1P3 receptor-regulated chemotactic response. S1P treatment enhances CD44 expression and induces membrane localization of CD44 polypeptides via the S1P3/Rho kinase (ROCK) signaling pathway. Knockdown of CD44 completely diminishes the S1P-stimulated chemotaxis. Promoter analysis suggests that the CD44 promoter contains binding sites of the ETS-1 (v-ets erythroblastosis virus E26 oncogene homolog 1) transcriptional factor. ChIP assay confirms that S1P treatment stimulates the binding of ETS-1 to the CD44 promoter region. Moreover, S1P induces the expression and nuclear translocation of ETS-1. Knockdown of S1P3 or inhibition of ROCK abrogates the S1P-induced ETS-1 expression. Furthermore, knockdown of ETS-1 inhibits the S1P-induced CD44 expression and cell migration. In addition, we showed that S1P3/ROCK signaling up-regulates ETS-1 via the activity of JNK. 
Collectively, we characterized a novel signaling axis, i.e., ROCK-JNK-ETS-1-CD44 pathway, which plays an essential role in the S1P3-regulated chemotactic response. Sphingosine-1-phosphate (S1P), 4 a critical serum-borne lipid mediator, regulates a wide array of biological processes, such as cell proliferation and survival (1)(2)(3), immune cell trafficking (4), suppression of apoptosis (5,6), and chemotaxis (7)(8)(9). S1P can function either as an extracellular ligand or as an intracellular mediator (10 -12). When functioning as an extracellular ligand, S1P-regulated biological activities are mediated by the S1P family of G-protein-coupled receptors (10,13,14). Five members of the S1P family G-protein-coupled receptors (S1P 1-5 receptors) have been identified. We and others have shown that S1P 1 and S1P 3 receptor-mediated signaling pathways play critical roles in endothelial cell chemotaxis, adherens junction assembly, endothelial morphogenesis, and angiogenic responses (10,15,16). The balance between S1P 1 and S1P 2 signaling is important in the regulation of endothelial integrity (17,18) and vascular inflammation (19). In addition, S1P can also function as an intracellular lipid mediator to regulate Ca 2ϩ mobilization and suppress apoptosis (20 -22). It was recently demonstrated that intracellular S1P interacts with histone deacetylases, HDAC1 and HDAC2, and modulates enzymatic activity of HDACs, as well as gene expression regulated by HDACs (11). Moreover, S1P was shown to bind to the N-terminal RING domain of TNF receptor-associated factor 2, leading to the activation of the E3 ligase activity of TNF receptorassociated factor 2 (12). S1P is an important regulator of cell chemotactic response (16,24,25). 
S1P-regulated chemotactic response has been shown to play critical roles in various physiological and pathophysiological conditions, including angiogenesis (15,16), vascular maturation (26), atherosclerosis (19), lymphocyte egress from lymphoid organs (27,28), multiple sclerosis (29), and the invasion and metastasis of tumor cells (30,31). Chemotactic response regulated by S1P is mediated by the cell membrane S1P family of G-protein-coupled receptors. It was shown that S1P treatment can either stimulate or inhibit cell migration (16,25). The stimulatory and inhibitory effects of S1P are mediated by the S1P 1 /S1P 3 and S1P 2 receptor subtypes, respectively (16,24,25,32). S1P 1 -stimulated chemotaxis requires the activation of the Rho family small GTPases (16,32). In contrast, S1P 2 signaling was shown to suppress the activity of Rac GTPase, leading to the inhibition of cell migration (25). However, molecular details of the S1P 3 -mediated chemotactic response are poorly understood and remain to be elucidated. S1P/S1P 3 -mediated chemotaxis was shown to play important roles in the invasion/migration phenotype of MCF10A human breast epithelial cells (33), prostate migration of carcinoma cells (30), B-cell development, egress and positioning within the bone marrow (34), homing of bone marrow-derived cells (35), dissemination of inflamed dendritic cells (36), shuttle of B cells from splenic follicular areas to marginal zone (37), and high density lipoprotein-stimulated endothelial cell migration (38), among others. We recently showed that S1P 3 receptors are abundantly expressed in a panel of human lung adenocarcinoma cell lines, and S1P 3 signaling plays an essential role in stimulating the migration/invasion of these cell lines (39). In the present study, we investigated the molecular mechanism underlying the S1P/S1P 3 -mediated cell chemotaxis by utilizing these lung adenocarcinoma cell systems. 
We observed that S1P/S1P 3 signaling markedly increased CD44 expression and membrane localization of CD44 polypeptides. Knockdown of CD44 abrogated the S1P/S1P 3 -mediated chemotaxis. Moreover, we demonstrated that the S1P/S1P 3 -induced CD44 expression depends on ETS-1 (v-ets erythroblastosis virus E26 oncogene homolog 1) activity. Collectively, our data elucidate for the first time that the novel ETS-1/CD44 signaling pathway plays a critical role in S1P 3 -stimulated chemotactic response. EXPERIMENTAL PROCEDURES Reagents-Sphingosine-1-phosphate (Biomol) was dissolved in methanol, aliquoted, vacuum-dried, and stored at −20°C. When needed, an aliquot was resuspended in 4% fatty acid-free BSA (Sigma) by sonication to make a stock solution of 200 μM. RPMI 1640, keratinocyte serum-free medium, trypsin, FBS, goat anti-mouse IgG, and goat anti-rabbit IgG were obtained from Invitrogen. CD44, c-Jun, and phospho-JNK antibodies were purchased from Cell Signaling. ETS-1 antibody was obtained from Santa Cruz Biotechnology. RNeasy Mini-Kit, si-ROCK1, and nontargeting siRNA control were purchased from Qiagen. si-JNK1 was from Ambion. ROCK inhibitor Y-27632 and PI3K inhibitor LY 294002 were purchased from EMD Chemicals. NF-κB inhibitor BAY 11-7085 was obtained from Biomol. Unless otherwise specified, all chemicals and reagents were purchased from Sigma. HBEC2-KT and HBEC3-KT cells were cultured in keratinocyte serum-free medium. Cells were serum-starved overnight followed by the treatment of S1P or vehicle for various times. Then the cells were collected for RNA or protein extraction or subjected to functional analysis as described below. RNA Isolation, RT-PCR, and Real Time PCR-Total RNAs were isolated from cells using an RNeasy mini-kit (Qiagen) according to the manufacturer's instructions. RNA quality and concentration were assessed with a NanoDrop ND-1000 spectrophotometer.
Total RNAs were reverse transcribed with an oligo(dT) primer (Promega) by Moloney murine leukemia virus reverse transcriptase (Promega) for the first strand cDNA synthesis. For real time PCR quantitation, 50 ng of reverse transcribed cDNAs were amplified with the ABI 7500 system (Applied Biosystems) in the presence of TaqMan DNA polymerase. The sense and antisense primers of CD44, ETS-1, ROCK1, S1P receptors, and GAPDH were purchased from Applied Biosystems. Real time PCRs were performed by using a universal PCR Master Mix (Applied Biosystems) according to the manufacturer's instructions. Relative quantification (RQ) was calculated using the Applied Biosystems SDS software based on the equation RQ = 2^(−ΔΔCt), where Ct is the threshold cycle to detect fluorescence. Ct data were normalized to the internal standard GAPDH. shRNA-mediated Gene Knockdown-Stable knockdown of S1P 3 receptor in cultured cells was performed essentially as we described (39). For knocking down CD44 and ETS-1, cells were plated in 6-well plates (2 × 10^5 cells/well) and cultured at 37°C for 20 h in a humidified atmosphere of 5% CO 2 . Cells were transfected with human GIPZ lentiviral shRNAmir vector, RHS4430-99158569 and RHS4430-100995224 (Open Biosystems) specific to silence CD44 and ETS-1, respectively. Transfection with nontargeting GIPZ lentiviral shRNAmir RHS4346 vector was used as a control. Transfection was performed by using Lipofectamine 2000 reagent (Invitrogen). Seventy-two hours later, stably transfected cells were selected with puromycin (1 μg/ml). The efficacy and specificity of CD44 and ETS-1 knockdown were assessed by both real time PCR and Western blot analysis. Chemotaxis Analysis-Cell chemotaxis was measured by using the Neuro Probe A series 96-well chamber with standard framed filters (8-μm pore size) (Neuro Probe), as previously described (39,40).
The cells were grown to confluence, washed three times with PBS, and serum-starved in plain medium supplemented with 0.01% FBS for 16 h. The cells were collected by brief trypsinization, washed, and resuspended in plain RPMI 1640 medium (2 × 10^5 cells/ml). Standard framed filters were precoated with fibronectin (5 μg/ml) (39,40) at 37°C for 1 h and then air-dried. Cell suspensions (400 μl) were plated in upper chambers, and chemoattractants were added to lower chambers. Cells were allowed to migrate for 8 h at 37°C. Subsequently, the cells that remained on the upper surface of filters were removed by gently wiping with a cotton swab. The cells that migrated to the lower surface were fixed with 4% paraformaldehyde and stained with 0.5% crystal violet for 30 min. After washing, crystal violet dye was eluted with 10% acetic acid, and absorbance was measured at 595 nm. ChIP Assay-ChIP assays were performed using a Pierce chromatin prep module and Pierce agarose ChIP kit following the manufacturer's instructions (Thermo Scientific). Briefly, cells were cross-linked with 1% formaldehyde and collected into lysis buffer (1% SDS, 10 mM EDTA, 50 mM Tris-HCl, pH 8.0, 1× protease inhibitor mixture). Cell lysates were digested with micrococcal nuclease, followed by immunoprecipitating with rabbit ETS-1 or c-Jun antibody. Immunoprecipitation with a normal rabbit IgG (Thermo Scientific) was used as a negative control. After incubation with the protein A/G Plus agarose resin, immunoprecipitates were washed and then heated at 65°C for 1.5 h to reverse the formaldehyde crosslinking. DNA fragments were purified with the DNA clean-up column and reagents included in the Pierce agarose ChIP kit. Immunostaining Analysis-Cells cultured in glass-bottomed Petri dishes (MatTek) were treated with or without S1P (200 nM), followed by fixation with 4% paraformaldehyde for 30 min at room temperature.
Cultures were permeabilized with 0.05% Triton X-100 and blocked with 1% bovine serum albumin for 30 min. Subsequently, cells were incubated with the indicated primary antibody (1:100) followed by the FITC-conjugated secondary antibody (1:500). Nuclei were stained with DAPI (Sigma-Aldrich). Fluorescent images were analyzed by the Leica TCS SP5 confocal system (Leica, Wetzlar, Germany). microRNA Quantification-Levels of miR-34a were quantified using TaqMan microRNA assays (Applied Biosystems) (41,42). Total RNAs were isolated from cells, and small RNA fractions (<200 nucleotides) were recovered using the mirVANA PARIS miRNA isolation kit (Ambion). RQ was calculated using the Applied Biosystems SDS software based on the equation RQ = 2^(−ΔΔCt), where Ct is the threshold cycle to detect fluorescence. Ct data were normalized to the internal standard, miR-103. Statistical Analysis-The results are presented as means ± S.D. The difference between various treatments was analyzed by Student's t test with p values < 0.05 considered significant. A conventional two-way analysis of variance (GraphPad Prism 5) was performed for migration data to compare the migratory capability before and after the silence of ETS-1 and CD44. S1P Stimulates CD44 Expression and Induces Membrane Localization of CD44 Proteins-Many of CD44-mediated biological responses, such as cell-cell and cell-matrix interactions (43) as well as cell migration (44,45), are regulated by S1P signaling pathways. Thus, we examined whether S1P stimulation activates CD44 and whether CD44 mediates S1P-regulated responses. H1793 cells, a human lung adenocarcinoma cell line abundantly expressing S1P 3 receptor subtype of the S1P family of G-protein-coupled receptors (39), were treated with or without S1P (200 nM) for various times. The expression of CD44 at the mRNA and protein levels was measured. As shown in Fig. 1A, S1P treatment increased levels of CD44 mRNA in a time-dependent manner.
The CD44 mRNA expression increased 1.5-fold at 30 min after S1P treatment and 3.3-fold at 24 h after S1P treatment. Levels of CD44 proteins were also profoundly increased after S1P treatment (Fig. 1B). Immunostaining analysis showed that CD44 proteins significantly increased and were located at plasma membrane regions following S1P treatment (Fig. 1C). In addition, we examined the S1P-induced CD44 expression in several other human lung-derived cell lines, including H1792, H1650, and H23 lung adenocarcinoma cells as well as HBEC2-KT immortalized normal bronchial epithelial cells (46,47). We found that S1P stimulated CD44 expression in H1792, H1650, and H23 lung adenocarcinoma cells. In contrast, S1P was unable to enhance CD44 expression in HBEC2-KT normal lung epithelial cells (Fig. 1D). S1P-stimulated CD44 Expression Is Mediated by the S1P 3 Receptor/ROCK Signaling Pathway-Previously, we showed that S1P 3 receptors are abundantly expressed in H1793, H1792, H23, and H1650 human lung adenocarcinoma cell lines, whereas S1P 3 receptors are barely detected in HBEC2-KT normal lung epithelial cells (39). S1P was able to induce CD44 expression in lung adenocarcinoma cells and not in normal lung epithelial cells (Fig. 1), suggesting that S1P-stimulated CD44 expression is mediated by S1P 3 receptors. Therefore, we employed the shRNA-mediated gene silencing technique to specifically knockdown S1P 3 receptors in H1793 cells. As shown in Fig. 2A, knockdown of S1P 3 completely abolished CD44 induction following S1P stimulation. In contrast, cells stably transfected with pRS control vector (sh-Ctrl) had no effect on S1P-stimulated CD44 expression. The sh-S1P 3 -RNA is highly specific, because it only knocked down S1P 3 receptors and had no effect on the other S1P receptor subtypes present in H1793 cells (Fig. 2B). 
Moreover, immunostaining analysis showed that S1P induced a significant increase of CD44 proteins in the membrane regions of H1793 cells stably transfected with sh-Ctrl vector, whereas the S1P-induced CD44 up-regulation was completely inhibited in the S1P 3 knockdown H1793 cells (Fig. 2C). These results indicate that the S1P 3 -transduced signaling pathway plays an essential role in the S1P-stimulated CD44 expression. We next utilized pharmacological inhibitors to characterize signaling pathways involved in the S1P 3 -mediated CD44 upregulation. S1P was shown to activate signaling molecules such as PI3K (48,49) and NFB (12,50), among others. However, treatment with LY294002 (inhibitor of PI3K) or BAY 11-7085 (inhibitor of NFB) did not significantly inhibit the S1P-in-duced CD44 up-regulation (Fig. 2D). In contrast, treatment of H1793 cells with Y-27632, a specific inhibitor of ROCK, completely abrogated the S1P-increased CD44 expression (p Ͻ 0.01, t test) (Fig. 2D). Moreover, we used siRNA to specifically knockdown ROCK1 in H1793 cells (Fig. 2E). The S1P-stimulated CD44 expression was significantly diminished in si-ROCK1 transfected cells (Fig. 2F). In agreement, we and others have shown that S1P 3 signaling leads to the activation of ROCK, which plays a critical role in S1P 3 -regulated biological activities (39,51). In addition, immunostaining analysis showed that pretreatment of H1793 cells with a ROCK inhibitor completely abolished the S1P-increased CD44 proteins in membrane regions (Fig. 2G). Collectively, these data suggest that S1P-induced CD44 expression is mediated by the S1P 3 /ROCK signaling pathway. CD44 Proteins Play a Critical Role in S1P/S1P 3 -stimulated Cell Migration-We have shown that levels of S1P 3 receptors are significantly increased in a panel of lung adenocarcinoma cell lines, compared with that in immortalized normal lung epithelial cells (39). 
We have also shown that S1P-stimulated cell migration and invasion is mediated by S1P 3 receptors in these lung adenocarcinoma cells (39). Recently, it was elegantly demonstrated that CD44 proteins play a key role in tumor invasion and metastasis (52). Therefore, we investigated whether S1Penhanced CD44 expression plays an essential role in S1P/S1P 3 signaling-stimulated cell migration. We employed an shRNAmediated gene knockdown technique to specifically diminish CD44 expression. As shown in Fig. 3A, 78% of CD44 mRNA was successfully knocked down in H1793 cells stably transfected with sh-CD44, compared with that in cells stably transfected with sh-Ctrl vector. Next, H1793 cells stably transfected with sh-CD44 or sh-Ctrl vector were treated with or without S1P (200 nM) for 4 h. Levels of CD44 proteins were measured by Western blotting analysis. As shown in Fig. 3B, S1P treatment substantially increased levels of CD44 proteins in the cells transfected with the sh-Ctrl vector. In contrast, S1P stimulation failed to increase CD44 proteins in sh-CD44 transfected cells. Transfection of sh-CD44 had no effect on endogenous ␤-actin proteins (Fig. 3B), suggesting that sh-CD44 is specific in knocking down CD44 molecules. Subsequently, we examined the role of CD44 in S1P/S1P 3 signaling-stimulated cell migration. As shown in Fig. 3C, S1P treatment dose-dependently stimulated migration of sh-Ctrl vector transfected cells. In sharp contrast, S1P-induced chemotaxis was completely inhibited in CD44 knocked down H1793 cells (p Ͻ 0.01, sh-Ctrl versus sh-CD44, analysis of variance) (Fig. 3C). Also, we showed that S1P 3 -mediated CD44 upregulation requires ROCK1 activity (Fig. 2F). In agreement, knockdown of ROCK1 significantly diminished S1P-stimulated cell migration (Fig. 3D). These results suggest that the S1P/ S1P 3 /ROCK1 signaling axis induces the expression of CD44, which plays an essential role in S1P/S1P 3 -mediated cell chemotactic response. 
S1P Treatment Stimulates ETS-1 Binding to CD44 Promoter-It was recently shown that miR-34a inhibits metastasis of prostate cancer cells by directly suppressing CD44 expression (52). Thus, we examined whether S1P/S1P 3 signaling diminishes miR-34a expression, consequently leading to the up-regulation A and B, levels of CD44 mRNAs (A) and proteins (B) were measured by qPCR and Western blot analysis, respectively. Note that S1P treatment significantly enhances CD44 expression. Lower panel in B, levels of CD44 proteins were quantitated by the National Institutes of Health ImageJ software. C, H1793 cells were treated with or without S1P (200 nM) for 4 h. CD44 proteins (green) were detected by immunofluorescence staining. Nuclei were stained with DAPI (lower panels). Note that S1P treatment markedly increases membrane localization of CD44 proteins. D, HBEC2-KT immortalized normal lung epithelial (2KT) and H1792, H23, and H1650 lung adenocarcinoma cells were treated with or without S1P (200 nM) for 4 h, followed by qPCR quantitation of CD44 expression. Note that CD44 expression was significantly elevated in lung adenocarcinoma cells after S1P stimulation, whereas S1P was unable to stimulate CD44 expression in HBEC2-KT cells. A and D, data represent means Ϯ S.D. from three individual experiments performed in triplicate. *, p Ͻ 0.05; **, p Ͻ 0.01 (S1P versus control vehicle treatment), t test. Ctrl, control; IB, immunoblotting. . S1P-induced CD44 expression is mediated by S1P 3 /ROCK signaling pathway. A, S1P-increased CD44 expression was measured in H1793 cells transfected with sh-S1P 3 or sh-Ctrl (control) vector. B, expression of S1P receptor subtypes was measured in H1793 cells stably transfected with sh-S1P 3 or sh-Ctrl vector by real time PCR. C, immunofluorescence staining of CD44 (green, panels a, c, e, and g) in sh-S1P 3 (panels e-h) or sh-Ctrl (panels a-d) transfected H1793 cells, treated with or without S1P for 4 h. 
Cell nuclei were stained with DAPI (panels b, d, f, and h). D, H1793 were pretreated for 1 h with or without indicated pharmacological inhibitors, followed by stimulating in the presence or absence of S1P for 4 h. The expression of CD44 was quantitated by qPCR. LY, LY294002 for PI3K; BAY, Bay 11-7085 for NFB; Y, Y-27632 for ROCK. Note that pretreatment with ROCK inhibitor completely diminished the S1P-induced CD44 expression. E, H1793 were transfected with nontargeting siRNA control or si-ROCK1 (Qiagen). Levels of ROCK1 were quantitated by qPCR. F, si-Ctrl or si-ROCK1 transfected H1793 cells were treated with or without S1P for 4 h. Levels of CD44 were measured by qPCR. G, H1793 cells were pretreated with control vehicle (panels a-d) or Y-27632 (Y) (panels e-h) for 1 h, followed by stimulating with or without S1P for 4 h. CD44 proteins were analyzed by immunofluorescence staining (panels a, c, e, and g), and cell nuclei were stained with DAPI (panels b, d, f, and h). Note that the S1P-increased CD44 at membrane regions was inhibited by pretreatment of ROCK inhibitor. The data represent means Ϯ S.D. from three (A, B, and D) or two (E and F) individual experiments performed in triplicate. *, p Ͻ 0.01; **, p Ͻ 0.05; t test. Ctrl, control. of CD44 proteins. H1793 cells were treated with or without S1P for various times. Levels of miR-34a were quantified by using a TaqMan microRNA assay kit (Applied Biosystems) (41,42). We observed that miR-34a levels were significantly increased (ϳ2-3-fold; p Ͻ 0.05, t test, n ϭ 3), in a time-dependent manner, after S1P stimulation (data not shown). This result suggests that the S1P/S1P 3 -increased CD44 expression is not directly regulated by miR-34a-mediated suppression mechanism. Next, we employed the cis-element Cluster Finder software (53) to analyze the CD44 promoter region for potential binding sites of transcriptional factors. 
Four potential binding sites of ETS-1 (v-ets erythroblastosis virus E26 oncogene homolog 1) transcriptional factor were revealed by this promoter analysis. ETS-1 was shown to be involved in VEGF-stimulated endothelial chemotaxis (54); however, the underlying mechanism is completely unknown. Therefore, we investigated whether ETS-1 activity is required for S1P/S1P 3 -stimulated CD44 upregulation and chemotaxis. ETS-1 binding site 1 is located at nucleotides −1384 to −1374, site 2 at nucleotides −1359 to −1349, site 3 at nucleotides −1148 to −1138, and site 4 at nucleotides −1107 to −1097 in the 5′-upstream region of the CD44 translational initiation site (designated as nucleotide +1) (Fig. 4A). Sites 1 and 2 are separated by only 15 nucleotides, and sites 3 and 4 are separated by only 31 nucleotides. Therefore, three pairs of primers were designed to assess the binding of ETS-1 to these candidate sites by chromatin immunoprecipitation analysis. P1 amplified sites 1 and 2 (amplicon size 220 bp), P2 amplified sites 3 and 4 (amplicon size 230 bp), and P3 amplified sites 1-4 (amplicon size 429 bp). Initially, we examined the binding capabilities of ETS-1 to sites 1 and 2 and/or sites 3 and 4 by using primer pair P1 or P2, respectively, following chromatin immunoprecipitation with anti-ETS-1. PCR amplification with P2 primer pair showed an amplicon of 230 bp, whereas no PCR amplicon was observed by using P1 primer pair (Fig. 4B, upper panel). Anti-ETS-1 ChIP assay is specific, because there was no amplicon when the ChIP assay was performed using normal IgG as a control (Fig. 4B, lower panel). These results suggest that sites 3 and 4 may be the bona fide binding sites of ETS-1. Next, we examined whether S1P treatment stimulates the binding of ETS-1 to sites 3 and 4 in the CD44 promoter region. H1793 cells were treated with S1P (200 nM) for various times. The binding of ETS-1 to CD44 promoter region was assessed by ChIP analysis.
By utilizing the P3 primer pair, we found a detectable increase of ETS-1 binding at 1 h after S1P stimulation (Fig. 4C). Also, the S1P-enhanced ETS-1 binding was increased in a time-dependent manner. The same kinetics of S1P-increased ETS-1 binding was observed by using the P2 primer pair, which specifically amplifies sites 3 and 4 (Fig. 4D). These data together suggest that S1P treatment stimulates the binding of ETS-1 to sites 3 and 4, which may contribute to the S1P-increased CD44 expression. S1P Induces ETS-1 Expression via S1P 3 /ROCK Pathway-ChIP analysis suggests that S1P treatment activates ETS-1. Thus, we investigated the signaling pathway in S1P-mediated ETS-1 activation. H1793 cells were treated with or without S1P (200 nM) for various times, and the expression of ETS-1 at the mRNA level was measured by qPCR analysis. We observed that ETS-1 mRNA was significantly increased (1.7-fold) at 1 h after S1P treatment (Fig. 5A). The S1P-mediated ETS-1 up-regulation steadily increased up to 24 h of S1P stimulation. Also, S1Pincreased ETS-1 expression was observed in H1792, H23, and H1650 lung adenocarcinoma cells (Fig. 5B), which were also shown to abundantly express S1P 3 receptors (39). In contrast, ETS-1 up-regulation was not observed in HBEC2-KT immortalized normal lung epithelial cells (Fig. 5B), which express low levels of S1P 3 receptors (39). Moreover, Western blot analysis showed that ETS-1 proteins were profoundly increased (5.0 Ϯ 2.5-fold) at 4 h after S1P stimulation. The increment of ETS-1 up-regulation was sustained at 24 h after treatment (Fig. 5C). Furthermore, analysis with anti-ETS-1 immunostaining technique showed that S1P treatment not only increased ETS-1 expression but also stimulated the nuclear localization of ETS-1 proteins (Fig. 5D, arrows). We next examined the role of S1P 3 receptors in S1P-stimulated ETS-1 up-regulation. 
H1793 cells stably transfected with sh-S1P 3 or sh-Ctrl vector were stimulated with or without S1P (200 nM) for 4 h, and ETS-1 expression was measured by qPCR analysis. As shown in Fig. 5E, knockdown of S1P 3 receptors completely abrogated the S1P-stimulated ETS-1 up-regulation. Similar to signaling molecules involved in S1P-induced CD44 expression (Fig. 2D), pharmacological inhibition of ROCK, not PI3K and NFB, diminished 85% of the S1P-increased ETS-1 up-regulation (Fig. 5F). The requirement of ROCK is further supported by a specific gene knockdown technique, in which si-ROCK1 transfected cells significantly diminished the S1Pincreased ETS-1 expression (Fig. 5G). Collectively, our results suggest that the S1P/S1P 3 /ROCK signaling axis stimulates FIGURE 4. S1P stimulates ETS-1 binding to CD44 promoter. A, CD44 promoter contains four candidate ETS-1 binding sites. P1, P2, and P3 primer pairs were used to differentially amplify these candidate sites. B, ETS-1 binds to sites 3 and/or 4. ChIP analysis was performed as described under "Experimental Procedures." Note that a specific amplicon (expected size, 230 bp) was detected by using the P2 primer pair, whereas no amplicon was detected by using the P1 primer pair (top panel). Middle panel, PCR amplification of total input chromatin, used as a loading control. Bottom panel, ChIP analysis was performed with normal IgG (nIgG) or ETS-1 immunoprecipitates, followed by PCR amplification with P2 primer pair. C, cells were treated with or without S1P for the indicated times and analyzed for ETS-1 ChIP assay with P3 primer pair. PCR amplification of anti-ETS-1 immunoprecipitates and total input chromatin (expected size, 429 bp) are shown in the upper and lower panels, respectively. D, PCR amplification of anti-ETS-1 immunoprecipitates and total input DNA with P2 primer pair is shown in the upper and lower panels, respectively. Note that S1P treatment increased ETS-1 binding to sites 3 and/or 4 in the CD44 promoter region. 
B-D are images of a representative experiment that was repeated two times with similar results. NOVEMBER 8, 2013 • VOLUME 288 • NUMBER 45 JOURNAL OF BIOLOGICAL CHEMISTRY 32131 ETS-1 expression and nuclear translocation, leading to transcriptional up-regulation of CD44. ETS-1 Plays an Essential Role in S1P-stimulated CD44 Expression and S1P-promoted Chemotaxis-We present evidence showing that S1P treatment enhances ETS-1 expression (Fig. 5) and stimulates ETS-1 binding to the CD44 promoter (Fig. 4). Next, we employed the shRNA-mediated gene silencing technique to investigate the role of ETS-1 in S1P-stimulated CD44 expression and cell migration. We were able to knock down ϳ50% of ETS-1 at both the mRNA and protein levels (Fig. 6, A and B). Transfection of sh-ETS-1 vector had no effects on the expression of GAPDH (Fig. 6A, used for internal control of qPCR analysis) and actin polypeptides (Fig. 6, B-D), suggesting that sh-ETS-1 is highly specific in knocking down ETS-1 molecules. Also, cells stably transfected with sh-ETS-1 or sh-Ctrl vector were treated with or without S1P (200 nM) for 4 h. The S1P-induced ETS-1 expression was completely inhibited in sh-ETS-1 transfected cells (Fig. 6C). Importantly, the S1P-stimulated CD44 expression was significantly diminished in ETS-1 knockdown cells (Fig. 6, D and E). Moreover, ETS-1 proteins were increased 1.4 Ϯ 0.2-and 2.2 Ϯ 0.1-fold (n ϭ 3) at 15 and 30 min, respectively, after S1P stimulation. In contrast, we did not observe significant alterations in CD44 levels at 15 and 30 min of S1P treatment (Fig. 6F, upper panels). Transfection of sh-ETS-1 profoundly diminished the S1P-increased ETS-1 and CD44 (Fig. 6F, lower panels). These results support the notion that S1P treatment stimulates the expression of ETS-1 proteins, which lead to the transcriptional up-regulation of CD44 molecules. In addition, the S1P-stimulated chemotactic responses were measured in cells stably transfected with sh-ETS-1 or sh-Ctrl vector. 
S1P treatment dose-dependently stimulated chemotaxis in cells stably transfected with sh-Ctrl vector. In con-trast, S1P was incapable of stimulating a migratory response in H1793 cells stably transfected with sh-ETS-1 vector (Fig. 6G). All these data together suggest that ETS-1 plays an essential role in the S1P-stimulated CD44 expression and chemotaxis. S1P 3 /ROCK1 Signaling Up-regulates ETS-1 via the JNK/c-Jun Pathway-It has been shown that JNK is a downstream signaling molecule of ROCK1 (55)(56)(57). Also, promoter analysis found four candidate binding sites of AP-1 (a heterodimeric complex composed of proteins including c-Jun and c-Fos) transcriptional factor in the ETS-1 promoter region (found in a EpiTect ChIP qPCR Primers search at the SABiosciences website). Thus, we examined whether S1P 3 /ROCK1 signaling-mediated ETS-1 up-regulation is controlled by JNK/c-Jun pathway. Western blotting with phospho-JNK antibody indicated that S1P treatment was capable of activating JNK (Fig. 7A). S1P-stimulated JNK activation was inhibited in S1P 3 knocked down cells (Fig. 7B), as well as in cells pretreated with the ROCK inhibitor (Fig. 7C), indicating the involvement of S1P 3 /ROCK1 pathway in S1P-mediated JNK activation. ChIP analysis with the c-Jun antibody showed an increase of c-Jun binding to the AP-1 site in the ETS-1 promoter region following S1P stimulation (Fig. 7D). Pharmacological inhibition of JNK activity with SP600125 diminished the S1P-increased protein levels of ETS-1 and CD44 (Fig. 7E). Transfection with si-JNK1 knocked down 42 Ϯ 9 and 22 Ϯ 11% (n ϭ 9) of p54 and p46, respectively, JNK1 (Fig. 7F). Moreover, si-JNK1 transfection substantially abrogated the S1P-increased ETS-1 and CD44 proteins (Fig. 7, F and G). In addition, SP600125 treatment (Fig. 7H) and si-JNK1 transfection (Fig. 7I) significantly inhibited the S1P-stimulated cell migration. 
These data together suggest that S1P 3 / ROCK1 activates the JNK/AP-1 pathway, ultimately leading to transcriptional up-regulation of ETS-1. . S1P enhances ETS-1 expression via S1P 3 /ROCK pathway. A, ETS-1 mRNAs were quantitated in H1793 cells treated with S1P for various times. B, S1P increased ETS-1 expression in H1792, H23, and H1650 lung adenocarcinoma cells and not in HBEC2-KT cells. C, Western blotting showed that S1P treatment increased ETS-1 proteins. D, H1793 cells were treated with or without S1P for 4 h. ETS-1 was detected by immunostaining with ETS-1 antibody (upper panels), and nuclei were stained with DAPI (lower panels). Note that S1P treatment stimulated the expression and nuclear localization of ETS-1 proteins (white arrows). E, knockdown of S1P 3 completely abrogated S1P-increased ETS-1 expression. H1793 cells transfected with sh-S1P 3 or sh-Ctrl vector were treated with or without S1P for 4 h. ETS-1 mRNAs were quantified by qPCR. F, inhibition of ROCK significantly diminished S1P-induced ETS-1 expression. H1793 were pretreated with or without pharmacological inhibitors, followed by stimulating with S1P for 4 h. ETS-1 mRNAs were measured by qPCR. DISCUSSION S1P, a serum-borne bioactive lipid, plays an important role in the regulation of cell migration. S1P was shown to be a potent chemoattractant, because the migration stimulatory effect of S1P is mediated by S1P 1 or S1P 3 receptors (16,24,59). In addition, S1P was shown to exhibit the migration inhibitory effect, which is mediated by S1P 2 receptors (25,60). Mechanistically, S1P/S1P 1 signaling activates AKT kinase, leading to the activation of Rac/Cdc42 small GTPases and ultimately stimulating the formation of membrane ruffling and chemotaxis (16,61). Also, the Rho family of small GTPases mediated translocation of cortactin to the membrane ruffling area (62)(63)(64), and activation of integrin molecules (32,65,66) was shown to be important in the S1P/S1P 1 -stimulated chemotaxis. 
In contrast, S1P/ S1P 2 signaling was shown to stimulate Rac GTPase-activating protein (Rac-GAP) and thus abrogate growth factor-induced Rac activation and chemotaxis (25,60). S1P/S1P 3 -mediated chemotactic response was shown to play important roles in various physiological and pathophysiological responses (30,(33)(34)(35)(36)(37)(38). However, molecular details of the S1P 3mediated chemotactic response are poorly understood and remain to be elucidated. We found that S1P 3 receptors are abundantly expressed in a panel of lung adenocarcinoma cell lines (39). Therefore, we utilized these lung carcinoma cell lines to characterize the mechanism underlying S1P 3 -mediated cell migration. We observed that S1P treatment markedly enhanced CD44 expression at both mRNA and protein levels and increased membrane localization of CD44 proteins (Fig. 1). We further demonstrated that S1P-induced CD44 expression is mediated by the S1P 3 /ROCK signaling pathway (Fig. 2). Knockdown of CD44 completely abrogated S1P/S1P 3 -stimulated migratory response (Fig. 3), indicating that CD44 polypeptides play an essential role in S1P 3 -mediated chemotaxis. Moreover, we demonstrated that the S1P/S1P 3 /ROCK/JNK/c-Jun signaling axis stimulated the expression of ETS-1 transcriptional factor and nuclear translocation of ETS-1 (Fig. 5). ChIP analysis suggests that S1P treatment increased the binding of ETS-1 to the CD44 promoter region (Fig. 4). This observation leads to the speculation that S1P-stimulated ETS-1 binding to the CD44 promoter may trans-activate the expression of CD44 molecules. Indeed, knockdown of ETS-1 not only abrogated S1Pinduced CD44 expression but also inhibited S1P 3 -stimulated cell migration (Fig. 6). Collectively, our data elucidate a novel chemotaxis stimulatory signaling pathway, i.e., ETS-1/CD44 axis, which plays an essential role in S1P 3 -stimulated cell migration. 
The ETS family of transcription factors plays important roles in various biological processes such as development, differentiation, proliferation, apoptosis, migration, tissue remodeling, and angiogenesis in various cell types. ETS-1, the prototype of the ETS family of transcription factors, controls a wide array of cellular activities via its transcriptional regulation of the expression of specific genes (84). Many genes (e.g., matrix metalloproteinases and urokinase plasminogen activator) that are important in extracellular matrix remodeling and cell migration are known targets of ETS-1 (85)(86)(87)(88), suggesting a critical role for ETS-1 in promoting cell migration (89). It should be noted that the ETS family of transcription factors has been shown to be associated with tumorigenic processes (23,58,90). Also, it has been elegantly demonstrated that CD44 polypeptides play critical roles in tumor "stemness," invasion, and metastasis. S1P is a potent chemoattractant. Recently, we reported that S1P 3 expression is markedly up-regulated in a panel of cultured lung adenocarcinoma cell lines (39). All these data together highlight an exciting possibility that the novel S1P 3 -ETS-CD44 signaling axis, characterized in this study, may contribute to invasion or metastasis of lung cancer progression. In summary, we have characterized a novel S1P-regulated signaling cascade, i.e., S1P 3 activates ETS-1 transcription factor via the ROCK/JNK/c-Jun pathway, leading to the transcriptional up-regulation of CD44 molecules, which consequently promotes cell chemotactic response. Moreover, our study implies that the S1P 3 /ETS-1/CD44 axis may represent a novel chemotaxis stimulatory signaling pathway for the migration and invasion of lung adenocarcinoma cells. FIGURE 7. The JNK/c-Jun signaling pathway mediates the S1P 3 /ROCK1 up-regulated ETS-1 expression. A, H1793 cells were treated with S1P (200 nM) for various times. 
Protein lysates were immunoblotted with phospho-JNK antibody. Extracts of HEK293 cells treated without and with phorbol 12-myristate 13-acetate ("−"ve and "+"ve, respectively) were used as negative and positive controls. B, sh-Ctrl or sh-S1P 3 stably transfected H1793 cells were treated with or without S1P (200 nM, 15 min), followed by Western blotting with anti-phospho-JNK. C, H1793 cells were pretreated in the presence or absence of a ROCK inhibitor (Y27632) for 30 min, followed by stimulating with or without S1P (200 nM, 15 min). Extracts were probed with anti-phospho-JNK. D, H1793 cells were treated with S1P (200 nM) for indicated times. AP-1 ChIP assays were performed by amplifying the anti-c-Jun precipitates with primer pairs specific for the AP-1 site in the ETS-1 promoter region (SABiosciences; GPH1016833(−)01A). E, H1793 cells were pretreated with JNK inhibitor (SP600125, 10 μM) for 30 min, followed by stimulating with S1P for 4 h. Extracts were blotted with anti-ETS-1 or anti-CD44. F, levels of ETS-1 and CD44 were measured in H1793 cells, transfected with si-Ctrl or si-JNK1 (Ambion), following S1P stimulation. G, immunoblot was quantified by National Institutes of Health ImageJ software and normalized to actin. H and I, S1P-stimulated chemotaxis was measured in H1793 cells pretreated with or without JNK inhibitor (SP600125) (H) or transfected with si-Ctrl or si-JNK1 (I). Cell migration induced by FBS (10%) was used as a control. *, p < 0.05, t test (n = 6). M.W., molecular mass.
package testy

import (
	"net/http"
	"regexp"
	"testing"
)

// errText renders an error as a string, treating nil as the empty string.
func errText(err error) string {
	if err == nil {
		return ""
	}
	return err.Error()
}

// Error compares actual.Error() against expected, and triggers an error if
// they do not match. If actual is non-nil, t.SkipNow() is called as well.
func Error(t *testing.T, expected string, actual error) {
	if got := errText(actual); got != expected {
		t.Errorf("Unexpected error: %s", got)
	}
	if actual != nil {
		t.SkipNow()
	}
}

// statusCoder is the minimal interface of errors that carry an HTTP status.
type statusCoder interface {
	StatusCode() int
}

// StatusCode returns the HTTP status code embedded in the error, or 500 if
// there is no specific status code.
func StatusCode(err error) int {
	switch coder := err.(type) {
	case nil:
		return 0
	case statusCoder:
		return coder.StatusCode()
	default:
		return http.StatusInternalServerError
	}
}

// StatusError compares actual.Error() and the embeded HTTP status code against
// expected, and triggers an error if they do not match. If actual is non-nil,
// t.SkipNow() is called as well.
func StatusError(t *testing.T, expected string, status int, actual error) {
	if got := errText(actual); got != expected {
		t.Errorf("Unexpected error: %s", got)
	}
	if got := StatusCode(actual); got != status {
		t.Errorf("Unexpected status code: %d", got)
	}
	if actual != nil {
		t.SkipNow()
	}
}

// ErrorRE compares actual.Error() against expected, which is treated as a
// regular expression, and triggers an error if they do not match. If actual is
// non-nil, t.SkipNow() is called as well.
func ErrorRE(t *testing.T, expected string, actual error) {
	got := errText(actual)
	if !regexp.MustCompile(expected).MatchString(got) {
		t.Errorf("Unexpected error: %s", got)
	}
	if actual != nil {
		t.SkipNow()
	}
}
<reponame>wingej0/3dgradebook2.0<filename>src/app/core/models/user.ts export interface User { uid : string, email : string, photoURL : string, displayName : string, school? : string, city? : string, state? : string, import? : { source? : string, domain? : string, token? : string, }, }
An In-Depth Tutorial For Both New and Old Players

I've started a Let's Play on YouTube to help new players on their journey to complete the game. Vets of the game may have something to learn as well, so please check it out, subscribe, and — more importantly — ask questions and leave comments about what you'd like to see. This series goes hand in hand with a strategy guide I'm currently writing for the game. There are 4 episodes posted, with more coming on Tuesdays or Thursdays every week (unless I'm busy).

My Channel: Survivalist Gaming

World Settings:
- Resources: Scarce (8 ore per bar rather than the normal 4)
- Power: Scarce (power generation is roughly halved)
- Conveyors: Slow
- Day Cycle: Eternal Night
- Mobs: Hard Mobs
- Death Settings: Ironman

I'm also willing to do a perma-death series on hardcore if anyone is interested in following that. Thanks in advance for the interest, and I hope to see you on my channel.
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 20 18:23:48 2022

@author: olivi
"""
import random


def estimate_shared_birthday_probability(group_size=36, trials=1000000):
    """Monte-Carlo estimate of the birthday-problem probability.

    Simulates ``trials`` groups of ``group_size`` people, each with a
    uniformly random birthday in 1..365 (leap days ignored), and returns
    the fraction of groups in which at least two people share a birthday.

    The defaults reproduce the original script (36 people, one million
    trials). Using a set for the seen-birthday test makes each group
    O(group_size) instead of the original O(group_size**2) list scan.
    """
    matches = 0
    for _ in range(trials):
        seen = set()
        for _ in range(group_size):
            bday = random.randint(1, 365)
            if bday in seen:
                matches += 1
                break
            seen.add(bday)
    return matches / trials


if __name__ == "__main__":
    # Same behavior as the original script: print the estimate for the defaults.
    print(estimate_shared_birthday_probability())
import disjoint_sets as ds
import heapq as h


def kruskal(graph):
    """Build a minimum spanning forest of ``graph`` with Kruskal's algorithm.

    graph: adjacency mapping {node: {neighbor: weight}}; it appears each
    undirected edge is listed under both endpoints (duplicates are harmless
    because the cycle check below rejects the second copy).

    Returns an adjacency mapping of the same shape containing only the
    edges selected for the minimum spanning forest.
    """
    # graph = {edge: {edge: weight}}
    # Union-find structure over the nodes seen so far; presumably supports
    # membership (`in`), add(), connect() (union) and connected() (find).
    forest = ds.DisjointSets()
    mst = {}  # adjacency map of the forest built so far

    def add_edge(a, b, w):
        # Accept edge (a, b, w): merge the two components and mirror the
        # edge under both endpoints of the result map.
        forest.connect(a, b)
        if a not in mst:
            mst[a] = {}
        if b not in mst:
            mst[b] = {}
        mst[a][b] = w
        mst[b][a] = w

    # Min-heap keyed by weight, so edges pop in nondecreasing weight order.
    edge_queue = []
    for a, edges in graph.items():
        for b, weight in edges.items():
            h.heappush(edge_queue, (weight, (a, b)))

    while len(edge_queue) > 0:
        w, (a, b) = h.heappop(edge_queue)
        has_a = a in forest
        has_b = b in forest
        if has_a and has_b:
            # Both endpoints already known: keep the edge only if it joins
            # two different components (otherwise it would form a cycle).
            if not forest.connected(a, b):
                add_edge(a, b, w)
        else:
            # At least one endpoint is new; the edge cannot form a cycle,
            # so register the new node(s) and accept it.
            if not has_a:
                forest.add(a)
            if not has_b:
                forest.add(b)
            add_edge(a, b, w)

    return mst
/**
 * Documents the return value. Return value is described using the
 * {@code @return} annotation.
 *
 * @param description the return value's description
 * @return {@code true} if the description was recorded (or if documentation
 *         could not be initialized at all, in which case the call is treated
 *         as a no-op success); {@code false} if a return description was
 *         already present and is left unchanged
 */
boolean documentReturn(String description) {
    // Presumably lazyInitDocumentation() returns false when documentation
    // is unavailable/disabled — TODO confirm against its definition.
    if (!lazyInitDocumentation()) {
        return true;
    }
    // First writer wins: never overwrite an existing return description.
    if (documentation.returnDescription != null) {
        return false;
    }
    documentation.returnDescription = description;
    return true;
}
/**
 * The Actuator class is used to add or remove Voldemort nodes to the controlled Voldemort cluster.
 * The Actuator uses the rebalance tool provided by Voldemort to redistribute data.
 *
 * <p>Tasks are queued via {@link #scheduleRebalance(int, boolean)} and drained by a single
 * worker thread started from {@link #step()}; the {@code rebalancing} flag guarantees at most
 * one rebalance runs at a time.
 *
 * @author Ahmad Al-Shishtawy &lt;[email protected]&gt;
 *
 */
public class Actuator implements Runnable, Steppable {

    static Logger log = LoggerFactory.getLogger(Actuator.class);

//	private int nVMs;
//	private boolean limit; // limit the max delta VMs to add or remove

    // FIFO of pending resize requests; guarded by synchronized accessors.
    private LinkedList<Task> queue;

    Cluster cluster;

    private boolean createVMs; // if false then actuator will only rebalance and leave VMs
    private int voldMax; // Max vold cluster size
    private int voldMin; // Min vold cluster size
    private int voldDeltaMax; // max number of VMs to add or remove at one time

    // True while a rebalance worker is active; guarded by synchronized accessors.
    private boolean rebalancing = false;

    /**
     * @param cluster      the controlled Voldemort cluster
     * @param voldMin      minimum allowed store size (never shrink below this)
     * @param voldMax      maximum allowed store size (never grow above this)
     * @param voldDeltaMax cap on VMs added/removed per task when limiting is requested
     * @param createVMs    if false, only rebalance; never create or delete VMs
     */
    public Actuator(Cluster cluster, int voldMin, int voldMax, int voldDeltaMax, boolean createVMs) {
        this.cluster = cluster;
        // FIXME: change to setters and getters instead of using Props to make it more generic
        this.createVMs = createVMs;
        this.voldMax = voldMax;
        this.voldMin = voldMin;
        this.voldDeltaMax = voldDeltaMax;
        queue = new LinkedList<Actuator.Task>();
    }

    /**
     * Queues a resize request. The task is always enqueued, even while a
     * rebalance is running (the active worker will pick it up).
     *
     * @param nVMs  positive to add VMs, negative to remove
     * @param limit whether to cap |nVMs| at voldDeltaMax
     * @return false if a rebalance was already in progress when the task was queued
     */
    public synchronized boolean scheduleRebalance(int nVMs, boolean limit) {
        queue.add(new Task(nVMs, limit));
        if(rebalancing) {
            log.warn("The actuator is rebalancing! Can't have multiple rebalance instances at same time!");
            return false;
        } else {
            return true;
        }
    }

    /** @return the next queued task, or null if the queue is empty */
    private synchronized Task getNextTask() {
        if(queue.isEmpty()) {
            return null;
        }
        return queue.remove();
    }

    /**
     * @param rebalancing the new rebalancing state
     */
    private synchronized void setRebalancing(boolean rebalancing) {
        this.rebalancing = rebalancing;
    }

    /**
     * Changes the rebalancing flag only if it is different than the current status. Otherwise, return false
     *
     * @param rebalancing The new state
     * @return true if state was changed. false if the new state is the same as the current state
     */
    private synchronized boolean testAndSetRebalancing(boolean rebalancing){
        if(this.rebalancing == rebalancing) {
            return false;
        }
        this.rebalancing = rebalancing;
        return true;
    }

    /**
     * @return true while a rebalance worker thread is active
     */
    public synchronized boolean isRebalancing() {
        return this.rebalancing;
    }

    /**
     * Worker loop: drains the task queue, performing one bounded add/remove
     * plus Voldemort rebalance per task. Started from step(); at most one
     * instance runs at a time (enforced by testAndSetRebalancing).
     */
    public void run() {
        // Just an extra check; should not happen
        if(!testAndSetRebalancing(true)) {
            log.error("The actuator is rebalancing! Can not have two rebalance operations in parallel according to Voldemort specifications!");
            return;
        }
        Task t;
        while((t=getNextTask()) != null) {
            int nVMs = t.getnVMs();
            boolean limit = t.getLimit();
            if(nVMs > 0) { // add mode
                // 1 - Bound nVMs
                if(limit && nVMs > voldDeltaMax) { // TODO: check if good idea
                    nVMs = voldDeltaMax;
                }
                if(cluster.getActiveVoldVMsCount()+nVMs > voldMax) { // the max size allowed for the Voldemort store
                    nVMs = voldMax - cluster.getActiveVoldVMsCount();
                }
                if(nVMs<=0) { // will never be <0 but might equal 0
                    setRebalancing(false);
                    return;
                }
                cluster.setActiveVoldVMsCount(cluster.getActiveVoldVMsCount()+nVMs);
                // 2 - Create new VMs
                if(createVMs) {
                    cluster.createVMs(nVMs, cluster.getVoldPrefix(), cluster.getVoldImage(), cluster.getVoldFlavor());
                    // wait to finish creation
                    cluster.waitCreating();
                    // then wait a bit more for voldemort & os
                    // FIXME: wait for things to finish
                    try {
                        Thread.sleep(2*60*1000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                // 3 - start rebalancing
                // cluster.getSync().reset();
                cluster.genCluster(cluster.getActiveVoldVMsCount()); // new cluster to move to
                //for (int i = 0; i < 5; i++) { // try rebalancing x times FIXME:This is not needed now after updating the rebalance script
                long rebStart = System.nanoTime();
                try {
                    // Delegates the actual data movement to the external script.
                    Process p=null;
                    p=Runtime.getRuntime().exec("./myrebalance");
                    p.waitFor();
                } catch (IOException e) {
                    log.error("Rebalance didn't work!");
                    log.error(e.getMessage());
                } catch (InterruptedException e) {
                    log.error("Rebalance didn't work!");
                    log.error(e.getMessage());
                }
                long rebEnd = System.nanoTime();
                long rebTime = (rebEnd-rebStart)/1000000000; // in seconds
                log.info("Rebalance finished in {} sec", rebTime);
                // if(rebTime > 30) {// if takes less that 30 secs then probably it failed!
                //     break;
                // }
                // try {
                //     Thread.sleep(1000);
                // } catch (InterruptedException e) {
                //     e.printStackTrace();
                // }
                //}
            } else if (nVMs < 0 && cluster.getActiveVoldVMsCount() > voldMin) { //remove only if I have more than 3 nodes
                // 1 - Bound nVMs
                if((cluster.getActiveVoldVMsCount() + nVMs)<voldMin) { // note that nVMs is negative, remove
                    nVMs = voldMin - cluster.getActiveVoldVMsCount();
                }
                if(limit && nVMs < -1*voldDeltaMax) { // FIXME: check if good idea
                    nVMs = -1*voldDeltaMax; // good to have lower bound
                }
                if(nVMs>=0) { // will never be >0 but might equal 0
                    setRebalancing(false);
                    return;
                }
                cluster.setActiveVoldVMsCount(cluster.getActiveVoldVMsCount()+nVMs);; // note that nVMs is negative
                // 2 - start rebalancing (shrink the target cluster BEFORE deleting VMs
                // so data is moved off the doomed nodes first)
                // App.http("reset", "1");
                // App.updateVMs();
                cluster.genCluster(cluster.getActiveVoldVMsCount()); // new cluster to move to
                // for (int i = 0; i < 5; i++) { // try rebalancing x times FIXME:This is not needed now after updating the rebalance script
                long rebStart = System.nanoTime();
                try {
                    Process p=null;
                    p=Runtime.getRuntime().exec("./myrebalance");
                    p.waitFor();
                } catch (IOException e) {
                    log.error("Rebalance didn't work!");
                    log.error(e.getMessage());
                } catch (InterruptedException e) {
                    log.error("Rebalance didn't work!");
                    log.error(e.getMessage());
                }
                long rebEnd = System.nanoTime();
                long rebTime = (rebEnd-rebStart)/1000000000; // in seconds
                System.out.println("Rebalance finished in " + rebTime + " sec");
                // if(rebTime > 30) {// if takes less that 30 secs then probably it failed!
                //     break;
                // }
                // try {
                //     Thread.sleep(1000);
                // } catch (InterruptedException e) {
                //     e.printStackTrace();
                // }
                // }
                // 3 - Delete extra VMs
                // The active count was already reduced above, so this walks the
                // indices of the VMs that were just rebalanced away.
                if(createVMs) {
                    for (int i = cluster.getActiveVoldVMsCount()-nVMs-1; i >= cluster.getActiveVoldVMsCount(); i--) { // nVMs is negative
                        cluster.deleteVMs(cluster.getVoldPrefix()+i);
                        try {
                            Thread.sleep(1000); //don't delete too fast! maybe things will crash
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
            // FIXME: Wait for system to settle after rebalance
            try {
                Thread.sleep(120*1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        setRebalancing(false);
    }

    /**
     * Process the current time step
     *
     * @see cloud.elasticity.elastman.Steppable#step()
     */
    public synchronized void step() {
        if(rebalancing || queue.isEmpty()) {
            // if no rebalance tasks then return
            // if a rebalance is ongoing then no need to start a
            // new instance as the current instance
            // will take care of tasks in the queue
            return;
        }
        new Thread(this).start();
    }

    /**
     * @return whether this actuator creates/deletes VMs in addition to rebalancing
     */
    public boolean isCreateVMs() {
        return createVMs;
    }

    /**
     * @param createVMs whether this actuator should create/delete VMs
     */
    public void setCreateVMs(boolean createVMs) {
        this.createVMs = createVMs;
    }

    /** A queued resize request: how many VMs to add/remove and whether to cap the delta. */
    class Task {
        private int nVMs;
        private boolean limit;

        public Task(int nVMs, boolean limit) {
            this.nVMs = nVMs;
            this.limit = limit;
        }

        public boolean getLimit() {
            return limit;
        }

        public int getnVMs() {
            return nVMs;
        }
    }
}
// Get the score of a Bigram. If the Bigram was // not found, the score will be 0. func (b *Bigrams) ScoreForBigram(other Bigram) float64 { score, has := b.data[other.GetKey()] if !has { return 0 } return score }
<gh_stars>100-1000 // Copyright 2021 Northern.tech AS // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package test import ( "bytes" "crypto/tls" "encoding/json" "io" "io/ioutil" "net/http" "net/http/httptest" "net/url" "reflect" "strconv" "strings" "github.com/mendersoftware/mender/app/updatecontrolmap" "github.com/mendersoftware/mender/client" "github.com/mendersoftware/mender/datastore" log "github.com/sirupsen/logrus" ) type updateType struct { Has bool Data datastore.UpdateInfo Unauthorized bool Called bool Current *client.CurrentUpdate ControlMap *updatecontrolmap.UpdateControlMap } type updateDownloadType struct { Called bool Data bytes.Buffer } type authType struct { Authorize bool Token []byte Called bool Verify bool } type statusType struct { Status string Aborted bool Called bool } type logType struct { Called bool Logs []byte } type inventoryType struct { Called bool Attrs []client.InventoryAttribute } type ClientTestServer struct { *httptest.Server Update updateType UpdateDownload updateDownloadType Auth authType Status statusType Log logType Inventory inventoryType } type Options struct { // TLSConfig specifies an optional tls.Config to use on // ClientTestServer.ServeTLS. 
TLSConfig *tls.Config } func NewClientTestServer(options ...Options) *ClientTestServer { var opts Options for _, opt := range options { if opt.TLSConfig != nil { opts.TLSConfig = opt.TLSConfig } } cts := &ClientTestServer{} mux := http.NewServeMux() mux.HandleFunc("/api/devices/v1/authentication/auth_requests", cts.authReq) mux.HandleFunc("/api/devices/v1/inventory/device/attributes", cts.inventoryReq) mux.HandleFunc("/api/devices/v1/deployments/device/deployments/next", cts.updateReq) // mux.HandleFunc("/api/devices/v1/deployments/device/deployments/%s/log", cts.logReq) // mux.HandleFunc("/api/devices/v1/deployments/device/deployments/%s/status", cts.statusReq) mux.HandleFunc("/api/devices/v1/deployments/device/deployments/", cts.deploymentsReq) mux.HandleFunc("/api/devices/v1/download", cts.updateDownloadReq) mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { log.Infof("fallback request handler, request %v", r) w.WriteHeader(http.StatusBadRequest) }) cts.Server = httptest.NewUnstartedServer(mux) if opts.TLSConfig != nil { cts.Server.TLS = opts.TLSConfig cts.Server.StartTLS() } else { cts.Server.Start() } return cts } func writeJSON(out io.Writer, data interface{}) error { enc := json.NewEncoder(out) return enc.Encode(data) } func fromJSON(in io.Reader, data interface{}) error { dec := json.NewDecoder(in) return dec.Decode(data) } func (cts *ClientTestServer) Reset() { cts.Update = updateType{} cts.UpdateDownload = updateDownloadType{} cts.Auth = authType{} cts.Log = logType{} cts.Inventory = inventoryType{} cts.Status = statusType{} } func isMethod(method string, w http.ResponseWriter, r *http.Request) bool { if r.Method != method { log.Errorf("method verification failed, expected %v got %v", method, r.Method) w.WriteHeader(http.StatusMethodNotAllowed) return false } return true } func isContentType(ct string, w http.ResponseWriter, r *http.Request) bool { rct := r.Header.Get("Content-Type") if ct != rct { log.Errorf("content-type verification 
failed, expected %v got %v", ct, rct) w.WriteHeader(http.StatusUnsupportedMediaType) return false } return true } // verifyAuth checks that client is authorized and returns false if not. // ClientTestServer.Auth.Verify must be true for verification to take place. // Client token must match ClientTestServer.Auth.Token. func (cts *ClientTestServer) verifyAuth(w http.ResponseWriter, r *http.Request) bool { if cts.Auth.Verify { hv := r.Header.Get("Authorization") if hv == "" { log.Errorf("no authorization header") w.WriteHeader(http.StatusUnauthorized) return false } if !strings.HasPrefix(hv, "Bearer ") { log.Errorf("bad authorization value: %v", hv) w.WriteHeader(http.StatusUnauthorized) return false } s := strings.SplitN(hv, " ", 2) tok := s[1] if !bytes.Equal(cts.Auth.Token, []byte(tok)) { log.Errorf("bad token, got %s expected %s", hv, cts.Auth.Token) w.WriteHeader(http.StatusUnauthorized) return false } } return true } func (cts *ClientTestServer) authReq(w http.ResponseWriter, r *http.Request) { log.Infof("got auth request %v", r) cts.Auth.Called = true if !isMethod(http.MethodPost, w, r) { return } if !isContentType("application/json", w, r) { return } if cts.Auth.Authorize { w.WriteHeader(http.StatusOK) if cts.Auth.Token != nil { w.Header().Set("Content-Type", "text/plain") w.Write(cts.Auth.Token) } } else { w.WriteHeader(http.StatusUnauthorized) } } func (cts *ClientTestServer) inventoryReq(w http.ResponseWriter, r *http.Request) { log.Infof("got inventory request %v", r) cts.Inventory.Called = true if !isMethod(http.MethodPut, w, r) { return } if !isContentType("application/json", w, r) { return } if !cts.verifyAuth(w, r) { return } var attrs []client.InventoryAttribute if err := fromJSON(r.Body, &attrs); err != nil { log.Errorf("failed to parse attrs data: %v", err) w.WriteHeader(http.StatusBadRequest) return } log.Infof("got attrs: %v", attrs) cts.Inventory.Attrs = attrs w.WriteHeader(http.StatusOK) } func (cts *ClientTestServer) deploymentsReq(w 
http.ResponseWriter, r *http.Request) { log.Infof("got deployments log/status request %v", r) p := r.URL.Path s := strings.TrimPrefix(p, "/api/devices/v1/deployments/device/deployments/") if s == p { // unchanged, was no prefix? w.WriteHeader(http.StatusBadRequest) return } log.Infof("request for %v", s) idwhat := strings.SplitN(s, "/", 2) id := idwhat[0] what := idwhat[1] switch { case what == "log": cts.logReq(w, r, id) case what == "status": cts.statusReq(w, r, id) default: w.WriteHeader(http.StatusBadRequest) } } func (cts *ClientTestServer) logReq(w http.ResponseWriter, r *http.Request, id string) { log.Infof("got log request deployment ID: %v, %v", id, r) cts.Log.Called = true if !isMethod(http.MethodPut, w, r) { return } if !isContentType("application/json", w, r) { return } if !cts.verifyAuth(w, r) { return } logs, err := ioutil.ReadAll(r.Body) if err != nil { log.Errorf("error when receiving logs: %v", err) w.WriteHeader(http.StatusBadRequest) return } log.Infof("got logs: %v", logs) cts.Log.Logs = logs w.WriteHeader(http.StatusNoContent) } func (cts *ClientTestServer) statusReq(w http.ResponseWriter, r *http.Request, id string) { log.Infof("got status request deployment ID: %v, %v", id, r) cts.Status.Called = true if !isMethod(http.MethodPut, w, r) { return } if !isContentType("application/json", w, r) { return } if !cts.verifyAuth(w, r) { return } if cts.Status.Aborted { w.WriteHeader(http.StatusConflict) return } var report client.StatusReport if err := fromJSON(r.Body, &report); err != nil { log.Errorf("failed to parse status data: %v", err) w.WriteHeader(http.StatusBadRequest) return } cts.Status.Status = report.Status w.WriteHeader(http.StatusNoContent) } func urlQueryToCurrentUpdate(vals url.Values) client.CurrentUpdate { cur := client.CurrentUpdate{ Artifact: vals.Get("artifact_name"), DeviceType: vals.Get("device_type"), } return cur } func (cts *ClientTestServer) updateReq(w http.ResponseWriter, r *http.Request) { var ok bool var current 
client.CurrentUpdate log.Infof("got update request %v", r) cts.Update.Called = true // Enterprise client device provides post is not supported yet if r.Method == "POST" { if !cts.verifyAuth(w, r) { return } body, err := ioutil.ReadAll(r.Body) if err != nil { w.WriteHeader(500) w.Write([]byte(err.Error())) return } err = json.Unmarshal(body, &current) if err != nil { w.WriteHeader(400) w.Write([]byte(err.Error())) return } if current.Artifact, ok = current. Provides["artifact_name"]; !ok { w.WriteHeader(400) w.Write([]byte("artifact_name missing from payload")) return } if current.DeviceType, ok = current. Provides["device_type"]; ok { w.WriteHeader(400) w.Write([]byte("device_type missing from payload")) return } if !reflect.DeepEqual(current, *cts.Update.Current) { log.Errorf("incorrect current update info, got %+v, expected %+v", current, *cts.Update.Current) w.WriteHeader(http.StatusBadRequest) return } } else if !isMethod(http.MethodGet, w, r) { return } else { if !cts.verifyAuth(w, r) { return } log.Infof("Valid update request GET: %v", r) log.Infof("parsed URL query: %v", r.URL.Query()) if current := urlQueryToCurrentUpdate(r.URL.Query()); !reflect.DeepEqual(current, *cts.Update.Current) { log.Errorf("incorrect current update info, got %+v, expected %+v", current, *cts.Update.Current) w.WriteHeader(http.StatusBadRequest) return } } switch { case cts.Update.Unauthorized: w.WriteHeader(http.StatusUnauthorized) case !cts.Update.Has: w.WriteHeader(http.StatusNoContent) case cts.Update.Has: w.WriteHeader(http.StatusOK) if cts.Update.Data.ID == "" { cts.Update.Data.ID = "foo" } if cts.Update.Data.ArtifactName() == "" { cts.Update.Data.Artifact.ArtifactName = "foo" } if cts.Update.Data.URI() == "" { cts.Update.Data.Artifact.Source.URI = cts.URL + "/download" } if len(cts.Update.Data.Artifact.CompatibleDevices) == 0 { cts.Update.Data.Artifact.CompatibleDevices = []string{"vexpress"} } var ud struct { *datastore.UpdateInfo ControlMap *updatecontrolmap.UpdateControlMap 
`json:"update_control_map"` } ud.UpdateInfo = &cts.Update.Data if cts.Update.ControlMap != nil { ud.ControlMap = cts.Update.ControlMap } w.Header().Set("Content-Type", "application/json") writeJSON(w, &ud) default: log.Errorf("Unrecognized update status: %v", cts.Update) } } func (cts *ClientTestServer) updateDownloadReq(w http.ResponseWriter, r *http.Request) { log.Infof("got update download request %v", r) cts.UpdateDownload.Called = true if !isMethod(http.MethodGet, w, r) { return } // fetch should not carry Authorization header hv := r.Header.Get("Authorization") if hv != "" { w.WriteHeader(http.StatusBadRequest) } w.Header().Set("Content-Length", strconv.Itoa(cts.UpdateDownload.Data.Len())) w.Header().Set("Content-Type", "application/octet-stream") w.WriteHeader(http.StatusOK) io.Copy(w, &cts.UpdateDownload.Data) }
def resolve_mark_agree(mark, lex):
    """Resolve the agreement class(es) for a markable.

    Tries, in order: explicit morphology on the head token, pronoun lookup,
    proper-name lookups, a POS-based mapping, and finally the known-entity
    tables (which record results on ``mark`` instead of returning them).

    Returns a list of agreement values when a lookup succeeds, otherwise
    None; the entity branches populate mark.agree / mark.alt_agree as a
    side effect and still return None.
    """
    if mark.text == lex.debug["ana"]:
        # Debugger anchor: set a breakpoint here to stop on the markable
        # configured in lex.debug["ana"]. (Replaces the leftover dead
        # assignment `a = 5` that served the same purpose.)
        pass
    if mark.head.morph not in ["", "_"]:
        # The head token carries explicit morphology; trust it outright.
        mark.agree_certainty = "head_morph"
        return [mark.head.morph]
    if mark.form == "pronoun":
        # Pronoun lexicon lookup: exact form first, then lowercased.
        if mark.text in lex.pronouns:
            return lex.pronouns[mark.text]
        elif mark.text.lower() in lex.pronouns:
            return lex.pronouns[mark.text.lower()]
    if mark.form == "proper":
        if mark.core_text in lex.names:
            return [lex.names[mark.core_text]]
        elif (mark.core_text in lex.first_names
              and mark.core_text not in lex.entities
              and mark.core_text not in lex.entity_heads):
            # Fall back to the first-name list only when the span is not
            # also listed as an entity (entity data is more specific).
            return [lex.first_names[mark.core_text]]
    if mark.head.pos in lex.pos_agree_mappings:
        mark.agree_certainty = "pos_agree_mappings"
        return [lex.pos_agree_mappings[mark.head.pos]]
    elif mark.core_text in lex.entities:
        # Entity entries look like "<x>\t<class>/<agree>"; collect every
        # agreement value, keeping the first one found as the primary.
        for full_entry in lex.entities[mark.core_text]:
            entry = full_entry.split("\t")[1]
            if "/" in entry:
                agree_value = entry[entry.find("/") + 1:]
                if mark.agree == "":
                    mark.agree = agree_value
                mark.alt_agree.append(agree_value)
    elif mark.head.text in lex.entity_heads:
        # Same entry format as lex.entities, keyed by the head token.
        for full_entry in lex.entity_heads[mark.head.text]:
            entry = full_entry.split("\t")[1]
            if "/" in entry:
                agree_value = entry[entry.find("/") + 1:]
                if mark.agree == "":
                    mark.agree = agree_value
                mark.alt_agree.append(agree_value)
    # No lookup produced an explicit result (entity branches only set
    # attributes); make the implicit None fallthrough explicit.
    return None
import * as faker from 'faker';
import * as initUtils from '../../../src/utils/googleAnalytics/initialize';

// DOM id that initialize() gives the injected gtag <script> tag.
const SCRIPT_ID = 'ga-gtag';

describe('googleAnalyticsHelper.initialize', () => {
  // Shared fixture: fakes timers at a random date, spies on the gtag shim,
  // the DOM entry points and the console, and returns all spies.
  const setUp = () => {
    const trackingId = faker.lorem.word();
    const mockDate = faker.datatype.datetime();
    jest.useFakeTimers();
    jest.setSystemTime(mockDate);

    const gtagSpy = jest.spyOn(initUtils, 'gtag');
    const getElementByIdSpy = jest.spyOn(document, 'getElementById');
    const createElementSpy = jest.spyOn(document, 'createElement');
    // NOTE(review): this spies on 'createElement', not 'insertBefore', so the
    // `expect(insertBeforeSpy).toBeCalledTimes(1)` assertion below only
    // re-checks createElement and never verifies the actual DOM insertion.
    // Looks like a copy-paste slip — confirm and spy on insertBefore instead.
    const insertBeforeSpy = jest.spyOn(document, 'createElement');
    const consoleInfoSpy = jest.spyOn(console, 'info');
    const consoleWarnSpy = jest.spyOn(console, 'warn');

    return {
      trackingId,
      mockDate,
      gtagSpy,
      getElementByIdSpy,
      createElementSpy,
      insertBeforeSpy,
      consoleInfoSpy,
      consoleWarnSpy,
    };
  };

  test('should return if script element already exists', () => {
    const {trackingId, getElementByIdSpy, createElementSpy} = setUp();
    // Pretend the script tag is already in the document.
    getElementByIdSpy.mockReturnValue({innerHTML: 'EXIST'} as HTMLElement);

    initUtils.initialize(trackingId);

    // initialize() must bail out without creating a second tag.
    expect(getElementByIdSpy).toHaveBeenCalledWith(SCRIPT_ID);
    expect(createElementSpy).toBeCalledTimes(0);
  });

  test('should create correct script element', () => {
    const {trackingId, getElementByIdSpy, insertBeforeSpy, gtagSpy, consoleInfoSpy, mockDate, createElementSpy} =
      setUp();

    initUtils.initialize(trackingId);

    expect(getElementByIdSpy).toHaveBeenCalledWith(SCRIPT_ID);
    expect(createElementSpy).toHaveBeenCalledWith('script');
    expect(insertBeforeSpy).toBeCalledTimes(1);
    // gtag must be primed with the (faked) current time, then configured.
    expect(gtagSpy).toHaveBeenNthCalledWith(1, 'js', mockDate);
    expect(gtagSpy).toHaveBeenNthCalledWith(2, 'config', trackingId, undefined);

    const scriptElement = document.getElementById(SCRIPT_ID) as HTMLScriptElement;
    // Fire the 'load' handler synchronously to exercise the success path.
    scriptElement.addEventListener = jest.fn().mockImplementationOnce((_, callback) => callback());
    scriptElement.dispatchEvent(new Event('load'));
    expect(consoleInfoSpy).toHaveBeenCalledTimes(1);
    expect(scriptElement.id).toEqual(SCRIPT_ID);
    expect(scriptElement.type).toEqual('text/javascript');
    expect(scriptElement.async).toEqual(true);
    expect(scriptElement.src).toContain(trackingId);
  });

  test('should warn if failed to create correct script element', () => {
    const {trackingId, consoleWarnSpy} = setUp();

    initUtils.initialize(trackingId);

    const scriptElement = document.getElementById(SCRIPT_ID) as HTMLScriptElement;
    // Fire the 'error' handler synchronously to exercise the failure path.
    scriptElement.addEventListener = jest.fn().mockImplementationOnce((_, callback) => callback());
    scriptElement.dispatchEvent(new Event('error'));
    expect(consoleWarnSpy).toHaveBeenCalledTimes(1);
  });
});
#include<bits/stdc++.h>
using namespace std;
#define int long long
#define mod 1000000007
#define endl '\n'

// a     : the input permutation, 0-indexed
// b     : sparse table of range minimums over a (b[i][j] = min of a[i..i+2^j-1])
// c[v]  : length of the widest window in which value v is the minimum
// power : precomputed integer powers of two
int a[300005], b[300005][20], c[300005], power[30];

// Precompute power[i] = 2^i for i in [0, 30).
void powerCal()
{
    power[0] = 1;
    for (int i = 1; i < 30; i++)
    {
        power[i] = power[i - 1] * 2;
    }
}

// Build the sparse table over a[0..N-1] so rangeMin() can answer minimum
// queries in O(1).
// FIX: was declared `int` but never returned a value, which is undefined
// behaviour in C++ — it computes nothing to return, so it is now void.
// Also replaced floating-point pow() with the exact integer power[] table.
void sparshTable(int N)
{
    for (int j = 0; j < 20; j++)
    {
        for (int i = 0; i < N; i++)
        {
            if (j == 0)
            {
                b[i][j] = a[i];
            }
            else
            {
                if (i + power[j] - 1 < N)
                {
                    b[i][j] = min(b[i][j - 1], b[i + power[j - 1]][j - 1]);
                }
                else
                {
                    break;
                }
            }
        }
    }
}

// Minimum of a[i..j] (inclusive) via two overlapping sparse-table blocks.
int rangeMin(int i, int j)
{
    int t = j + 1 - i;
    int l = log2(t);
    return min(b[i][l], b[j + 1 - power[l]][l]);
}

// Largest index r >= i such that a[i] is the minimum of a[i..r]
// (binary search on the monotone predicate rangeMin(i, mid) >= a[i]).
int next(int i, int N)
{
    int l = i;
    int r = N - 1, ans = i;
    while (l <= r)
    {
        int mid = (l + r) / 2;
        if (rangeMin(i, mid) < a[i])
        {
            r = mid - 1;
        }
        else
        {
            ans = mid;
            l = mid + 1;
        }
    }
    return ans;
}

// Smallest index p <= i such that a[i] is the minimum of a[p..i].
int previous(int i, int N)
{
    int l = 0;
    int r = i, ans = i;
    while (l <= r)
    {
        int mid = (l + r) / 2;
        if (rangeMin(mid, i) < a[i])
        {
            l = mid + 1;
        }
        else
        {
            ans = mid;
            r = mid - 1;
        }
    }
    return ans;
}

// For every value a[i], record the size of the widest window in which it is
// the minimum. (Removed the unused local `p = a[i]`.)
void windowRange(int N)
{
    for (int i = 0; i < N; i++)
    {
        int t1 = next(i, N);
        int t2 = previous(i, N);
        c[a[i]] = max(c[a[i]], t1 + 1 - t2);
    }
}

int32_t main()
{
    ios_base::sync_with_stdio(0), cin.tie(0), cout.tie(0);
    int T = 1;
    cin >> T;
    powerCal();
    // This could also be solved with a monotonic stack
    // (next/previous smaller element) to find each window size.
    while (T--)
    {
        int N;
        cin >> N;
        memset(c, 0, sizeof c);
        for (int i = 0; i < N; i++)
        {
            cin >> a[i];
        }
        sparshTable(N);
        windowRange(N);
        // Prefix-minimise: after this loop, c[i] = min(c[1..i]).
        for (int i = 1; i <= N; i++)
        {
            c[i] = min(((i - 1 <= 0) ? INT_MAX : c[i - 1]), c[i]);
        }
        // Build the answer string; t counts down from N, and the string is
        // reversed at the end to restore window-size order.
        string s1;
        int t = N;
        for (int i = 1; i <= N; i++)
        {
            if (c[i] >= t)
            {
                s1 += '1';
            }
            else
            {
                s1 += '0';
            }
            t--;
        }
        reverse(s1.begin(), s1.end());
        cout << s1 << endl;
    }
    return 0;
}
Uneasy alliances: managed care plans formed by safety-net providers. Health care providers that have traditionally served the poor are forming their own managed care plans, often in alliance with local safety-net peers. These alliances make it easier to raise needed capital, increase the pool of likely enrollees, and enable plans to benefit from efficiencies of scale. At the same time, however, the alliances often are undermined by conflicts of interest among the different sponsors and between the sponsors and the plan. This paper suggests that these plans are most likely to do well when the state makes special efforts to help and when plans have the leadership and financial reserves to take advantage of their supportive state policies.
<filename>src/java/org/apache/cassandra/utils/btree/Path.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.utils.btree; import java.util.Comparator; import static org.apache.cassandra.utils.btree.BTree.MAX_DEPTH; import static org.apache.cassandra.utils.btree.BTree.getBranchKeyEnd; import static org.apache.cassandra.utils.btree.BTree.getKeyEnd; import static org.apache.cassandra.utils.btree.BTree.getLeafKeyEnd; import static org.apache.cassandra.utils.btree.BTree.isLeaf; /** * An internal class for searching and iterating through a tree. As it traverses the tree, * it adds the nodes visited to a stack. This allows us to backtrack from a child node * to its parent. * * As we navigate the tree, we destructively modify this stack. * * Path is only intended to be used via Cursor. 
 */
class Path
{
    // operations corresponding to the ones in NavigableSet
    static enum Op
    {
        CEIL,   // the least element greater than or equal to the given element
        FLOOR,  // the greatest element less than or equal to the given element
        HIGHER, // the least element strictly greater than the given element
        LOWER   // the greatest element strictly less than the given element
    }

    /**
     * Allocates a fresh Path with room for the deepest tree the BTree can produce.
     */
    static Path newPath()
    {
        // try to encourage stack allocation - probably misguided/unnecessary. but no harm
        Object[][] path = new Object[MAX_DEPTH][];
        byte[] index = new byte[MAX_DEPTH];
        return new Path(path, index);
    }

    // the path to the searched-for key
    final Object[][] path;
    // the index within the node of our path at a given depth
    final byte[] indexes;
    // current depth. nothing in path[i] for i > depth is valid.
    byte depth;

    Path(Object[][] path, byte[] indexes)
    {
        this.path = path;
        this.indexes = indexes;
    }

    /**
     * Find the provided key in the tree rooted at node, and store the root to it in the path
     *
     * @param node the tree to search in
     * @param comparator the comparator defining the order on the tree
     * @param target the key to search for
     * @param mode the type of search to perform
     * @param forwards if the path should be setup for forward or backward iteration
     * @param <V>
     */
    <V> void find(Object[] node, Comparator<V> comparator, Object target, Op mode, boolean forwards)
    {
        // TODO : should not require parameter 'forwards' - consider modifying index to represent both
        // child and key position, as opposed to just key position (which necessitates a different value depending
        // on which direction you're moving in. Prerequisite for making Path public and using to implement general
        // search
        depth = -1;
        while (true)
        {
            int keyEnd = getKeyEnd(node);

            // search for the target in the current node
            int i = BTree.find(comparator, target, node, 0, keyEnd);

            if (i >= 0)
            {
                // exact match. transform exclusive bounds into the correct index by moving back or forwards one
                push(node, i);
                switch (mode)
                {
                    case HIGHER:
                        successor();
                        break;
                    case LOWER:
                        predecessor();
                }
                return;
            }

            // traverse into the appropriate child
            if (!isLeaf(node))
            {
                // convert the insertion point returned by BTree.find into a child index
                i = -i - 1;
                push(node, forwards ? i - 1 : i);
                node = (Object[]) node[keyEnd + i];
                continue;
            }

            // bottom of the tree and still not found. pick the right index to satisfy Op
            i = -i - 1;
            switch (mode)
            {
                case FLOOR:
                case LOWER:
                    // exclusive/floor searches want the key BEFORE the insertion point
                    i--;
            }

            if (i < 0)
            {
                // ran off the front of this leaf: the answer (if any) precedes it
                push(node, 0);
                predecessor();
            }
            else if (i >= keyEnd)
            {
                // ran off the end of this leaf: the answer (if any) follows it
                push(node, keyEnd - 1);
                successor();
            }
            else
            {
                push(node, i);
            }
            return;
        }
    }

    // true iff the stack currently holds only the root node
    private boolean isRoot()
    {
        return depth == 0;
    }

    // discard the deepest node on the stack
    private void pop()
    {
        depth--;
    }

    // the node at the top of the stack (the deepest node visited so far)
    Object[] currentNode()
    {
        return path[depth];
    }

    // the key index within currentNode() that this path currently points at
    byte currentIndex()
    {
        return indexes[depth];
    }

    // descend: record node (and a key index within it) as the new deepest stack entry
    private void push(Object[] node, int index)
    {
        path[++depth] = node;
        indexes[depth] = (byte) index;
    }

    // repoint the current depth at a different key index within the same node
    void setIndex(int index)
    {
        indexes[depth] = (byte) index;
    }

    // move to the next key in the tree
    void successor()
    {
        Object[] node = currentNode();
        int i = currentIndex();

        if (!isLeaf(node))
        {
            // if we're on a key in a branch, we MUST have a descendant either side of us,
            // so we always go down the left-most child until we hit a leaf
            node = (Object[]) node[getBranchKeyEnd(node) + i + 1];
            while (!isLeaf(node))
            {
                push(node, -1);
                node = (Object[]) node[getBranchKeyEnd(node)];
            }
            push(node, 0);
            return;
        }

        // if we haven't reached the end of this leaf, just increment our index and return
        i += 1;
        if (i < getLeafKeyEnd(node))
        {
            // moved to the next key in the same leaf
            setIndex(i);
            return;
        }

        // we've reached the end of this leaf,
        // so go up until we reach something we've not finished visiting
        while (!isRoot())
        {
            pop();
            i = currentIndex() + 1;
            node = currentNode();
            if (i < getKeyEnd(node))
            {
                setIndex(i);
                return;
            }
        }

        // we've visited the last key in the root node, so we're done
        setIndex(getKeyEnd(node));
    }

    // move to the previous key in the tree
    void predecessor()
    {
        Object[] node = currentNode();
        int i = currentIndex();

        if (!isLeaf(node))
        {
            // if we're on a key in a branch, we MUST have a descendant either side of us
            // so we always go down the right-most child until we hit a leaf
            node = (Object[]) node[getBranchKeyEnd(node) + i];
            while (!isLeaf(node))
            {
                i = getBranchKeyEnd(node);
                push(node, i);
                node = (Object[]) node[i * 2];
            }
            push(node, getLeafKeyEnd(node) - 1);
            return;
        }

        // if we haven't reached the beginning of this leaf, just decrement our index and return
        i -= 1;
        if (i >= 0)
        {
            setIndex(i);
            return;
        }

        // we've reached the beginning of this leaf,
        // so go up until we reach something we've not finished visiting
        while (!isRoot())
        {
            pop();
            i = currentIndex() - 1;
            if (i >= 0)
            {
                setIndex(i);
                return;
            }
        }

        // we've visited the last key in the root node, so we're done
        setIndex(-1);
    }

    // the key object this path currently points at
    Object currentKey()
    {
        return currentNode()[currentIndex()];
    }

    // compares the positions of two paths over the same tree; negative if this precedes that
    // when iterating in the given direction
    int compareTo(Path that, boolean forwards)
    {
        int d = Math.min(this.depth, that.depth);
        for (int i = 0; i <= d; i++)
        {
            int c = this.indexes[i] - that.indexes[i];
            if (c != 0)
                return c;
        }
        // identical indices up to depth, so if somebody is lower depth they are on a later item if iterating forwards
        // and an earlier item if iterating backwards, as the node at max common depth must be a branch if they are
        // different depths, and branches that are currently descended into lag the child index they are in when iterating forwards,
        // i.e. if they are in child 0 they record an index of -1 forwards, or 0 when backwards
        d = this.depth - that.depth;
        return forwards ? d : -d;
    }
}
If you have ever visited the Magic City, you might have toured the Birmingham Museum of Art, or Vulcan, or the Holocaust Education Center. President Trump's budget proposal could affect all of those, with its plans to slash the National Endowment for the Arts and the National Endowment for the Humanities. The Director, Gail Andrews, said taxpayers would lose more in culture than in money under this plan. Each person at home pays less than a dollar a year to the two agencies in taxes. Andrews said that by taking this small investment away from the public, you take away the chance to learn about the world from any of their exhibits. For years, visitors of all ages have come to tour the Birmingham Museum of Art to learn a painting's history or the cultural significance of a statue. Andrews said federal budget proposals slashing the NEA and NEH concerned her. Andrews told ABC 33/40, "I think we really hold as a core value access to the arts and humanities. As part of core values as human beings." The museum's annual budget is $6.8 million. The NEA funds the Alabama State Council on the Arts. The council then funds the museum; that funding can vary from $100,000 to $160,000 a year — a major potential loss. "It would have a significant impact," said Andrews. That money goes to educational programs, school tours and projects for the visually impaired. Andrews worried more about the cultural impact. She cited the example of a child. "They were touring the whole museum," said Andrews. "And as he was leaving the whole museum, he looked up at his docent" — one of the teachers and guides who take children and adults through the museum — "and said, 'I didn't know the world is so big.'" Nancy Sloan is one of those docents, who volunteers her time to explain the meaning behind the exhibits. Sloan said the thought of a child not having this option breaks her heart. Sloan told ABC 33/40, "I think the arts are very important in the community. I would be very distressed."
The budget will not be passed until later this year, and some have asked Andrews why the museum can't simply find the money from private donors. In fact, private donations already make up half of the annual budget. But Andrews made it clear that losing federal support would make the museum struggle mightily. For example, the Executive Director of the Alabama Humanities Foundation told us it gives $3,000 to $7,000 a year to the museum. That AHF funding comes from the NEH and would no longer be there. As of now, the art museum remains free to the public.
import { Schema } from '../schema';
import { V1PageCell } from './v1PageCell';

/**
 * Request payload for the V1 "update page cell" operation.
 * NOTE(review): looks like a generated declaration file — confirm before
 * hand-editing.
 */
export interface V1UpdatePageCellRequest {
  /** V1PageCell — the cell data to apply to the page. */
  body: V1PageCell;
}

/**
 * Runtime schema describing V1UpdatePageCellRequest (presumably used by the
 * client's serialization layer in '../schema' — verify against callers).
 */
export declare const v1UpdatePageCellRequestSchema: Schema<V1UpdatePageCellRequest>;