Dataset schema (column, type, value range):

  code        string   lengths 3 to 1.05M
  repo_name   string   lengths 4 to 116
  path        string   lengths 3 to 942
  language    string   30 distinct values
  license     string   15 distinct values
  size        int32    values 3 to 1.05M
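The rows below are sample records under this schema (each record is a source file followed by its repo_name, path, language, license, and size). As a minimal sketch, not taken from the source: assuming the data is published as a Hugging Face-style dataset with exactly these columns, it could be loaded and filtered as shown below. The identifier "user/code-corpus" is a placeholder, not the real dataset name.

```python
from datasets import load_dataset  # assumes the Hugging Face `datasets` library is available

# "user/code-corpus" is a hypothetical identifier standing in for the real dataset path.
ds = load_dataset("user/code-corpus", split="train")

# Use the schema's `language` and `size` columns to keep only small Java files.
java_small = ds.filter(lambda row: row["language"] == "Java" and row["size"] < 10_000)

# Print the metadata columns for the first few matching records.
for row in java_small.select(range(3)):
    print(row["repo_name"], row["path"], row["license"], row["size"])
```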
# Arenaria drypidea Boiss. SPECIES

#### Status
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Caryophyllaceae/Arenaria/Arenaria drypidea/README.md
Markdown
apache-2.0
174
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_45) on Mon Mar 03 10:44:38 EST 2014 --> <title>Uses of Interface org.hibernate.event.spi.PreUpdateEventListener (Hibernate JavaDocs)</title> <meta name="date" content="2014-03-03"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.hibernate.event.spi.PreUpdateEventListener (Hibernate JavaDocs)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/hibernate/event/spi/class-use/PreUpdateEventListener.html" target="_top">Frames</a></li> <li><a href="PreUpdateEventListener.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.hibernate.event.spi.PreUpdateEventListener" class="title">Uses of Interface<br>org.hibernate.event.spi.PreUpdateEventListener</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.hibernate.cfg.beanvalidation">org.hibernate.cfg.beanvalidation</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="#org.hibernate.event.spi">org.hibernate.event.spi</a></td> <td class="colLast">&nbsp;</td> </tr> <tr class="altColor"> <td class="colFirst"><a href="#org.hibernate.secure.internal">org.hibernate.secure.internal</a></td> <td class="colLast">&nbsp;</td> </tr> 
</tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.hibernate.cfg.beanvalidation"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a> in <a href="../../../../../org/hibernate/cfg/beanvalidation/package-summary.html">org.hibernate.cfg.beanvalidation</a></h3> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation"> <caption><span>Classes in <a href="../../../../../org/hibernate/cfg/beanvalidation/package-summary.html">org.hibernate.cfg.beanvalidation</a> that implement <a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Class and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>class&nbsp;</code></td> <td class="colLast"><code><strong><a href="../../../../../org/hibernate/cfg/beanvalidation/BeanValidationEventListener.html" title="class in org.hibernate.cfg.beanvalidation">BeanValidationEventListener</a></strong></code> <div class="block"><div class="paragraph"></div> </td> </tr> </tbody> </table> </li> <li class="blockList"><a name="org.hibernate.event.spi"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a> in <a href="../../../../../org/hibernate/event/spi/package-summary.html">org.hibernate.event.spi</a></h3> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing fields, and an explanation"> <caption><span>Fields in <a href="../../../../../org/hibernate/event/spi/package-summary.html">org.hibernate.event.spi</a> with type parameters of type <a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Field and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>static <a href="../../../../../org/hibernate/event/spi/EventType.html" title="class in org.hibernate.event.spi">EventType</a>&lt;<a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a>&gt;</code></td> <td class="colLast"><span class="strong">EventType.</span><code><strong><a href="../../../../../org/hibernate/event/spi/EventType.html#PRE_UPDATE">PRE_UPDATE</a></strong></code>&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"><a name="org.hibernate.secure.internal"> <!-- --> </a> <h3>Uses of <a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a> in <a href="../../../../../org/hibernate/secure/internal/package-summary.html">org.hibernate.secure.internal</a></h3> <table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation"> <caption><span>Classes in <a href="../../../../../org/hibernate/secure/internal/package-summary.html">org.hibernate.secure.internal</a> that implement <a 
href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">PreUpdateEventListener</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Class and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code>class&nbsp;</code></td> <td class="colLast"><code><strong><a href="../../../../../org/hibernate/secure/internal/JaccPreUpdateEventListener.html" title="class in org.hibernate.secure.internal">JaccPreUpdateEventListener</a></strong></code> <div class="block"><div class="paragraph"></div> </td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../org/hibernate/event/spi/PreUpdateEventListener.html" title="interface in org.hibernate.event.spi">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../index-all.html">Index</a></li> <li><a href="../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../index.html?org/hibernate/event/spi/class-use/PreUpdateEventListener.html" target="_top">Frames</a></li> <li><a href="PreUpdateEventListener.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &copy; 2001-2014 <a href="http://redhat.com">Red Hat, Inc.</a> All Rights Reserved.</small></p> </body> </html>
serious6/HibernateSimpleProject
javadoc/hibernate_Doc/org/hibernate/event/spi/class-use/PreUpdateEventListener.html
HTML
apache-2.0
9,634
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs;

import com.intellij.execution.ui.ConsoleView;
import com.intellij.execution.ui.ConsoleViewContentType;
import com.intellij.util.containers.ContainerUtil;
import consulo.util.lang.Pair;
import consulo.util.lang.StringUtil;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;

public final class VcsConsoleLine {
  private final List<Pair<String, ConsoleViewContentType>> myChunks;

  private VcsConsoleLine(@Nonnull List<Pair<String, ConsoleViewContentType>> chunks) {
    myChunks = chunks;
  }

  public void print(@Nonnull ConsoleView console) {
    ConsoleViewContentType lastType = ConsoleViewContentType.NORMAL_OUTPUT;
    for (Pair<String, ConsoleViewContentType> chunk : myChunks) {
      console.print(chunk.first, chunk.second);
      lastType = chunk.second;
    }
    console.print("\n", lastType);
  }

  @Nullable
  public static VcsConsoleLine create(@Nullable String message, @Nonnull ConsoleViewContentType contentType) {
    return create(Collections.singletonList(Pair.create(message, contentType)));
  }

  @Nullable
  public static VcsConsoleLine create(@Nonnull List<Pair<String, ConsoleViewContentType>> lineChunks) {
    List<Pair<String, ConsoleViewContentType>> chunks = ContainerUtil.filter(lineChunks, it -> !StringUtil.isEmptyOrSpaces(it.first));
    if (chunks.isEmpty()) return null;
    return new VcsConsoleLine(chunks);
  }
}
consulo/consulo
modules/base/vcs-api/src/main/java/com/intellij/openapi/vcs/VcsConsoleLine.java
Java
apache-2.0
1,604
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.brixcms.web.nodepage; import org.apache.wicket.IRequestTarget; import org.apache.wicket.Page; import org.apache.wicket.PageParameters; import org.apache.wicket.RequestCycle; import org.apache.wicket.model.IModel; import org.apache.wicket.request.target.component.IPageRequestTarget; import org.apache.wicket.util.lang.Objects; import org.apache.wicket.util.string.StringValue; import org.brixcms.exception.BrixException; import org.brixcms.jcr.wrapper.BrixNode; import java.io.Serializable; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.TreeSet; public class BrixPageParameters implements Serializable { // ------------------------------ FIELDS ------------------------------ private static final long serialVersionUID = 1L; private List<String> indexedParameters = null; ; private List<QueryStringParameter> queryStringParameters = null; // -------------------------- STATIC METHODS -------------------------- public static boolean equals(BrixPageParameters p1, BrixPageParameters p2) { if (Objects.equal(p1, p2)) { return true; } if (p1 == null && p2.getIndexedParamsCount() == 0 && p2.getQueryParamKeys().isEmpty()) { return true; } if (p2 == null && p1.getIndexedParamsCount() == 0 && p1.getQueryParamKeys().isEmpty()) { return true; } return false; } public int getIndexedParamsCount() { return indexedParameters != null ? indexedParameters.size() : 0; } public static BrixPageParameters getCurrent() { IRequestTarget target = RequestCycle.get().getRequestTarget(); // this is required for getting current page parameters from page constructor // (the actual page instance is not constructed yet. 
if (target instanceof PageParametersRequestTarget) { return ((PageParametersRequestTarget) target).getPageParameters(); } else { return getCurrentPage().getBrixPageParameters(); } } // --------------------------- CONSTRUCTORS --------------------------- public BrixPageParameters() { } public BrixPageParameters(PageParameters params) { if (params != null) { for (String name : params.keySet()) { addQueryParam(name, params.get(name)); } } } public void addQueryParam(String name, Object value) { addQueryParam(name, value, -1); } public BrixPageParameters(BrixPageParameters copy) { if (copy == null) { throw new IllegalArgumentException("Copy argument may not be null."); } if (copy.indexedParameters != null) this.indexedParameters = new ArrayList<String>(copy.indexedParameters); if (copy.queryStringParameters != null) this.queryStringParameters = new ArrayList<QueryStringParameter>( copy.queryStringParameters); } // ------------------------ CANONICAL METHODS ------------------------ @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof BrixPageParameters == false) { return false; } BrixPageParameters rhs = (BrixPageParameters) obj; if (!Objects.equal(indexedParameters, rhs.indexedParameters)) { return false; } if (queryStringParameters == null || rhs.queryStringParameters == null) { return rhs.queryStringParameters == queryStringParameters; } if (queryStringParameters.size() != rhs.queryStringParameters.size()) { return false; } for (String key : getQueryParamKeys()) { List<StringValue> values1 = getQueryParams(key); Set<String> v1 = new TreeSet<String>(); List<StringValue> values2 = rhs.getQueryParams(key); Set<String> v2 = new TreeSet<String>(); for (StringValue sv : values1) { v1.add(sv.toString()); } for (StringValue sv : values2) { v2.add(sv.toString()); } if (v1.equals(v2) == false) { return false; } } return true; } public Set<String> getQueryParamKeys() { if (queryStringParameters == null || queryStringParameters.isEmpty()) { return Collections.emptySet(); } Set<String> set = new TreeSet<String>(); for (QueryStringParameter entry : queryStringParameters) { set.add(entry.key); } return Collections.unmodifiableSet(set); } public List<StringValue> getQueryParams(String name) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (queryStringParameters != null) { List<StringValue> result = new ArrayList<StringValue>(); for (QueryStringParameter entry : queryStringParameters) { if (entry.key.equals(name)) { result.add(StringValue.valueOf(entry.value)); } } return Collections.unmodifiableList(result); } else { return Collections.emptyList(); } } // -------------------------- OTHER METHODS -------------------------- public void addQueryParam(String name, Object value, int index) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (value == null) { throw new IllegalArgumentException("Parameter value may not be null."); } if (queryStringParameters == null) queryStringParameters = new ArrayList<QueryStringParameter>(1); QueryStringParameter entry = new QueryStringParameter(name, value.toString()); if (index == -1) queryStringParameters.add(entry); else queryStringParameters.add(index, entry); } void assign(BrixPageParameters other) { if (this != other) { this.indexedParameters = other.indexedParameters; this.queryStringParameters = other.queryStringParameters; } } public void clearIndexedParams() { this.indexedParameters = null; } public void clearQueryParams() { 
this.queryStringParameters = null; } public StringValue getIndexedParam(int index) { if (indexedParameters != null) { if (index >= 0 && index < indexedParameters.size()) { String value = indexedParameters.get(index); return StringValue.valueOf(value); } } return StringValue.valueOf((String) null); } public StringValue getQueryParam(String name) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (queryStringParameters != null) { for (QueryStringParameter entry : queryStringParameters) { if (entry.key.equals(name)) { return StringValue.valueOf(entry.value); } } } return StringValue.valueOf((String) null); } public List<QueryStringParameter> getQueryStringParams() { if (queryStringParameters == null) { return Collections.emptyList(); } else { return Collections.unmodifiableList(new ArrayList<QueryStringParameter>( queryStringParameters)); } } ; public void removeIndexedParam(int index) { if (indexedParameters != null) { if (index >= 0 && index < indexedParameters.size()) { indexedParameters.remove(index); } } } public void setIndexedParam(int index, Object object) { if (indexedParameters == null) indexedParameters = new ArrayList<String>(index); for (int i = indexedParameters.size(); i <= index; ++i) { indexedParameters.add(null); } String value = object != null ? object.toString() : null; indexedParameters.set(index, value); } public void setQueryParam(String name, Object value) { setQueryParam(name, value, -1); } public void setQueryParam(String name, Object value, int index) { removeQueryParam(name); if (value != null) { addQueryParam(name, value); } } public void removeQueryParam(String name) { if (name == null) { throw new IllegalArgumentException("Parameter name may not be null."); } if (queryStringParameters != null) { for (Iterator<QueryStringParameter> i = queryStringParameters.iterator(); i.hasNext();) { QueryStringParameter e = i.next(); if (e.key.equals(name)) { i.remove(); } } } } public String toCallbackURL() { return urlFor(getCurrentPage()); } /** * Constructs a url to the specified page appending these page parameters * * @param page * @return url */ public String urlFor(BrixNodeWebPage page) { IRequestTarget target = new BrixNodeRequestTarget(page, this); return RequestCycle.get().urlFor(target).toString(); } static BrixNodeWebPage getCurrentPage() { IRequestTarget target = RequestCycle.get().getRequestTarget(); BrixNodeWebPage page = null; if (target != null && target instanceof IPageRequestTarget) { Page p = ((IPageRequestTarget) target).getPage(); if (p instanceof BrixNodeWebPage) { page = (BrixNodeWebPage) p; } } if (page == null) { throw new BrixException( "Couldn't obtain the BrixNodeWebPage instance from RequestTarget."); } return page; } /** * Constructs a url to the specified page appending these page parameters * * @param * @return url */ public String urlFor(IModel<BrixNode> node) { IRequestTarget target = new BrixNodeRequestTarget(node, this); return RequestCycle.get().urlFor(target).toString(); } // -------------------------- INNER CLASSES -------------------------- public static class QueryStringParameter implements Serializable { private static final long serialVersionUID = 1L; private final String key; private final String value; public QueryStringParameter(String key, String value) { this.key = key; this.value = value; } public String getKey() { return key; } public String getValue() { return value; } } }
kbachl/brix-cms-backup
brix-core/src/main/java/org/brixcms/web/nodepage/BrixPageParameters.java
Java
apache-2.0
11,587
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.test.recovery; import org.apache.flink.api.common.JobID; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.restartstrategy.RestartStrategies; import org.apache.flink.api.java.ExecutionEnvironment; import org.apache.flink.api.java.io.DiscardingOutputFormat; import org.apache.flink.client.program.ProgramInvocationException; import org.apache.flink.configuration.AkkaOptions; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.runtime.akka.AkkaUtils; import org.apache.flink.runtime.client.JobStatusMessage; import org.apache.flink.runtime.highavailability.HighAvailabilityServices; import org.apache.flink.runtime.highavailability.HighAvailabilityServicesUtils; import org.apache.flink.runtime.jobmanager.JobManager; import org.apache.flink.runtime.jobmanager.MemoryArchivist; import org.apache.flink.runtime.messages.JobManagerMessages; import org.apache.flink.runtime.metrics.NoOpMetricRegistry; import org.apache.flink.runtime.testingUtils.TestingUtils; import org.apache.flink.runtime.testutils.CommonTestUtils; import org.apache.flink.util.NetUtils; import org.apache.flink.util.TestLogger; import akka.actor.ActorRef; import akka.actor.ActorSystem; import akka.pattern.Patterns; import akka.util.Timeout; import org.junit.Test; import java.io.File; import java.io.StringWriter; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import scala.Option; import scala.Some; import scala.Tuple2; import scala.concurrent.Await; import scala.concurrent.Future; import scala.concurrent.duration.FiniteDuration; import static org.apache.flink.runtime.testutils.CommonTestUtils.getCurrentClasspath; import static org.apache.flink.runtime.testutils.CommonTestUtils.getJavaCommandPath; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * This test makes sure that jobs are canceled properly in cases where * the task manager went down and did not respond to cancel messages. 
*/ @SuppressWarnings("serial") public class ProcessFailureCancelingITCase extends TestLogger { @Test public void testCancelingOnProcessFailure() throws Exception { final StringWriter processOutput = new StringWriter(); ActorSystem jmActorSystem = null; Process taskManagerProcess = null; HighAvailabilityServices highAvailabilityServices = null; try { // check that we run this test only if the java command // is available on this machine String javaCommand = getJavaCommandPath(); if (javaCommand == null) { System.out.println("---- Skipping Process Failure test : Could not find java executable ----"); return; } // create a logging file for the process File tempLogFile = File.createTempFile(getClass().getSimpleName() + "-", "-log4j.properties"); tempLogFile.deleteOnExit(); CommonTestUtils.printLog4jDebugConfig(tempLogFile); // find a free port to start the JobManager final int jobManagerPort = NetUtils.getAvailablePort(); // start a JobManager Tuple2<String, Object> localAddress = new Tuple2<String, Object>("localhost", jobManagerPort); Configuration jmConfig = new Configuration(); jmConfig.setString(AkkaOptions.WATCH_HEARTBEAT_INTERVAL, "5 s"); jmConfig.setString(AkkaOptions.WATCH_HEARTBEAT_PAUSE, "2000 s"); jmConfig.setInteger(AkkaOptions.WATCH_THRESHOLD, 10); jmConfig.setString(AkkaOptions.ASK_TIMEOUT, "100 s"); jmConfig.setString(JobManagerOptions.ADDRESS, localAddress._1()); jmConfig.setInteger(JobManagerOptions.PORT, jobManagerPort); highAvailabilityServices = HighAvailabilityServicesUtils.createHighAvailabilityServices( jmConfig, TestingUtils.defaultExecutor(), HighAvailabilityServicesUtils.AddressResolution.NO_ADDRESS_RESOLUTION); jmActorSystem = AkkaUtils.createActorSystem(jmConfig, new Some<>(localAddress)); ActorRef jmActor = JobManager.startJobManagerActors( jmConfig, jmActorSystem, TestingUtils.defaultExecutor(), TestingUtils.defaultExecutor(), highAvailabilityServices, new NoOpMetricRegistry(), Option.empty(), JobManager.class, MemoryArchivist.class)._1(); // the TaskManager java command String[] command = new String[] { javaCommand, "-Dlog.level=DEBUG", "-Dlog4j.configuration=file:" + tempLogFile.getAbsolutePath(), "-Xms80m", "-Xmx80m", "-classpath", getCurrentClasspath(), AbstractTaskManagerProcessFailureRecoveryTest.TaskManagerProcessEntryPoint.class.getName(), String.valueOf(jobManagerPort) }; // start the first two TaskManager processes taskManagerProcess = new ProcessBuilder(command).start(); new CommonTestUtils.PipeForwarder(taskManagerProcess.getErrorStream(), processOutput); // we wait for the JobManager to have the two TaskManagers available // since some of the CI environments are very hostile, we need to give this a lot of time (2 minutes) waitUntilNumTaskManagersAreRegistered(jmActor, 1, 120000); final Throwable[] errorRef = new Throwable[1]; // start the test program, which infinitely blocks Runnable programRunner = new Runnable() { @Override public void run() { try { ExecutionEnvironment env = ExecutionEnvironment.createRemoteEnvironment("localhost", jobManagerPort); env.setParallelism(2); env.setRestartStrategy(RestartStrategies.noRestart()); env.getConfig().disableSysoutLogging(); env.generateSequence(0, Long.MAX_VALUE) .map(new MapFunction<Long, Long>() { @Override public Long map(Long value) throws Exception { synchronized (this) { wait(); } return 0L; } }) .output(new DiscardingOutputFormat<Long>()); env.execute(); } catch (Throwable t) { errorRef[0] = t; } } }; Thread programThread = new Thread(programRunner); // kill the TaskManager 
taskManagerProcess.destroy(); taskManagerProcess = null; // immediately submit the job. this should hit the case // where the JobManager still thinks it has the TaskManager and tries to send it tasks programThread.start(); // try to cancel the job cancelRunningJob(jmActor); // we should see a failure within reasonable time (10s is the ask timeout). // since the CI environment is often slow, we conservatively give it up to 2 minutes, // to fail, which is much lower than the failure time given by the heartbeats ( > 2000s) programThread.join(120000); assertFalse("The program did not cancel in time (2 minutes)", programThread.isAlive()); Throwable error = errorRef[0]; assertNotNull("The program did not fail properly", error); assertTrue(error instanceof ProgramInvocationException); // all seems well :-) } catch (Exception e) { printProcessLog("TaskManager", processOutput.toString()); throw e; } catch (Error e) { printProcessLog("TaskManager 1", processOutput.toString()); throw e; } finally { if (taskManagerProcess != null) { taskManagerProcess.destroy(); } if (jmActorSystem != null) { jmActorSystem.shutdown(); } if (highAvailabilityServices != null) { highAvailabilityServices.closeAndCleanupAllData(); } } } private void cancelRunningJob(ActorRef jobManager) throws Exception { final FiniteDuration askTimeout = new FiniteDuration(10, TimeUnit.SECONDS); // try at most for 30 seconds final long deadline = System.currentTimeMillis() + 30000; JobID jobId = null; do { Future<Object> response = Patterns.ask(jobManager, JobManagerMessages.getRequestRunningJobsStatus(), new Timeout(askTimeout)); Object result; try { result = Await.result(response, askTimeout); } catch (Exception e) { throw new Exception("Could not retrieve running jobs from the JobManager.", e); } if (result instanceof JobManagerMessages.RunningJobsStatus) { List<JobStatusMessage> jobs = ((JobManagerMessages.RunningJobsStatus) result).getStatusMessages(); if (jobs.size() == 1) { jobId = jobs.get(0).getJobId(); break; } } } while (System.currentTimeMillis() < deadline); if (jobId == null) { // we never found it running, must have failed already return; } // tell the JobManager to cancel the job jobManager.tell( new JobManagerMessages.LeaderSessionMessage( HighAvailabilityServices.DEFAULT_LEADER_ID, new JobManagerMessages.CancelJob(jobId)), ActorRef.noSender()); } private void waitUntilNumTaskManagersAreRegistered(ActorRef jobManager, int numExpected, long maxDelay) throws Exception { final long deadline = System.currentTimeMillis() + maxDelay; while (true) { long remaining = deadline - System.currentTimeMillis(); if (remaining <= 0) { fail("The TaskManagers did not register within the expected time (" + maxDelay + "msecs)"); } FiniteDuration timeout = new FiniteDuration(remaining, TimeUnit.MILLISECONDS); try { Future<?> result = Patterns.ask(jobManager, JobManagerMessages.getRequestNumberRegisteredTaskManager(), new Timeout(timeout)); Integer numTMs = (Integer) Await.result(result, timeout); if (numTMs == numExpected) { break; } } catch (TimeoutException e) { // ignore and retry } catch (ClassCastException e) { fail("Wrong response: " + e.getMessage()); } } } private void printProcessLog(String processName, String log) { if (log == null || log.length() == 0) { return; } System.out.println("-----------------------------------------"); System.out.println(" BEGIN SPAWNED PROCESS LOG FOR " + processName); System.out.println("-----------------------------------------"); System.out.println(log); 
System.out.println("-----------------------------------------"); System.out.println(" END SPAWNED PROCESS LOG"); System.out.println("-----------------------------------------"); } }
zimmermatt/flink
flink-tests/src/test/java/org/apache/flink/test/recovery/ProcessFailureCancelingITCase.java
Java
apache-2.0
10,977
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2011.09.09 at 01:22:27 PM CEST // package test; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.XmlValue; /** * <p>Java class for anonymous complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attribute name="content-type" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;attribute name="seq" type="{http://www.w3.org/2001/XMLSchema}anySimpleType" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "content" }) @XmlRootElement(name = "fpage") public class Fpage { @XmlValue protected String content; @XmlAttribute(name = "content-type") @XmlSchemaType(name = "anySimpleType") protected String contentType; @XmlAttribute @XmlSchemaType(name = "anySimpleType") protected String seq; /** * Gets the value of the content property. * * @return * possible object is * {@link String } * */ public String getContent() { return content; } /** * Sets the value of the content property. * * @param value * allowed object is * {@link String } * */ public void setContent(String value) { this.content = value; } /** * Gets the value of the contentType property. * * @return * possible object is * {@link String } * */ public String getContentType() { return contentType; } /** * Sets the value of the contentType property. * * @param value * allowed object is * {@link String } * */ public void setContentType(String value) { this.contentType = value; } /** * Gets the value of the seq property. * * @return * possible object is * {@link String } * */ public String getSeq() { return seq; } /** * Sets the value of the seq property. * * @param value * allowed object is * {@link String } * */ public void setSeq(String value) { this.seq = value; } }
BlueBrain/bluima
modules/bluima_xml/src/test/Fpage.java
Java
apache-2.0
3,031
'use strict';

const Task = require('co-task');
const sql = require('../api/helpers/sql');

module.exports = {
  up: function (queryInterface, Sequelize) {
    return Task.spawn(function* () {
      yield queryInterface.addColumn('ClassicSalads', 'ClassicSaladCatagoryId', Sequelize.INTEGER);
      yield sql.foreignKeyUp(queryInterface, 'ClassicSalads', 'ClassicSaladCatagoryId', 'ClassicSaladCatagories', 'id');
    });
  },

  down: function (queryInterface, Sequelize) {
    return Task.spawn(function* () {
      yield sql.foreignKeyDown(queryInterface, 'ClassicSalads', 'ClassicSaladCatagoryId', 'ClassicSaladCatagories', 'id');
      yield queryInterface.removeColumn('ClassicSalads', 'ClassicSaladCatagoryId');
    });
  }
};
arcana261/SaladioBackEnd
migrations/20161221201621-associate-classicsaladcatagory-and-classicsalad.js
JavaScript
apache-2.0
734
<div class="navbar-default sidebar" role="navigation"> <div class="sidebar-nav navbar-collapse"> <ul class="nav in" id="side-menu"> <li> &nbsp; </li> <li ng-class="{active: collapseVar==key}" ng-repeat="(key, value) in doc.data"> <a href="" ng-click="check(key)"><i class="fa fa-info-circle fa-fw"></i> {{key}}<span class="fa arrow"></span></a> <ul class="nav nav-second-level" collapse="collapseVar!=key"> <li ng-repeat="(key2, value2) in value" ng-show="key2 != 'info'"> <a href="/#/dashboard/doc/{{docapp.id}}/api/{{value2.info.id}}">{{key2}}</a> </li> </ul> </li> </ul> </div> </div>
nearform/sentinel-angular
scripts/directives/sidebardoc/sidebardoc.html
HTML
apache-2.0
792
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oozie.action.hadoop; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.oozie.action.ActionExecutorException; import org.apache.oozie.util.XLog; import org.jdom.Element; import org.jdom.Namespace; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.List; public abstract class ScriptLanguageActionExecutor extends JavaActionExecutor { public ScriptLanguageActionExecutor(String type) { super(type); } @Override public List<Class<?>> getLauncherClasses() { return null; } protected boolean shouldAddScriptToCache(){ return true; } @Override protected Configuration setupLauncherConf(Configuration conf, Element actionXml, Path appPath, Context context) throws ActionExecutorException { super.setupLauncherConf(conf, actionXml, appPath, context); if(shouldAddScriptToCache()) { addScriptToCache(conf, actionXml, appPath, context); } return conf; } protected void addScriptToCache(Configuration conf, Element actionXml, Path appPath, Context context) throws ActionExecutorException { Namespace ns = actionXml.getNamespace(); String script = actionXml.getChild("script", ns).getTextTrim(); String name = new Path(script).getName(); String scriptContent = context.getProtoActionConf().get(this.getScriptName()); Path scriptFile = null; if (scriptContent != null) { // Create script on filesystem if this is // an http submission job; FSDataOutputStream dos = null; try { Path actionPath = context.getActionDir(); scriptFile = new Path(actionPath, script); FileSystem fs = context.getAppFileSystem(); dos = fs.create(scriptFile); dos.write(scriptContent.getBytes(StandardCharsets.UTF_8)); addToCache(conf, actionPath, script + "#" + name, false); } catch (Exception ex) { throw new ActionExecutorException(ActionExecutorException.ErrorType.ERROR, "FAILED_OPERATION", XLog .format("Not able to write script file {0} on hdfs", scriptFile), ex); } finally { try { if (dos != null) { dos.close(); } } catch (IOException ex) { XLog.getLog(getClass()).error("Error: " + ex.getMessage()); } } } else { addToCache(conf, appPath, script + "#" + name, false); } } protected abstract String getScriptName(); }
cbaenziger/oozie
core/src/main/java/org/apache/oozie/action/hadoop/ScriptLanguageActionExecutor.java
Java
apache-2.0
3,702
#!/bin/bash
# ----------------------------------------------------------------------------
#
# Package        : filebeat
# Version        : 7.2.0
# Source repo    : https://github.com/elastic/beats.git
# Tested on      : RHEL 7.5
# Script License : Apache License Version 2.0
# Maintainer     : Edmond Chan <[email protected]>
#
# Disclaimer: This script has been tested in root mode on given
# ==========  platform using the mentioned version of the package.
#             It may not work as expected with newer versions of the
#             package and/or distribution. In such case, please
#             contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------

# install dependencies
yum install golang make git

mkdir go
export GOPATH=~/go
mkdir -p ${GOPATH}/src/github.com/elastic
cd ${GOPATH}/src/github.com/elastic
git clone https://github.com/elastic/beats.git
cd $GOPATH/src/github.com/elastic/beats/filebeat
git checkout v7.2.0
make
ppc64le/build-scripts
f/filebeat/filebeat_7.2.0_redhat_7.sh
Shell
apache-2.0
1,000
// Code generated - DO NOT EDIT.

package topology

import (
    "github.com/skydive-project/skydive/graffiti/getter"
    "strings"
)

func (obj *NextHop) GetFieldBool(key string) (bool, error) {
    return false, getter.ErrFieldNotFound
}

func (obj *NextHop) GetFieldInt64(key string) (int64, error) {
    switch key {
    case "Priority":
        return int64(obj.Priority), nil
    case "IfIndex":
        return int64(obj.IfIndex), nil
    }
    return 0, getter.ErrFieldNotFound
}

func (obj *NextHop) GetFieldString(key string) (string, error) {
    switch key {
    case "IP":
        return obj.IP.String(), nil
    case "MAC":
        return string(obj.MAC), nil
    }
    return "", getter.ErrFieldNotFound
}

func (obj *NextHop) GetFieldKeys() []string {
    return []string{
        "Priority",
        "IP",
        "MAC",
        "IfIndex",
    }
}

func (obj *NextHop) MatchBool(key string, predicate getter.BoolPredicate) bool {
    return false
}

func (obj *NextHop) MatchInt64(key string, predicate getter.Int64Predicate) bool {
    if b, err := obj.GetFieldInt64(key); err == nil {
        return predicate(b)
    }
    return false
}

func (obj *NextHop) MatchString(key string, predicate getter.StringPredicate) bool {
    if b, err := obj.GetFieldString(key); err == nil {
        return predicate(b)
    }
    return false
}

func (obj *NextHop) GetField(key string) (interface{}, error) {
    if s, err := obj.GetFieldString(key); err == nil {
        return s, nil
    }
    if i, err := obj.GetFieldInt64(key); err == nil {
        return i, nil
    }
    return nil, getter.ErrFieldNotFound
}

func init() {
    strings.Index("", ".")
}
skydive-project/skydive
topology/nexthop_gendecoder.go
GO
apache-2.0
1,511
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.lang.ant.config.execution; import com.intellij.execution.filters.Filter; import com.intellij.execution.filters.OpenFileHyperlinkInfo; import com.intellij.execution.filters.TextConsoleBuilder; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.process.ProcessOutputTypes; import com.intellij.execution.ui.ConsoleView; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; import java.io.File; import java.io.OutputStream; public final class PlainTextView implements AntOutputView { private final ConsoleView myConsole; private final Project myProject; private String myCommandLine; private final LightProcessHandler myProcessHandler = new LightProcessHandler(); public PlainTextView(Project project) { myProject = project; TextConsoleBuilder builder = TextConsoleBuilderFactory.getInstance().createBuilder(project); builder.addFilter(new AntMessageFilter()); builder.addFilter(new JUnitFilter()); myConsole = builder.getConsole(); myConsole.attachToProcess(myProcessHandler); } public void dispose() { Disposer.dispose(myConsole); } @Override public String getId() { return "_text_view_"; } @Override public JComponent getComponent() { return myConsole.getComponent(); } @Override @Nullable public Object addMessage(AntMessage message) { print(message.getText() + "\n", ProcessOutputTypes.STDOUT); return null; } private void print(String text, Key type) { myProcessHandler.notifyTextAvailable(text, type); } public void addMessages(AntMessage[] messages) { for (AntMessage message : messages) { addMessage(message); } } @Override public void addJavacMessage(AntMessage message, String url) { if (message.getLine() > 0) { String msg = TreeView.printMessage(message, url); print(msg, ProcessOutputTypes.STDOUT); } print(message.getText(), ProcessOutputTypes.STDOUT); } @Override public void addException(AntMessage exception, boolean showFullTrace) { String text = exception.getText(); if (!showFullTrace) { int index = text.indexOf("\r\n"); if (index != -1) { text = text.substring(0, index) + "\n"; } } print(text, ProcessOutputTypes.STDOUT); } public void clearAllMessages() { myConsole.clear(); } @Override public void startBuild(AntMessage message) { print(myCommandLine + "\n", ProcessOutputTypes.SYSTEM); addMessage(message); } @Override public void buildFailed(AntMessage message) { print(myCommandLine + "\n", ProcessOutputTypes.SYSTEM); addMessage(message); } @Override public void startTarget(AntMessage message) { addMessage(message); } @Override public void startTask(AntMessage message) { addMessage(message); } @Override public void finishBuild(String 
messageText) { print("\n" + messageText + "\n", ProcessOutputTypes.SYSTEM); } @Override public void finishTarget() { } @Override public void finishTask() { } @Override @Nullable public Object getData(@NotNull String dataId) { return null; } public void setBuildCommandLine(String commandLine) { myCommandLine = commandLine; } private final class JUnitFilter implements Filter { @Override @Nullable public Result applyFilter(String line, int entireLength) { HyperlinkUtil.PlaceInfo placeInfo = HyperlinkUtil.parseJUnitMessage(myProject, line); if (placeInfo == null) { return null; } int textStartOffset = entireLength - line.length(); int highlightStartOffset = textStartOffset + placeInfo.getLinkStartIndex(); int highlightEndOffset = textStartOffset + placeInfo.getLinkEndIndex() + 1; OpenFileHyperlinkInfo info = new OpenFileHyperlinkInfo(myProject, placeInfo.getFile(), placeInfo.getLine(), placeInfo.getColumn()); return new Result(highlightStartOffset, highlightEndOffset, info); } } private final class AntMessageFilter implements Filter { @Override public Result applyFilter(String line, int entireLength) { int afterLineNumberIndex = line.indexOf(": "); // end of file_name_and_line_number sequence if (afterLineNumberIndex == -1) { return null; } String fileAndLineNumber = line.substring(0, afterLineNumberIndex); int index = fileAndLineNumber.lastIndexOf(':'); if (index == -1) { return null; } final String fileName = fileAndLineNumber.substring(0, index); String lineNumberStr = fileAndLineNumber.substring(index + 1).trim(); int lineNumber; try { lineNumber = Integer.parseInt(lineNumberStr); } catch (NumberFormatException e) { return null; } final VirtualFile file = LocalFileSystem.getInstance().findFileByPath(fileName.replace(File.separatorChar, '/')); if (file == null) { return null; } int textStartOffset = entireLength - line.length(); int highlightEndOffset = textStartOffset + afterLineNumberIndex; OpenFileHyperlinkInfo info = new OpenFileHyperlinkInfo(myProject, file, lineNumber - 1); return new Result(textStartOffset, highlightEndOffset, info); } } private static class LightProcessHandler extends ProcessHandler { @Override protected void destroyProcessImpl() { throw new UnsupportedOperationException(); } @Override protected void detachProcessImpl() { throw new UnsupportedOperationException(); } @Override public boolean detachIsDefault() { return false; } @Override @Nullable public OutputStream getProcessInput() { return null; } } }
mdanielwork/intellij-community
plugins/ant/src/com/intellij/lang/ant/config/execution/PlainTextView.java
Java
apache-2.0
6,686
/* Copyright 2012-2022 Marco De Salvo Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ using RDFSharp.Model; using RDFSharp.Semantics.OWL; using System; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace RDFSharp.Semantics.SKOS { /// <summary> /// RDFSKOSConceptScheme represents an instance of skos:ConceptScheme within an ontology data. /// </summary> public class RDFSKOSConceptScheme : RDFOntologyFact, IEnumerable<RDFSKOSConcept> { #region Properties /// <summary> /// Count of the concepts composing the scheme /// </summary> public long ConceptsCount => this.Concepts.Count; /// <summary> /// Count of the collections composing the scheme /// </summary> public long CollectionsCount => this.Collections.Count; /// <summary> /// Count of the ordered collections composing the scheme /// </summary> public long OrderedCollectionsCount => this.OrderedCollections.Count; /// <summary> /// Count of the labels composing the scheme /// </summary> public long LabelsCount => this.Labels.Count; /// <summary> /// Gets the enumerator on the concepts of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSConcept> ConceptsEnumerator => this.Concepts.Values.GetEnumerator(); /// <summary> /// Gets the enumerator on the collections of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSCollection> CollectionsEnumerator => this.Collections.Values.GetEnumerator(); /// <summary> /// Gets the enumerator on the ordered collections of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSOrderedCollection> OrderedCollectionsEnumerator => this.OrderedCollections.Values.GetEnumerator(); /// <summary> /// Gets the enumerator on the labels of the scheme for iteration /// </summary> public IEnumerator<RDFSKOSLabel> LabelsEnumerator => this.Labels.Values.GetEnumerator(); /// <summary> /// Annotations describing concepts of the scheme /// </summary> public RDFSKOSAnnotations Annotations { get; internal set; } /// <summary> /// Relations describing concepts of the scheme /// </summary> public RDFSKOSRelations Relations { get; internal set; } /// <summary> /// Concepts contained in the scheme (encodes the 'skos:inScheme' relation) /// </summary> internal Dictionary<long, RDFSKOSConcept> Concepts { get; set; } /// <summary> /// Collections contained in the scheme /// </summary> internal Dictionary<long, RDFSKOSCollection> Collections { get; set; } /// <summary> /// OrderedCollections contained in the scheme /// </summary> internal Dictionary<long, RDFSKOSOrderedCollection> OrderedCollections { get; set; } /// <summary> /// Labels contained in the scheme /// </summary> internal Dictionary<long, RDFSKOSLabel> Labels { get; set; } #endregion #region Ctors /// <summary> /// Default-ctor to build a conceptScheme with the given name /// </summary> public RDFSKOSConceptScheme(RDFResource conceptName) : base(conceptName) { this.Concepts = new Dictionary<long, RDFSKOSConcept>(); this.Collections = new Dictionary<long, RDFSKOSCollection>(); this.OrderedCollections = 
new Dictionary<long, RDFSKOSOrderedCollection>(); this.Labels = new Dictionary<long, RDFSKOSLabel>(); this.Annotations = new RDFSKOSAnnotations(); this.Relations = new RDFSKOSRelations(); } #endregion #region Interfaces /// <summary> /// Exposes a typed enumerator on the scheme's concepts /// </summary> IEnumerator<RDFSKOSConcept> IEnumerable<RDFSKOSConcept>.GetEnumerator() => this.ConceptsEnumerator; /// <summary> /// Exposes an untyped enumerator on the scheme's concepts /// </summary> IEnumerator IEnumerable.GetEnumerator() => this.ConceptsEnumerator; #endregion #region Methods #region Add /// <summary> /// Adds the given concept to the scheme /// </summary> public RDFSKOSConceptScheme AddConcept(RDFSKOSConcept concept) { if (concept != null) { if (!this.Concepts.ContainsKey(concept.PatternMemberID)) this.Concepts.Add(concept.PatternMemberID, concept); } return this; } /// <summary> /// Adds the given collection to the scheme /// </summary> public RDFSKOSConceptScheme AddCollection(RDFSKOSCollection collection) { if (collection != null) { if (!this.Collections.ContainsKey(collection.PatternMemberID)) { this.Collections.Add(collection.PatternMemberID, collection); //Also add concepts of the collection foreach (var cn in collection.Concepts.Values) this.AddConcept(cn); //Also adds collections of the collection foreach (var cl in collection.Collections.Values) this.AddCollection(cl); } } return this; } /// <summary> /// Adds the given ordered collection to the scheme /// </summary> public RDFSKOSConceptScheme AddOrderedCollection(RDFSKOSOrderedCollection orderedCollection) { if (orderedCollection != null) { if (!this.OrderedCollections.ContainsKey(orderedCollection.PatternMemberID)) { this.OrderedCollections.Add(orderedCollection.PatternMemberID, orderedCollection); //Also add concepts of the ordered collection foreach (var cn in orderedCollection.Concepts.Values.OrderBy(x => x.Item1)) this.AddConcept(cn.Item2); } } return this; } /// <summary> /// Adds the given label to the scheme /// </summary> public RDFSKOSConceptScheme AddLabel(RDFSKOSLabel label) { if (label != null) { if (!this.Labels.ContainsKey(label.PatternMemberID)) this.Labels.Add(label.PatternMemberID, label); } return this; } #endregion #region Remove /// <summary> /// Removes the given concept from the scheme /// </summary> public RDFSKOSConceptScheme RemoveConcept(RDFSKOSConcept concept) { if (concept != null) { if (this.Concepts.ContainsKey(concept.PatternMemberID)) this.Concepts.Remove(concept.PatternMemberID); } return this; } /// <summary> /// Removes the given collection from the scheme /// </summary> public RDFSKOSConceptScheme RemoveCollection(RDFSKOSCollection collection) { if (collection != null) { if (this.Collections.ContainsKey(collection.PatternMemberID)) this.Collections.Remove(collection.PatternMemberID); } return this; } /// <summary> /// Removes the given ordered collection from the scheme /// </summary> public RDFSKOSConceptScheme RemoveOrderedCollection(RDFSKOSOrderedCollection orderedCollection) { if (orderedCollection != null) { if (this.OrderedCollections.ContainsKey(orderedCollection.PatternMemberID)) this.OrderedCollections.Remove(orderedCollection.PatternMemberID); } return this; } /// <summary> /// Removes the given label from the scheme /// </summary> public RDFSKOSConceptScheme RemoveLabel(RDFSKOSLabel label) { if (label != null) { if (this.Labels.ContainsKey(label.PatternMemberID)) this.Labels.Remove(label.PatternMemberID); } return this; } #endregion #region Select /// <summary> /// Selects 
the concept represented by the given string from the scheme /// </summary> public RDFSKOSConcept SelectConcept(string concept) { if (concept != null) { long conceptID = RDFModelUtilities.CreateHash(concept); if (this.Concepts.ContainsKey(conceptID)) return this.Concepts[conceptID]; } return null; } /// <summary> /// Selects the collection represented by the given string from the scheme /// </summary> public RDFSKOSCollection SelectCollection(string collection) { if (collection != null) { long collectionID = RDFModelUtilities.CreateHash(collection); if (this.Collections.ContainsKey(collectionID)) return this.Collections[collectionID]; } return null; } /// <summary> /// Selects the ordered collection represented by the given string from the scheme /// </summary> public RDFSKOSOrderedCollection SelectOrderedCollection(string orderedCollection) { if (orderedCollection != null) { long orderedCollectionID = RDFModelUtilities.CreateHash(orderedCollection); if (this.OrderedCollections.ContainsKey(orderedCollectionID)) return this.OrderedCollections[orderedCollectionID]; } return null; } /// <summary> /// Selects the label represented by the given string from the scheme /// </summary> public RDFSKOSLabel SelectLabel(string label) { if (label != null) { long labelID = RDFModelUtilities.CreateHash(label); if (this.Labels.ContainsKey(labelID)) return this.Labels[labelID]; } return null; } #endregion #region Set /// <summary> /// Builds a new intersection scheme from this scheme and a given one /// </summary> public RDFSKOSConceptScheme IntersectWith(RDFSKOSConceptScheme conceptScheme) { RDFSKOSConceptScheme result = new RDFSKOSConceptScheme(new RDFResource()); if (conceptScheme != null) { //Add intersection concepts foreach (RDFSKOSConcept c in this) { if (conceptScheme.Concepts.ContainsKey(c.PatternMemberID)) result.AddConcept(c); } //Add intersection collections foreach (RDFSKOSCollection c in this.Collections.Values) { if (conceptScheme.Collections.ContainsKey(c.PatternMemberID)) result.AddCollection(c); } //Add intersection ordered collections foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) { if (conceptScheme.OrderedCollections.ContainsKey(o.PatternMemberID)) result.AddOrderedCollection(o); } //Add intersection labels foreach (RDFSKOSLabel l in this.Labels.Values) { if (conceptScheme.Labels.ContainsKey(l.PatternMemberID)) result.AddLabel(l); } //Add intersection relations result.Relations.TopConcept = this.Relations.TopConcept.IntersectWith(conceptScheme.Relations.TopConcept); result.Relations.Broader = this.Relations.Broader.IntersectWith(conceptScheme.Relations.Broader); result.Relations.BroaderTransitive = this.Relations.BroaderTransitive.IntersectWith(conceptScheme.Relations.BroaderTransitive); result.Relations.BroadMatch = this.Relations.BroadMatch.IntersectWith(conceptScheme.Relations.BroadMatch); result.Relations.Narrower = this.Relations.Narrower.IntersectWith(conceptScheme.Relations.Narrower); result.Relations.NarrowerTransitive = this.Relations.NarrowerTransitive.IntersectWith(conceptScheme.Relations.NarrowerTransitive); result.Relations.NarrowMatch = this.Relations.NarrowMatch.IntersectWith(conceptScheme.Relations.NarrowMatch); result.Relations.Related = this.Relations.Related.IntersectWith(conceptScheme.Relations.Related); result.Relations.RelatedMatch = this.Relations.RelatedMatch.IntersectWith(conceptScheme.Relations.RelatedMatch); result.Relations.SemanticRelation = this.Relations.SemanticRelation.IntersectWith(conceptScheme.Relations.SemanticRelation); 
result.Relations.MappingRelation = this.Relations.MappingRelation.IntersectWith(conceptScheme.Relations.MappingRelation); result.Relations.CloseMatch = this.Relations.CloseMatch.IntersectWith(conceptScheme.Relations.CloseMatch); result.Relations.ExactMatch = this.Relations.ExactMatch.IntersectWith(conceptScheme.Relations.ExactMatch); result.Relations.Notation = this.Relations.Notation.IntersectWith(conceptScheme.Relations.Notation); result.Relations.PrefLabel = this.Relations.PrefLabel.IntersectWith(conceptScheme.Relations.PrefLabel); result.Relations.AltLabel = this.Relations.AltLabel.IntersectWith(conceptScheme.Relations.AltLabel); result.Relations.HiddenLabel = this.Relations.HiddenLabel.IntersectWith(conceptScheme.Relations.HiddenLabel); result.Relations.LiteralForm = this.Relations.LiteralForm.IntersectWith(conceptScheme.Relations.LiteralForm); result.Relations.LabelRelation = this.Relations.LabelRelation.IntersectWith(conceptScheme.Relations.LabelRelation); //Add intersection annotations result.Annotations.PrefLabel = this.Annotations.PrefLabel.IntersectWith(conceptScheme.Annotations.PrefLabel); result.Annotations.AltLabel = this.Annotations.AltLabel.IntersectWith(conceptScheme.Annotations.AltLabel); result.Annotations.HiddenLabel = this.Annotations.HiddenLabel.IntersectWith(conceptScheme.Annotations.HiddenLabel); result.Annotations.Note = this.Annotations.Note.IntersectWith(conceptScheme.Annotations.Note); result.Annotations.ChangeNote = this.Annotations.ChangeNote.IntersectWith(conceptScheme.Annotations.ChangeNote); result.Annotations.EditorialNote = this.Annotations.EditorialNote.IntersectWith(conceptScheme.Annotations.EditorialNote); result.Annotations.HistoryNote = this.Annotations.HistoryNote.IntersectWith(conceptScheme.Annotations.HistoryNote); result.Annotations.ScopeNote = this.Annotations.ScopeNote.IntersectWith(conceptScheme.Annotations.ScopeNote); result.Annotations.Definition = this.Annotations.Definition.IntersectWith(conceptScheme.Annotations.Definition); result.Annotations.Example = this.Annotations.Example.IntersectWith(conceptScheme.Annotations.Example); } return result; } /// <summary> /// Builds a new union scheme from this scheme and a given one /// </summary> public RDFSKOSConceptScheme UnionWith(RDFSKOSConceptScheme conceptScheme) { RDFSKOSConceptScheme result = new RDFSKOSConceptScheme(new RDFResource()); //Add concepts from this scheme foreach (RDFSKOSConcept c in this) result.AddConcept(c); //Add collections from this scheme foreach (RDFSKOSCollection c in this.Collections.Values) result.AddCollection(c); //Add ordered collections from this scheme foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) result.AddOrderedCollection(o); //Add labels from this scheme foreach (RDFSKOSLabel l in this.Labels.Values) result.AddLabel(l); //Add relations from this scheme result.Relations.TopConcept = result.Relations.TopConcept.UnionWith(this.Relations.TopConcept); result.Relations.Broader = result.Relations.Broader.UnionWith(this.Relations.Broader); result.Relations.BroaderTransitive = result.Relations.BroaderTransitive.UnionWith(this.Relations.BroaderTransitive); result.Relations.BroadMatch = result.Relations.BroadMatch.UnionWith(this.Relations.BroadMatch); result.Relations.Narrower = result.Relations.Narrower.UnionWith(this.Relations.Narrower); result.Relations.NarrowerTransitive = result.Relations.NarrowerTransitive.UnionWith(this.Relations.NarrowerTransitive); result.Relations.NarrowMatch = 
result.Relations.NarrowMatch.UnionWith(this.Relations.NarrowMatch); result.Relations.Related = result.Relations.Related.UnionWith(this.Relations.Related); result.Relations.RelatedMatch = result.Relations.RelatedMatch.UnionWith(this.Relations.RelatedMatch); result.Relations.SemanticRelation = result.Relations.SemanticRelation.UnionWith(this.Relations.SemanticRelation); result.Relations.MappingRelation = result.Relations.MappingRelation.UnionWith(this.Relations.MappingRelation); result.Relations.CloseMatch = result.Relations.CloseMatch.UnionWith(this.Relations.CloseMatch); result.Relations.ExactMatch = result.Relations.ExactMatch.UnionWith(this.Relations.ExactMatch); result.Relations.Notation = result.Relations.Notation.UnionWith(this.Relations.Notation); result.Relations.PrefLabel = result.Relations.PrefLabel.UnionWith(this.Relations.PrefLabel); result.Relations.AltLabel = result.Relations.AltLabel.UnionWith(this.Relations.AltLabel); result.Relations.HiddenLabel = result.Relations.HiddenLabel.UnionWith(this.Relations.HiddenLabel); result.Relations.LiteralForm = result.Relations.LiteralForm.UnionWith(this.Relations.LiteralForm); result.Relations.LabelRelation = result.Relations.LabelRelation.UnionWith(this.Relations.LabelRelation); //Add annotations from this scheme result.Annotations.PrefLabel = result.Annotations.PrefLabel.UnionWith(this.Annotations.PrefLabel); result.Annotations.AltLabel = result.Annotations.AltLabel.UnionWith(this.Annotations.AltLabel); result.Annotations.HiddenLabel = result.Annotations.HiddenLabel.UnionWith(this.Annotations.HiddenLabel); result.Annotations.Note = result.Annotations.Note.UnionWith(this.Annotations.Note); result.Annotations.ChangeNote = result.Annotations.ChangeNote.UnionWith(this.Annotations.ChangeNote); result.Annotations.EditorialNote = result.Annotations.EditorialNote.UnionWith(this.Annotations.EditorialNote); result.Annotations.HistoryNote = result.Annotations.HistoryNote.UnionWith(this.Annotations.HistoryNote); result.Annotations.ScopeNote = result.Annotations.ScopeNote.UnionWith(this.Annotations.ScopeNote); result.Annotations.Definition = result.Annotations.Definition.UnionWith(this.Annotations.Definition); result.Annotations.Example = result.Annotations.Example.UnionWith(this.Annotations.Example); //Manage the given scheme if (conceptScheme != null) { //Add concepts from the given scheme foreach (RDFSKOSConcept c in conceptScheme) result.AddConcept(c); //Add collections from the given scheme foreach (RDFSKOSCollection c in conceptScheme.Collections.Values) result.AddCollection(c); //Add ordered collections from the given scheme foreach (RDFSKOSOrderedCollection o in conceptScheme.OrderedCollections.Values) result.AddOrderedCollection(o); //Add labels from the given scheme foreach (RDFSKOSLabel l in conceptScheme.Labels.Values) result.AddLabel(l); //Add relations from the given scheme result.Relations.TopConcept = result.Relations.TopConcept.UnionWith(conceptScheme.Relations.TopConcept); result.Relations.Broader = result.Relations.Broader.UnionWith(conceptScheme.Relations.Broader); result.Relations.BroaderTransitive = result.Relations.BroaderTransitive.UnionWith(conceptScheme.Relations.BroaderTransitive); result.Relations.BroadMatch = result.Relations.BroadMatch.UnionWith(conceptScheme.Relations.BroadMatch); result.Relations.Narrower = result.Relations.Narrower.UnionWith(conceptScheme.Relations.Narrower); result.Relations.NarrowerTransitive = result.Relations.NarrowerTransitive.UnionWith(conceptScheme.Relations.NarrowerTransitive); 
result.Relations.NarrowMatch = result.Relations.NarrowMatch.UnionWith(conceptScheme.Relations.NarrowMatch); result.Relations.Related = result.Relations.Related.UnionWith(conceptScheme.Relations.Related); result.Relations.RelatedMatch = result.Relations.RelatedMatch.UnionWith(conceptScheme.Relations.RelatedMatch); result.Relations.SemanticRelation = result.Relations.SemanticRelation.UnionWith(conceptScheme.Relations.SemanticRelation); result.Relations.MappingRelation = result.Relations.MappingRelation.UnionWith(conceptScheme.Relations.MappingRelation); result.Relations.CloseMatch = result.Relations.CloseMatch.UnionWith(conceptScheme.Relations.CloseMatch); result.Relations.ExactMatch = result.Relations.ExactMatch.UnionWith(conceptScheme.Relations.ExactMatch); result.Relations.Notation = result.Relations.Notation.UnionWith(conceptScheme.Relations.Notation); result.Relations.PrefLabel = result.Relations.PrefLabel.UnionWith(conceptScheme.Relations.PrefLabel); result.Relations.AltLabel = result.Relations.AltLabel.UnionWith(conceptScheme.Relations.AltLabel); result.Relations.HiddenLabel = result.Relations.HiddenLabel.UnionWith(conceptScheme.Relations.HiddenLabel); result.Relations.LiteralForm = result.Relations.LiteralForm.UnionWith(conceptScheme.Relations.LiteralForm); result.Relations.LabelRelation = result.Relations.LabelRelation.UnionWith(conceptScheme.Relations.LabelRelation); //Add annotations from the given scheme result.Annotations.PrefLabel = result.Annotations.PrefLabel.UnionWith(conceptScheme.Annotations.PrefLabel); result.Annotations.AltLabel = result.Annotations.AltLabel.UnionWith(conceptScheme.Annotations.AltLabel); result.Annotations.HiddenLabel = result.Annotations.HiddenLabel.UnionWith(conceptScheme.Annotations.HiddenLabel); result.Annotations.Note = result.Annotations.Note.UnionWith(conceptScheme.Annotations.Note); result.Annotations.ChangeNote = result.Annotations.ChangeNote.UnionWith(conceptScheme.Annotations.ChangeNote); result.Annotations.EditorialNote = result.Annotations.EditorialNote.UnionWith(conceptScheme.Annotations.EditorialNote); result.Annotations.HistoryNote = result.Annotations.HistoryNote.UnionWith(conceptScheme.Annotations.HistoryNote); result.Annotations.ScopeNote = result.Annotations.ScopeNote.UnionWith(conceptScheme.Annotations.ScopeNote); result.Annotations.Definition = result.Annotations.Definition.UnionWith(conceptScheme.Annotations.Definition); result.Annotations.Example = result.Annotations.Example.UnionWith(conceptScheme.Annotations.Example); } return result; } /// <summary> /// Builds a new difference scheme from this scheme and a given one /// </summary> public RDFSKOSConceptScheme DifferenceWith(RDFSKOSConceptScheme conceptScheme) { RDFSKOSConceptScheme result = new RDFSKOSConceptScheme(new RDFResource()); if (conceptScheme != null) { //Add difference concepts foreach (RDFSKOSConcept c in this) { if (!conceptScheme.Concepts.ContainsKey(c.PatternMemberID)) result.AddConcept(c); } //Add difference collections foreach (RDFSKOSCollection c in this.Collections.Values) { if (!conceptScheme.Collections.ContainsKey(c.PatternMemberID)) result.AddCollection(c); } //Add difference ordered collections foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) { if (!conceptScheme.OrderedCollections.ContainsKey(o.PatternMemberID)) result.AddOrderedCollection(o); } //Add difference labels foreach (RDFSKOSLabel l in this.Labels.Values) { if (!conceptScheme.Labels.ContainsKey(l.PatternMemberID)) result.AddLabel(l); } //Add difference relations 
result.Relations.TopConcept = this.Relations.TopConcept.DifferenceWith(conceptScheme.Relations.TopConcept); result.Relations.Broader = this.Relations.Broader.DifferenceWith(conceptScheme.Relations.Broader); result.Relations.BroaderTransitive = this.Relations.BroaderTransitive.DifferenceWith(conceptScheme.Relations.BroaderTransitive); result.Relations.BroadMatch = this.Relations.BroadMatch.DifferenceWith(conceptScheme.Relations.BroadMatch); result.Relations.Narrower = this.Relations.Narrower.DifferenceWith(conceptScheme.Relations.Narrower); result.Relations.NarrowerTransitive = this.Relations.NarrowerTransitive.DifferenceWith(conceptScheme.Relations.NarrowerTransitive); result.Relations.NarrowMatch = this.Relations.NarrowMatch.DifferenceWith(conceptScheme.Relations.NarrowMatch); result.Relations.Related = this.Relations.Related.DifferenceWith(conceptScheme.Relations.Related); result.Relations.RelatedMatch = this.Relations.RelatedMatch.DifferenceWith(conceptScheme.Relations.RelatedMatch); result.Relations.SemanticRelation = this.Relations.SemanticRelation.DifferenceWith(conceptScheme.Relations.SemanticRelation); result.Relations.MappingRelation = this.Relations.MappingRelation.DifferenceWith(conceptScheme.Relations.MappingRelation); result.Relations.CloseMatch = this.Relations.CloseMatch.DifferenceWith(conceptScheme.Relations.CloseMatch); result.Relations.ExactMatch = this.Relations.ExactMatch.DifferenceWith(conceptScheme.Relations.ExactMatch); result.Relations.Notation = this.Relations.Notation.DifferenceWith(conceptScheme.Relations.Notation); result.Relations.PrefLabel = this.Relations.PrefLabel.DifferenceWith(conceptScheme.Relations.PrefLabel); result.Relations.AltLabel = this.Relations.AltLabel.DifferenceWith(conceptScheme.Relations.AltLabel); result.Relations.HiddenLabel = this.Relations.HiddenLabel.DifferenceWith(conceptScheme.Relations.HiddenLabel); result.Relations.LiteralForm = this.Relations.LiteralForm.DifferenceWith(conceptScheme.Relations.LiteralForm); result.Relations.LabelRelation = this.Relations.LabelRelation.DifferenceWith(conceptScheme.Relations.LabelRelation); //Add difference annotations result.Annotations.PrefLabel = this.Annotations.PrefLabel.DifferenceWith(conceptScheme.Annotations.PrefLabel); result.Annotations.AltLabel = this.Annotations.AltLabel.DifferenceWith(conceptScheme.Annotations.AltLabel); result.Annotations.HiddenLabel = this.Annotations.HiddenLabel.DifferenceWith(conceptScheme.Annotations.HiddenLabel); result.Annotations.Note = this.Annotations.Note.DifferenceWith(conceptScheme.Annotations.Note); result.Annotations.ChangeNote = this.Annotations.ChangeNote.DifferenceWith(conceptScheme.Annotations.ChangeNote); result.Annotations.EditorialNote = this.Annotations.EditorialNote.DifferenceWith(conceptScheme.Annotations.EditorialNote); result.Annotations.HistoryNote = this.Annotations.HistoryNote.DifferenceWith(conceptScheme.Annotations.HistoryNote); result.Annotations.ScopeNote = this.Annotations.ScopeNote.DifferenceWith(conceptScheme.Annotations.ScopeNote); result.Annotations.Definition = this.Annotations.Definition.DifferenceWith(conceptScheme.Annotations.Definition); result.Annotations.Example = this.Annotations.Example.DifferenceWith(conceptScheme.Annotations.Example); } else { //Add concepts from this scheme foreach (RDFSKOSConcept c in this) result.AddConcept(c); //Add collections from this scheme foreach (RDFSKOSCollection c in this.Collections.Values) result.AddCollection(c); //Add ordered collections from this scheme foreach (RDFSKOSOrderedCollection o 
in this.OrderedCollections.Values) result.AddOrderedCollection(o); //Add labels from this scheme foreach (RDFSKOSLabel l in this.Labels.Values) result.AddLabel(l); //Add relations from this scheme result.Relations.TopConcept = result.Relations.TopConcept.UnionWith(this.Relations.TopConcept); result.Relations.Broader = result.Relations.Broader.UnionWith(this.Relations.Broader); result.Relations.BroaderTransitive = result.Relations.BroaderTransitive.UnionWith(this.Relations.BroaderTransitive); result.Relations.BroadMatch = result.Relations.BroadMatch.UnionWith(this.Relations.BroadMatch); result.Relations.Narrower = result.Relations.Narrower.UnionWith(this.Relations.Narrower); result.Relations.NarrowerTransitive = result.Relations.NarrowerTransitive.UnionWith(this.Relations.NarrowerTransitive); result.Relations.NarrowMatch = result.Relations.NarrowMatch.UnionWith(this.Relations.NarrowMatch); result.Relations.Related = result.Relations.Related.UnionWith(this.Relations.Related); result.Relations.RelatedMatch = result.Relations.RelatedMatch.UnionWith(this.Relations.RelatedMatch); result.Relations.SemanticRelation = result.Relations.SemanticRelation.UnionWith(this.Relations.SemanticRelation); result.Relations.MappingRelation = result.Relations.MappingRelation.UnionWith(this.Relations.MappingRelation); result.Relations.CloseMatch = result.Relations.CloseMatch.UnionWith(this.Relations.CloseMatch); result.Relations.ExactMatch = result.Relations.ExactMatch.UnionWith(this.Relations.ExactMatch); result.Relations.Notation = result.Relations.Notation.UnionWith(this.Relations.Notation); result.Relations.PrefLabel = result.Relations.PrefLabel.UnionWith(this.Relations.PrefLabel); result.Relations.AltLabel = result.Relations.AltLabel.UnionWith(this.Relations.AltLabel); result.Relations.HiddenLabel = result.Relations.HiddenLabel.UnionWith(this.Relations.HiddenLabel); result.Relations.LiteralForm = result.Relations.LiteralForm.UnionWith(this.Relations.LiteralForm); result.Relations.LabelRelation = result.Relations.LabelRelation.UnionWith(this.Relations.LabelRelation); //Add annotations from this scheme result.Annotations.PrefLabel = result.Annotations.PrefLabel.UnionWith(this.Annotations.PrefLabel); result.Annotations.AltLabel = result.Annotations.AltLabel.UnionWith(this.Annotations.AltLabel); result.Annotations.HiddenLabel = result.Annotations.HiddenLabel.UnionWith(this.Annotations.HiddenLabel); result.Annotations.Note = result.Annotations.Note.UnionWith(this.Annotations.Note); result.Annotations.ChangeNote = result.Annotations.ChangeNote.UnionWith(this.Annotations.ChangeNote); result.Annotations.EditorialNote = result.Annotations.EditorialNote.UnionWith(this.Annotations.EditorialNote); result.Annotations.HistoryNote = result.Annotations.HistoryNote.UnionWith(this.Annotations.HistoryNote); result.Annotations.ScopeNote = result.Annotations.ScopeNote.UnionWith(this.Annotations.ScopeNote); result.Annotations.Definition = result.Annotations.Definition.UnionWith(this.Annotations.Definition); result.Annotations.Example = result.Annotations.Example.UnionWith(this.Annotations.Example); } return result; } #endregion #region Convert /// <summary> /// Gets a graph representation of this scheme, exporting inferences according to the selected behavior /// </summary> public RDFGraph ToRDFGraph(RDFSemanticsEnums.RDFOntologyInferenceExportBehavior infexpBehavior) => this.ToRDFOntologyData().ToRDFGraph(infexpBehavior); /// <summary> /// Asynchronously gets a graph representation of this scheme, exporting inferences according 
to the selected behavior /// </summary> public Task<RDFGraph> ToRDFGraphAsync(RDFSemanticsEnums.RDFOntologyInferenceExportBehavior infexpBehavior) => Task.Run(() => ToRDFGraph(infexpBehavior)); /// <summary> /// Gets an ontology data representation of this scheme /// </summary> public RDFOntologyData ToRDFOntologyData() { RDFOntologyData result = new RDFOntologyData(); //ConceptScheme result.AddFact(this); result.AddClassTypeRelation(this, RDFVocabulary.SKOS.CONCEPT_SCHEME.ToRDFOntologyClass()); //Concepts foreach (RDFSKOSConcept c in this) { result.AddFact(c); result.AddClassTypeRelation(c, RDFVocabulary.SKOS.CONCEPT.ToRDFOntologyClass()); result.AddAssertionRelation(c, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); } //Collections foreach (RDFSKOSCollection c in this.Collections.Values) { result.AddAssertionRelation(c, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); result = result.UnionWith(c.ToRDFOntologyData()); } //OrderedCollections foreach (RDFSKOSOrderedCollection o in this.OrderedCollections.Values) { result.AddAssertionRelation(o, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); result = result.UnionWith(o.ToRDFOntologyData()); } //Labels foreach (RDFSKOSLabel l in this.Labels.Values) { result.AddFact(l); result.AddClassTypeRelation(l, RDFVocabulary.SKOS.SKOSXL.LABEL.ToRDFOntologyClass()); result.AddAssertionRelation(l, RDFVocabulary.SKOS.IN_SCHEME.ToRDFOntologyObjectProperty(), this); } //Assertions result.Relations.Assertions = result.Relations.Assertions.UnionWith(this.Relations.TopConcept) .UnionWith(this.Relations.Broader) .UnionWith(this.Relations.BroaderTransitive) .UnionWith(this.Relations.BroadMatch) .UnionWith(this.Relations.Narrower) .UnionWith(this.Relations.NarrowerTransitive) .UnionWith(this.Relations.NarrowMatch) .UnionWith(this.Relations.Related) .UnionWith(this.Relations.RelatedMatch) .UnionWith(this.Relations.SemanticRelation) .UnionWith(this.Relations.MappingRelation) .UnionWith(this.Relations.CloseMatch) .UnionWith(this.Relations.ExactMatch) .UnionWith(this.Relations.Notation) .UnionWith(this.Relations.PrefLabel) .UnionWith(this.Relations.AltLabel) .UnionWith(this.Relations.HiddenLabel) .UnionWith(this.Relations.LiteralForm) .UnionWith(this.Relations.LabelRelation); //Annotations result.Annotations.CustomAnnotations = result.Annotations.CustomAnnotations.UnionWith(this.Annotations.PrefLabel) .UnionWith(this.Annotations.AltLabel) .UnionWith(this.Annotations.HiddenLabel) .UnionWith(this.Annotations.Note) .UnionWith(this.Annotations.ChangeNote) .UnionWith(this.Annotations.EditorialNote) .UnionWith(this.Annotations.HistoryNote) .UnionWith(this.Annotations.ScopeNote) .UnionWith(this.Annotations.Definition) .UnionWith(this.Annotations.Example); return result; } /// <summary> /// Asynchronously gets an ontology data representation of this scheme /// </summary> public Task<RDFOntologyData> ToRDFOntologyDataAsync() => Task.Run(() => ToRDFOntologyData()); #endregion #endregion } }
mdesalvo/RDFSharp
RDFSharp/Semantics/SKOS/Abstractions/RDFSKOSConceptScheme.cs
C#
apache-2.0
41,109
# frozen_string_literal: true module Beta module Types module Objects class PrefectureType < Beta::Types::Objects::Base implements GraphQL::Types::Relay::Node global_id_field :id field :annict_id, Integer, null: false field :name, String, null: false end end end end
annict/annict
app/graphql/beta/types/objects/prefecture_type.rb
Ruby
apache-2.0
326
/* * Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.orientechnologies.orient.core.command.script; import java.util.Map; import java.util.Map.Entry; import javax.script.Bindings; import javax.script.Invocable; import javax.script.ScriptContext; import javax.script.ScriptEngine; import javax.script.ScriptException; import com.orientechnologies.orient.core.Orient; import com.orientechnologies.orient.core.command.OCommandExecutorAbstract; import com.orientechnologies.orient.core.command.OCommandRequest; import com.orientechnologies.orient.core.db.record.ODatabaseRecordTx; import com.orientechnologies.orient.core.metadata.function.OFunction; /** * Executes Script Commands. * * @see OCommandScript * @author Luca Garulli * */ public class OCommandExecutorFunction extends OCommandExecutorAbstract { protected OCommandFunction request; public OCommandExecutorFunction() { } @SuppressWarnings("unchecked") public OCommandExecutorFunction parse(final OCommandRequest iRequest) { request = (OCommandFunction) iRequest; return this; } public Object execute(final Map<Object, Object> iArgs) { return executeInContext(null, iArgs); } public Object executeInContext(final Map<String, Object> iContext, final Map<Object, Object> iArgs) { parserText = request.getText(); final ODatabaseRecordTx db = (ODatabaseRecordTx) getDatabase(); final OFunction f = db.getMetadata().getFunctionLibrary().getFunction(parserText); final OScriptManager scriptManager = Orient.instance().getScriptManager(); final ScriptEngine scriptEngine = scriptManager.getEngine(f.getLanguage()); final Bindings binding = scriptManager.bind(scriptEngine, db, iContext, iArgs); try { scriptEngine.setBindings(binding, ScriptContext.ENGINE_SCOPE); // COMPILE FUNCTION LIBRARY scriptEngine.eval(scriptManager.getLibrary(db, f.getLanguage())); if (scriptEngine instanceof Invocable) { // INVOKE AS FUNCTION. 
PARAMS ARE PASSED BY POSITION final Invocable invocableEngine = (Invocable) scriptEngine; Object[] args = null; if (iArgs != null) { args = new Object[iArgs.size()]; int i = 0; for (Entry<Object, Object> arg : iArgs.entrySet()) args[i++] = arg.getValue(); } return invocableEngine.invokeFunction(parserText, args); } else { // INVOKE THE CODE SNIPPET return scriptEngine.eval(invokeFunction(f, iArgs.values().toArray()), binding); } } catch (ScriptException e) { throw new OCommandScriptException("Error on execution of the script", request.getText(), e.getColumnNumber(), e); } catch (NoSuchMethodException e) { throw new OCommandScriptException("Error on execution of the script", request.getText(), 0, e); } finally { scriptManager.unbind(binding); } } public boolean isIdempotent() { return false; } @Override protected void throwSyntaxErrorException(String iText) { throw new OCommandScriptException("Error on execution of the script: " + iText, request.getText(), 0); } protected String invokeFunction(final OFunction f, Object[] iArgs) { final StringBuilder code = new StringBuilder(); code.append(f.getName()); code.append('('); int i = 0; for (Object a : iArgs) { if (i++ > 0) code.append(','); code.append(a); } code.append(");"); return code.toString(); } }
redox/OrientDB
core/src/main/java/com/orientechnologies/orient/core/command/script/OCommandExecutorFunction.java
Java
apache-2.0
4,208
import java.util.Scanner; /** * @author Oleg Cherednik * @since 13.07.2018 */ public class Solution { static int palindromeIndex(String s) { for (int i = 0, j = s.length() - 1; i < j; i++, j--) { if (s.charAt(i) == s.charAt(j)) continue; for (int k = i, m = j - 1; k < m; k++, m--) if (s.charAt(k) != s.charAt(m)) return i; return j; } return -1; } private static final Scanner scanner = new Scanner(System.in); public static void main(String[] args) { int q = scanner.nextInt(); scanner.skip("(\r\n|[\n\r\u2028\u2029\u0085])?"); for (int qItr = 0; qItr < q; qItr++) { String s = scanner.nextLine(); int result = palindromeIndex(s); System.out.println(String.valueOf(result)); } scanner.close(); } }
oleg-cherednik/hackerrank
Algorithms/Strings/Palindrome Index/Solution.java
Java
apache-2.0
921
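A minimal, hedged usage sketch for the palindromeIndex routine in the entry above. It assumes the method is reachable from the caller (it is a package-private static on Solution, so the demo would live in the same package) and relies on the problem's guarantee that removing at most one character yields a palindrome; the demo class name is an illustrative assumption.

// Illustrative driver, not part of the original submission.
public class PalindromeIndexDemo {
    public static void main(String[] args) {
        // "aaab": characters at indices 0 and 3 differ; "aaa" (indices 0..2) is a palindrome, so 3 is returned.
        System.out.println(Solution.palindromeIndex("aaab")); // 3
        // "baa": the inner scan hits 'b' vs 'a', so the left index 0 is returned.
        System.out.println(Solution.palindromeIndex("baa"));  // 0
        // Already a palindrome: no mismatch is ever found, so -1 is returned.
        System.out.println(Solution.palindromeIndex("aaa"));  // -1
    }
}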
package de.newsarea.homecockpit.connector.facade.registration.util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.lang.reflect.Constructor; import java.lang.reflect.Method; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; public final class ClassLoaderHelper { private static Logger log = LoggerFactory.getLogger(ClassLoaderHelper.class); private ClassLoaderHelper() { } public static Constructor<?> determineFirstConstructor(Class<?> clazz) { try { for(Constructor<?> constructor : clazz.getConstructors()) { return constructor; } } catch (SecurityException e) { log.error(e.getMessage(), e); } return null; } public static Constructor<?> determineConstructorByArgumentTypes(Class<?> clazz, Class<?>[] argumentTypes) { try { for(Constructor<?> constructor : clazz.getConstructors()) { if(isAssignableFrom(constructor, argumentTypes)) { return constructor; } } } catch (SecurityException e) { log.error(e.getMessage(), e); } return null; } private static boolean isAssignableFrom(Constructor<?> constructor, Class<?>[] argumentTypes) { Class<?>[] constructorArgTypes = constructor.getParameterTypes(); if(constructorArgTypes.length != argumentTypes.length) { return false; } // ~ for(int i=0; i < argumentTypes.length; i++) { if(!argumentTypes[i].isAssignableFrom(constructorArgTypes[i])) { return false; } } return true; } public static List<Class<?>> determineClasses(String packageName) throws ClassNotFoundException, IOException { ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); assert classLoader != null; String path = packageName.replace('.', '/'); Enumeration<URL> resources = classLoader.getResources(path); List<File> dirs = new ArrayList<>(); while (resources.hasMoreElements()) { URL resource = resources.nextElement(); dirs.add(new File(resource.getFile().replaceAll("%20", " "))); } ArrayList<Class<?>> classes = new ArrayList<>(); for (File directory : dirs) { classes.addAll(findClasses(directory, packageName)); } return classes; } public static List<Class<?>> findClasses(File directory, String packageName) throws ClassNotFoundException { List<Class<?>> classes = new ArrayList<>(); if (!directory.exists()) { return classes; } File[] files = directory.listFiles(); for (File file : files) { if (file.isDirectory()) { assert !file.getName().contains("."); classes.addAll(findClasses(file, packageName + "." + file.getName())); } else if (file.getName().endsWith(".class")) { classes.add(Class.forName(packageName + '.' + file.getName().substring(0, file.getName().length() - 6))); } } return classes; } public static Method determineSetterMethod(Class<?> clazz, String name) { for(Method method : clazz.getMethods()) { if(method.getName().equalsIgnoreCase("set" + name)) { return method; } } return null; } }
RBernhardt/homecockpit-connectors
connectors-facade/src/main/java/de/newsarea/homecockpit/connector/facade/registration/util/ClassLoaderHelper.java
Java
apache-2.0
3,378
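A hedged sketch of driving the ClassLoaderHelper utilities above. The package name com.example.plugins and the assumption that scanned classes expose a no-argument constructor and a String-typed name setter are illustrative, not taken from the original project.

import de.newsarea.homecockpit.connector.facade.registration.util.ClassLoaderHelper;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.List;

// Illustrative caller; uses only the static helpers shown above.
public class ClassLoaderHelperDemo {
    public static void main(String[] args) throws Exception {
        // Scan a (hypothetical) package on the classpath for .class files.
        List<Class<?>> classes = ClassLoaderHelper.determineClasses("com.example.plugins");
        for (Class<?> clazz : classes) {
            // Pick the no-argument constructor, if the class declares one.
            Constructor<?> ctor = ClassLoaderHelper.determineConstructorByArgumentTypes(clazz, new Class<?>[0]);
            // Resolve a setter by property name; the lookup is case-insensitive ("setName", "setname", ...).
            Method setter = ClassLoaderHelper.determineSetterMethod(clazz, "name");
            if (ctor != null && setter != null) {
                Object instance = ctor.newInstance();
                setter.invoke(instance, "configured-by-reflection"); // assumes the setter accepts a String
                System.out.println("configured " + clazz.getName());
            }
        }
    }
}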
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @author Upendra Jariya * @sponsor Douglas Johnson * @version 1.0 * @since 2014-11-10 */ package tools.datasync.utils; import org.apache.commons.codec.digest.DigestUtils; import org.apache.log4j.Logger; import tools.datasync.api.utils.HashGenerator; public class Md5HashGenerator implements HashGenerator { private static Md5HashGenerator instance = null; private static Logger LOG = Logger.getLogger(Md5HashGenerator.class .getName()); private Md5HashGenerator() { } public static synchronized Md5HashGenerator getInstance() { if (instance == null) { instance = new Md5HashGenerator(); } return instance; } public String generate(String data) { try { byte[] digest = DigestUtils.md5(data); return (DigestUtils.md5Hex(digest)); } catch (Exception e) { LOG.warn("Error while generating checksum on value [" + data + "]", e); return null; } } public boolean validate(String data, String hash) { String newHash = generate(data); return newHash.equals(hash); } }
datasynctools/sync-tools-prototype
data-sync-tools-core/src/main/java/tools/datasync/utils/Md5HashGenerator.java
Java
apache-2.0
1,858
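A hedged round-trip sketch for the Md5HashGenerator entry above. Note that generate() hex-encodes the MD5 of the raw MD5 digest bytes (a double hash), so its output deliberately differs from a plain DigestUtils.md5Hex(data); the sample payload string is made up.

import tools.datasync.utils.Md5HashGenerator;

// Illustrative only; exercises just the API shown above.
public class Md5HashGeneratorDemo {
    public static void main(String[] args) {
        Md5HashGenerator hasher = Md5HashGenerator.getInstance();
        String hash = hasher.generate("row-42|2014-11-10|synced");                  // hex of MD5(MD5(data))
        System.out.println(hasher.validate("row-42|2014-11-10|synced", hash));     // true: same payload
        System.out.println(hasher.validate("row-43|2014-11-10|synced", hash));     // expected false: payload changed
    }
}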
package com.github.database.rider.core.script; import org.assertj.core.api.SoftAssertions; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import javax.script.ScriptException; import static org.assertj.core.api.Java6Assertions.assertThat; public class ScriptEngineManagerWrapperTest { @Rule public ExpectedException exceptionRule = ExpectedException.none(); private ScriptEngineManagerWrapper scriptEngineManager = ScriptEngineManagerWrapper.getInstance(); private SoftAssertions softly = new SoftAssertions(); @Before public void init() { softly = new SoftAssertions(); } @Test public void shouldGetJsScriptResult() throws ScriptException { Object scriptResult = ScriptEngineManagerWrapper.getInstance().getScriptResult("js: 1+1"); assertThat(scriptResult).isEqualTo(2); } @Test public void shouldGetGroovyScriptResult() throws ScriptException { Object scriptResult = scriptEngineManager.getScriptResult("groovy: 1+1"); assertThat(scriptResult).isEqualTo(2); } @Test public void shouldNotGetScriptResultFromUnknownEngine() throws ScriptException { exceptionRule.expect(RuntimeException.class); exceptionRule.expectMessage("Could not find script engine by name 'kotlin'"); scriptEngineManager.getScriptResult("kotlin: 1+1"); } @Test public void shouldAssertValueGreaterThanZero() throws ScriptException { String script = "js:(value > 0)"; softly.assertThat(scriptEngineManager.getScriptAssert(script, 2)).as("js script with value=2").isTrue(); softly.assertThat(scriptEngineManager.getScriptAssert(script, 0)).as("js script with value=0").isFalse(); softly.assertThat(scriptEngineManager.getScriptAssert(script, -1)).as("js script with value=-1").isFalse(); script = "groovy:(value > 0)"; softly.assertThat(scriptEngineManager.getScriptAssert(script, 2)).as("groovy script with value=2").isTrue(); softly.assertThat(scriptEngineManager.getScriptAssert(script, 0)).as("groovy script with value=0").isFalse(); softly.assertThat(scriptEngineManager.getScriptAssert(script, -1)).as("groovy script with value=-1").isFalse(); softly.assertAll(); } @Test public void shouldAssertNullValue() throws ScriptException { SoftAssertions soft = new SoftAssertions(); String script = "js:(value == null)"; soft.assertThat(scriptEngineManager.getScriptAssert(script, null)).as("js script with null value").isTrue(); soft.assertThat(scriptEngineManager.getScriptAssert(script, 1)).as("js script with non-null value").isFalse(); script = "groovy:(value == null)"; soft.assertThat(scriptEngineManager.getScriptAssert(script, null)).as("groovy script with null value").isTrue(); soft.assertThat(scriptEngineManager.getScriptAssert(script, 1)).as("groovy script with non-null value").isFalse(); soft.assertAll(); } @Test public void shouldAssertContainsValue() throws ScriptException { SoftAssertions soft = new SoftAssertions(); String script = "js:(value.contains('dbunit'))"; soft.assertThat(scriptEngineManager.getScriptAssert(script, "dbunit rules")).as("js script with 'dbunit rules' value").isTrue(); soft.assertThat(scriptEngineManager.getScriptAssert(script, "database rider rules")).as("js script 'database rider' value").isFalse(); script = "groovy:(value.contains('dbunit'))"; soft.assertThat(scriptEngineManager.getScriptAssert(script, "dbunit rules")).as("groovy script with 'dbunit rules' value").isTrue(); soft.assertThat(scriptEngineManager.getScriptAssert(script, "database rider rules")).as("groovy script 'database rider' value").isFalse(); soft.assertAll(); } @Test public void 
shouldNotAssertInvalidScript() throws ScriptException { exceptionRule.expect(ScriptException.class); exceptionRule.expectMessage("value.includes is not a function"); String script = "js:(value.includes('dbunit'))"; scriptEngineManager.getScriptAssert(script, "dbunit rules"); } }
database-rider/database-rider
rider-core/src/test/java/com/github/database/rider/core/script/ScriptEngineManagerWrapperTest.java
Java
apache-2.0
4,246
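A hedged sketch of the "engine-name: expression" convention exercised by the test class above. It assumes the JavaScript and Groovy engines are on the classpath, exactly as the tests do, and that the second argument of getScriptAssert is bound to the script variable 'value', which is what the assertions imply.

import com.github.database.rider.core.script.ScriptEngineManagerWrapper;
import javax.script.ScriptException;

// Illustrative only; mirrors the calls made by the tests above.
public class ScriptEngineDemo {
    public static void main(String[] args) throws ScriptException {
        ScriptEngineManagerWrapper scripts = ScriptEngineManagerWrapper.getInstance();
        // The prefix before ':' selects the engine; the remainder is the expression to evaluate.
        Object sum = scripts.getScriptResult("js: 1+1");                                        // -> 2
        // As the tests imply, the second argument is exposed to the script as 'value'.
        boolean positive = scripts.getScriptAssert("groovy:(value > 0)", 5);                    // -> true
        boolean hasWord = scripts.getScriptAssert("js:(value.contains('dbunit'))", "dbunit rules"); // -> true
        System.out.println(sum + " " + positive + " " + hasWord);
    }
}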
/*global describe, beforeEach, it*/ 'use strict'; var assert = require('yeoman-generator').assert; var helper = require('./helper'); describe('mcap:connections', function () { beforeEach(function (done) { var answers = { name: 'MyApp' }; // Creates a generator with the default options / arguments helper.createAppGenerator({ answers: answers }, done); }); it('creates expected files', function (done) { var expectedFiles = [ 'connections/sap.json' ]; var expectedContent = { name: 'SAP', description: 'SAP API', type: 'rest', properties: { descriptorUrl: 'http://sap.mway.io', username: 'admin', password: 'root' } }; var answers = { name: 'SAP', description: 'SAP API', descriptorUrl: 'http://sap.mway.io', username: 'admin', password: 'root' }; helper.createSubGenerator('connection', {answers: answers}, function () { assert.file(expectedFiles); helper.deepEqual('connections/sap.json', expectedContent); done(); }); }); });
mwaylabs/mcap-cli
generators/generator-mcap/test/test-connections.js
JavaScript
apache-2.0
1,116
/* * Copyright 2018 Google LLC. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.tools.jib.api; import java.util.Objects; /** Holds credentials (username and password). */ public class Credential { // If the username is set to <token>, the secret would be a refresh token. // https://github.com/docker/cli/blob/master/docs/reference/commandline/login.md#credential-helper-protocol public static final String OAUTH2_TOKEN_USER_NAME = "<token>"; /** * Gets a {@link Credential} configured with a username and password. * * @param username the username * @param password the password * @return a new {@link Credential} */ public static Credential from(String username, String password) { return new Credential(username, password); } private final String username; private final String password; private Credential(String username, String password) { this.username = username; this.password = password; } /** * Gets the username. * * @return the username */ public String getUsername() { return username; } /** * Gets the password. * * @return the password */ public String getPassword() { return password; } /** * Check whether this credential is an OAuth 2.0 refresh token. * * @return true if this credential is an OAuth 2.0 refresh token. */ public boolean isOAuth2RefreshToken() { return OAUTH2_TOKEN_USER_NAME.equals(username); } @Override public boolean equals(Object other) { if (this == other) { return true; } if (!(other instanceof Credential)) { return false; } Credential otherCredential = (Credential) other; return username.equals(otherCredential.username) && password.equals(otherCredential.password); } @Override public int hashCode() { return Objects.hash(username, password); } @Override public String toString() { return username + ":" + password; } }
GoogleContainerTools/jib
jib-core/src/main/java/com/google/cloud/tools/jib/api/Credential.java
Java
apache-2.0
2,498
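A hedged sketch of the two ways the Credential value class above is built; the registry username/password and the refresh-token literal are placeholders, not real secrets.

import com.google.cloud.tools.jib.api.Credential;

// Illustrative only; uses just the public factory and query methods shown above.
public class CredentialDemo {
    public static void main(String[] args) {
        Credential basic = Credential.from("registry-user", "registry-pass");
        Credential oauth = Credential.from(Credential.OAUTH2_TOKEN_USER_NAME, "placeholder-refresh-token");
        System.out.println(basic.isOAuth2RefreshToken()); // false: ordinary username/password pair
        System.out.println(oauth.isOAuth2RefreshToken()); // true: the "<token>" username marks a refresh token
    }
}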
package com.github.agourlay.cornichon.steps.wrapped import com.github.agourlay.cornichon.core._ import com.github.agourlay.cornichon.steps.regular.assertStep.{ AssertStep, GenericEqualityAssertion } import com.github.agourlay.cornichon.testHelpers.CommonTestSuite import munit.FunSuite import scala.concurrent.duration._ class WithinStepSpec extends FunSuite with CommonTestSuite { test("controls duration of 'within' wrapped steps") { val d = 50.millis val nested = AssertStep( "possible random value step", _ => { Thread.sleep(10) GenericEqualityAssertion(true, true) } ) :: Nil val withinStep = WithinStep(nested, d) val s = Scenario("scenario with Within", withinStep :: Nil) val res = awaitIO(ScenarioRunner.runScenario(Session.newEmpty)(s)) assert(res.isSuccess) } test("fails if duration of 'within' is exceeded") { val d = 10.millis val nested = AssertStep( "possible random value step", _ => { Thread.sleep(20) GenericEqualityAssertion(true, true) } ) :: Nil val withinStep = WithinStep(nested, d) val s = Scenario("scenario with Within", withinStep :: Nil) val res = awaitIO(ScenarioRunner.runScenario(Session.newEmpty)(s)) assert(!res.isSuccess) } }
agourlay/cornichon
cornichon-core/src/test/scala/com/github/agourlay/cornichon/steps/wrapped/WithinStepSpec.scala
Scala
apache-2.0
1,298
const browserSync = require('../../../'); const utils = require('../utils'); const register = require('../../../dist/plugins/clients').ClientEvents.register; const assert = require('chai').assert; describe('Client connection stream', function () { it('does not have duplicates', function (done) { browserSync.create({}).subscribe(function (bs) { const client = utils.getClientSocket(bs); client.emit(register, utils.getClient('123456')); client.emit(register, utils.getClient('123456')); bs.clients$.skip(1) .take(2) .toArray() .subscribe(function (clients) { assert.equal(clients[0].size, 1); assert.equal(clients[1].size, 1); const jsClients1 = clients[0].toList().toJS(); const jsClients2 = clients[1].toList().toJS(); assert.equal(jsClients1[0].id, '123456'); assert.equal(jsClients2[0].id, '123456'); bs.cleanup(); done(); }, function (err) {done(err)}); }); }); it('allows unique clients', function (done) { browserSync.create({}).subscribe(function (bs) { const client = utils.getClientSocket(bs); client.emit(register, utils.getClient('xyz')); client.emit(register, utils.getClient('zxy')); bs.clients$.skip(1) .take(2) .toArray() .subscribe(function (clients) { assert.equal(clients[0].size, 1); assert.equal(clients[1].size, 2); const jsClients1 = clients[0].toList().toJS(); const jsClients2 = clients[1].toList().toJS(); assert.equal(jsClients1[0].id, 'xyz'); assert.equal(jsClients2[0].id, 'xyz'); assert.equal(jsClients2[1].id, 'zxy'); bs.cleanup(); done(); }, function(err) { done(err) }); }); }); it('allows unique clients (stress)', function (done) { browserSync.create({}).subscribe(function (bs) { for (var i = 1, n = 51; i < n; i += 1) { utils.getClientSocket(bs).emit(register, utils.getClient('id-' + i)); } bs.clients$.skip(1) .take(50) .toArray() .subscribe(function (clients) { assert.equal(clients[49].size, 50); assert.ok(clients[49].get('id-40')); assert.equal(clients[49].get('id-40').get('id'), 'id-40'); bs.cleanup(); done(); }, function(err) { done(err) }); }); }); });
BrowserSync/browser-sync-core
test/mocha/api/clients$.js
JavaScript
apache-2.0
2,869
using Lucene.Net.Diagnostics; using System.Runtime.CompilerServices; namespace Lucene.Net.Util.Fst { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using DataInput = Lucene.Net.Store.DataInput; using DataOutput = Lucene.Net.Store.DataOutput; /// <summary> /// An FST <see cref="Outputs{T}"/> implementation, holding two other outputs. /// <para/> /// @lucene.experimental /// </summary> public class PairOutputs<A, B> : Outputs<PairOutputs<A, B>.Pair> { private readonly Pair NO_OUTPUT; private readonly Outputs<A> outputs1; private readonly Outputs<B> outputs2; /// <summary> /// Holds a single pair of two outputs. </summary> public class Pair { public A Output1 { get; private set; } public B Output2 { get; private set; } // use newPair internal Pair(A output1, B output2) { this.Output1 = output1; this.Output2 = output2; } public override bool Equals(object other) { // LUCENENET specific - simplified expression return ReferenceEquals(other, this) || (other is Pair pair && Output1.Equals(pair.Output1) && Output2.Equals(pair.Output2)); } public override int GetHashCode() { return Output1.GetHashCode() + Output2.GetHashCode(); } } public PairOutputs(Outputs<A> outputs1, Outputs<B> outputs2) { this.outputs1 = outputs1; this.outputs2 = outputs2; NO_OUTPUT = new Pair(outputs1.NoOutput, outputs2.NoOutput); } /// <summary> /// Create a new <see cref="Pair"/> </summary> public virtual Pair NewPair(A a, B b) { if (a.Equals(outputs1.NoOutput)) { a = outputs1.NoOutput; } if (b.Equals(outputs2.NoOutput)) { b = outputs2.NoOutput; } if (a.Equals(outputs1.NoOutput) && b.Equals(outputs2.NoOutput)) { return NO_OUTPUT; } else { var p = new Pair(a, b); if (Debugging.AssertsEnabled) Debugging.Assert(Valid(p)); return p; } } // for assert private bool Valid(Pair pair) { bool noOutput1 = pair.Output1.Equals(outputs1.NoOutput); bool noOutput2 = pair.Output2.Equals(outputs2.NoOutput); if (noOutput1 && !pair.Output1.Equals(outputs1.NoOutput)) { return false; } if (noOutput2 && !pair.Output2.Equals(outputs2.NoOutput)) { return false; } if (noOutput1 && noOutput2) { if (!pair.Equals(NO_OUTPUT)) { return false; } else { return true; } } else { return true; } } public override Pair Common(Pair pair1, Pair pair2) { if (Debugging.AssertsEnabled) { Debugging.Assert(Valid(pair1)); Debugging.Assert(Valid(pair2)); } return NewPair(outputs1.Common(pair1.Output1, pair2.Output1), outputs2.Common(pair1.Output2, pair2.Output2)); } public override Pair Subtract(Pair output, Pair inc) { if (Debugging.AssertsEnabled) { Debugging.Assert(Valid(output)); Debugging.Assert(Valid(inc)); } return NewPair(outputs1.Subtract(output.Output1, inc.Output1), outputs2.Subtract(output.Output2, inc.Output2)); } public override Pair Add(Pair prefix, Pair output) { if (Debugging.AssertsEnabled) { Debugging.Assert(Valid(prefix)); 
Debugging.Assert(Valid(output)); } return NewPair(outputs1.Add(prefix.Output1, output.Output1), outputs2.Add(prefix.Output2, output.Output2)); } [MethodImpl(MethodImplOptions.AggressiveInlining)] public override void Write(Pair output, DataOutput writer) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); outputs1.Write(output.Output1, writer); outputs2.Write(output.Output2, writer); } [MethodImpl(MethodImplOptions.AggressiveInlining)] public override Pair Read(DataInput @in) { A output1 = outputs1.Read(@in); B output2 = outputs2.Read(@in); return NewPair(output1, output2); } public override Pair NoOutput => NO_OUTPUT; [MethodImpl(MethodImplOptions.AggressiveInlining)] public override string OutputToString(Pair output) { if (Debugging.AssertsEnabled) Debugging.Assert(Valid(output)); return "<pair:" + outputs1.OutputToString(output.Output1) + "," + outputs2.OutputToString(output.Output2) + ">"; } public override string ToString() { return "PairOutputs<" + outputs1 + "," + outputs2 + ">"; } } }
NightOwl888/lucenenet
src/Lucene.Net/Util/Fst/PairOutputs.cs
C#
apache-2.0
6,273
# Orchis lutea Dulac SPECIES #### Status SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Dactylorhiza/Dactylorhiza sambucina/ Syn. Orchis lutea/README.md
Markdown
apache-2.0
175
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_112) on Tue Sep 12 14:31:26 MST 2017 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Interface org.wildfly.swarm.config.management.security_realm.LdapAuthorizationSupplier (BOM: * : All 2017.9.5 API)</title> <meta name="date" content="2017-09-12"> <link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Interface org.wildfly.swarm.config.management.security_realm.LdapAuthorizationSupplier (BOM: * : All 2017.9.5 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/wildfly/swarm/config/management/security_realm/LdapAuthorizationSupplier.html" title="interface in org.wildfly.swarm.config.management.security_realm">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2017.9.5</div> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/wildfly/swarm/config/management/security_realm/class-use/LdapAuthorizationSupplier.html" target="_top">Frames</a></li> <li><a href="LdapAuthorizationSupplier.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Interface org.wildfly.swarm.config.management.security_realm.LdapAuthorizationSupplier" class="title">Uses of Interface<br>org.wildfly.swarm.config.management.security_realm.LdapAuthorizationSupplier</h2> </div> <div class="classUseContainer"> <ul class="blockList"> <li class="blockList"> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation"> <caption><span>Packages that use <a href="../../../../../../../org/wildfly/swarm/config/management/security_realm/LdapAuthorizationSupplier.html" title="interface in 
org.wildfly.swarm.config.management.security_realm">LdapAuthorizationSupplier</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Package</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="#org.wildfly.swarm.config.management">org.wildfly.swarm.config.management</a></td> <td class="colLast">&nbsp;</td> </tr> </tbody> </table> </li> <li class="blockList"> <ul class="blockList"> <li class="blockList"><a name="org.wildfly.swarm.config.management"> <!-- --> </a> <h3>Uses of <a href="../../../../../../../org/wildfly/swarm/config/management/security_realm/LdapAuthorizationSupplier.html" title="interface in org.wildfly.swarm.config.management.security_realm">LdapAuthorizationSupplier</a> in <a href="../../../../../../../org/wildfly/swarm/config/management/package-summary.html">org.wildfly.swarm.config.management</a></h3> <table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation"> <caption><span>Methods in <a href="../../../../../../../org/wildfly/swarm/config/management/package-summary.html">org.wildfly.swarm.config.management</a> with parameters of type <a href="../../../../../../../org/wildfly/swarm/config/management/security_realm/LdapAuthorizationSupplier.html" title="interface in org.wildfly.swarm.config.management.security_realm">LdapAuthorizationSupplier</a></span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><code><a href="../../../../../../../org/wildfly/swarm/config/management/SecurityRealm.html" title="type parameter in SecurityRealm">T</a></code></td> <td class="colLast"><span class="typeNameLabel">SecurityRealm.</span><code><span class="memberNameLink"><a href="../../../../../../../org/wildfly/swarm/config/management/SecurityRealm.html#ldapAuthorization-org.wildfly.swarm.config.management.security_realm.LdapAuthorizationSupplier-">ldapAuthorization</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/management/security_realm/LdapAuthorizationSupplier.html" title="interface in org.wildfly.swarm.config.management.security_realm">LdapAuthorizationSupplier</a>&nbsp;supplier)</code> <div class="block">Configuration to use LDAP as the user repository.</div> </td> </tr> </tbody> </table> </li> </ul> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../org/wildfly/swarm/config/management/security_realm/LdapAuthorizationSupplier.html" title="interface in org.wildfly.swarm.config.management.security_realm">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../../../../../../../overview-tree.html">Tree</a></li> <li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../help-doc.html">Help</a></li> </ul> <div class="aboutLanguage">WildFly Swarm API, 2017.9.5</div> </div> <div 
class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../index.html?org/wildfly/swarm/config/management/security_realm/class-use/LdapAuthorizationSupplier.html" target="_top">Frames</a></li> <li><a href="LdapAuthorizationSupplier.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2017 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p> </body> </html>
wildfly-swarm/wildfly-swarm-javadocs
2017.9.5/apidocs/org/wildfly/swarm/config/management/security_realm/class-use/LdapAuthorizationSupplier.html
HTML
apache-2.0
8,110
#!/usr/bin/env python """A flow to run checks for a host.""" from grr.lib import aff4 from grr.lib import flow from grr.lib import rdfvalue from grr.lib.checks import checks from grr.proto import flows_pb2 class CheckFlowArgs(rdfvalue.RDFProtoStruct): protobuf = flows_pb2.CheckFlowArgs class CheckRunner(flow.GRRFlow): """This flow runs checks on a host. CheckRunner: - Identifies what checks should be run for a host. - Identifies the artifacts that need to be collected to perform those checks. - Orchestrates collection of the host data. - Routes host data to the relevant checks. - Returns check data ready for reporting. """ friendly_name = "Run Checks" category = "/Checks/" behaviours = flow.GRRFlow.behaviours + "BASIC" @flow.StateHandler(next_state=["MapArtifactData"]) def Start(self): """.""" client = aff4.FACTORY.Open(self.client_id, token=self.token) self.state.Register("knowledge_base", client.Get(client.Schema.KNOWLEDGE_BASE)) self.state.Register("labels", client.GetLabels()) self.state.Register("artifacts_wanted", set()) self.state.Register("artifacts_fetched", set()) self.state.Register("checks_run", []) self.state.Register("checks_with_findings", []) self.state.Register("results_store", None) self.state.Register("host_data", {}) self.CallState(next_state="MapArtifactData") @flow.StateHandler(next_state=["AddResponses", "RunChecks"]) def MapArtifactData(self, responses): """Get processed data, mapped to artifacts.""" self.state.artifacts_wanted = checks.CheckRegistry.SelectArtifacts( os=self.state.knowledge_base.os) # Fetch Artifacts and map results to the artifacts that generated them. # This is an inefficient collection, but necessary because results need to # be mapped to the originating artifact. An alternative would be to have # rdfvalues labeled with originating artifact ids. for artifact_id in self.state.artifacts_wanted: self.CallFlow("ArtifactCollectorFlow", artifact_list=[artifact_id], request_data={"artifact_id": artifact_id}, next_state="AddResponses") self.CallState(next_state="RunChecks") @flow.StateHandler() def AddResponses(self, responses): artifact_id = responses.request_data["artifact_id"] # TODO(user): Check whether artifact collection succeeded. self.state.host_data[artifact_id] = list(responses) @flow.StateHandler(next_state=["Done"]) def RunChecks(self, responses): if not responses.success: raise RuntimeError("Checks did not run successfully.") # Hand host data across to checks. Do this after all data has been collected # in case some checks require multiple artifacts/results. for finding in checks.CheckHost(self.state.host_data, os=self.state.knowledge_base.os): self.state.checks_run.append(finding.check_id) if finding.anomaly: self.state.checks_with_findings.append(finding.check_id) self.SendReply(finding)
ojengwa/grr
lib/flows/general/checks.py
Python
apache-2.0
3,088
/* Copyright 2017 - 2022 R. Thomas
 * Copyright 2017 - 2022 Quarkslab
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <algorithm>
#include <string>
#include <sstream>

#include "LIEF/MachO/hash.hpp"
#include "LIEF/MachO/RelocationObject.hpp"

#include "pyMachO.hpp"

namespace LIEF {
namespace MachO {

template<class T>
using getter_t = T (RelocationObject::*)(void) const;

template<class T>
using setter_t = void (RelocationObject::*)(T);

template<>
void create<RelocationObject>(py::module& m) {

  py::class_<RelocationObject, Relocation>(m, "RelocationObject",
      R"delim(
      Class that represents a relocation presents in the MachO object file
      (``.o``). Usually, this kind of relocation is found in the :class:`lief.MachO.Section`.
      )delim")

    .def_property("value",
        static_cast<getter_t<int32_t>>(&RelocationObject::value),
        static_cast<setter_t<int32_t>>(&RelocationObject::value),
        R"delim(
        For **scattered** relocations, the address of the relocatable expression
        for the item in the file that needs to be updated if the address is changed.

        For relocatable expressions with the difference of two section addresses,
        the address from which to subtract (in mathematical terms, the minuend)
        is contained in the first relocation entry and the address to subtract (the subtrahend)
        is contained in the second relocation entry.
        )delim")

    .def_property_readonly("is_scattered",
        &RelocationObject::is_scattered,
        "``True`` if the relocation is a scattered one")

    .def("__eq__", &RelocationObject::operator==)
    .def("__ne__", &RelocationObject::operator!=)
    .def("__hash__",
        [] (const RelocationObject& relocation) {
          return Hash::hash(relocation);
        })

    .def("__str__",
        [] (const RelocationObject& relocation) {
          std::ostringstream stream;
          stream << relocation;
          std::string str = stream.str();
          return str;
        });
}

}
}
lief-project/LIEF
api/python/MachO/objects/pyRelocationObject.cpp
C++
apache-2.0
2,566
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#ifndef TENSORFLOW_COMPILER_MLIR_LITE_TF_TFL_TRANSLATE_CL_H_
#define TENSORFLOW_COMPILER_MLIR_LITE_TF_TFL_TRANSLATE_CL_H_

// This file contains command-line options aimed to provide the parameters
// required by the TensorFlow Graph(Def) to TF Lite Flatbuffer conversion. It is
// only intended to be included by binaries.

#include <string>

#include "llvm/Support/CommandLine.h"

// The commandline options are defined in LLVM style, so the caller should
// use llvm::InitLLVM to initialize the options.
//
// Please see the implementation file for documentation of details of these
// options.
// TODO(jpienaar): Revise the command line option parsing here.
extern llvm::cl::opt<std::string> input_file_name;
extern llvm::cl::opt<std::string> output_file_name;
extern llvm::cl::opt<bool> use_splatted_constant;
extern llvm::cl::opt<bool> input_mlir;
extern llvm::cl::opt<bool> output_mlir;
extern llvm::cl::list<std::string> extra_opdefs;
extern llvm::cl::opt<bool> emit_quant_adaptor_ops;
extern llvm::cl::opt<std::string> quant_stats_file_name;

#endif  // TENSORFLOW_COMPILER_MLIR_LITE_TF_TFL_TRANSLATE_CL_H_
DavidNorman/tensorflow
tensorflow/compiler/mlir/lite/tf_tfl_translate_cl.h
C
apache-2.0
1,784
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1"> <meta name="description" content=""> <meta name="author" content=""> <title>Ilmurah</title> <link rel="shortcut icon" href="img/icon.jpg" /> <!-- Bootstrap Core CSS --> <link href="css/bootstrap.min.css" rel="stylesheet"> <link rel="stylesheet" href="css/main.css"> <!-- Custom Fonts --> <link href="font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css"> <link href="http://fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,700,300italic,400italic,700italic" rel="stylesheet" type="text/css"> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script> <script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <script type="text/javascript"> </script> <div class="container-fluid"> <div class="row"> <a id="menu-toggle" href="#" class="btn btn-dark btn-lg toggle"><i class="fa fa-bars"></i></a> <div class="col-md-3 sidebar" id="sidebar-wrapper"> <a id="menu-close" href="#" class="btn btn-light btn-lg pull-right toggle-close"><i class="fa fa-times"></i></a> <ul class="nav nav-stacked nav-pills"> <a href="index.html" class="sidebar-logo"><img src="img/logo.png" class="img-responsive"></a> <br/><br/> <li> <div class="sidebar-searchbar"> <div class="sidebar-searchbar-form"> <input type="text" class="form-control" placeholder="Search beasiswa..."> </div> <div class="sidebar-searchbar-button"> <a href="search_result.html" class="btn btn-primary">GO</a> </div> </div> </li> <br/> <li> <a href="mahasiswa.html" >Home Page</a> </li> <li> <a href="advancedsearch.html" >Pencarian Beasiswa Detil</a> </li> <li><a href="my_profile.html">Profil</a></li> <li><a href="index.html">Logout</a></li> </ul> </div> <div class="col-md-9 content-container"> <div class="page-content-container"> <!-- About --> <section id="about" class="about"> <div class="container-fluid meta-originally-container"> <div class="row"> <div class="col-lg-8 col-lg-offset-2"> <div class="row"> <div class="col-md-10 col-md-offset-2"> <h2>Didi Sumardi</h2> </div> </div> <div class="row"> <div class="col-md-2"> <img src="img/profile.jpg" class="img-circle img-responsive" alt="" /> </div> <div class="col-md-10"> <h3>NIM</h3> <p>10016002 <span class="label label-success">Tersambung dengan ol.akademik.itb.ac.id</span></p> <h3>Alamat Lengkap</h3> <p>Jalan Terama 35 03/04 Bandung</p> <h3>Alamat Profil Eksternal</h3> <p>https://www.linkedin.com/in/didi.s</p> <p> <a href="edit_my_profile.html" class="btn btn-default">Ubah</a> </p> </div> </div> </div> </div> <!-- /.row --> </div> <!-- /.container --> </section> </div> <!-- Footer --> <footer id="contact" class="ilmurah-footer"> <div class="container-fluid"> <div class="row"> <div class="col-lg-10 col-lg-offset-1 text-center"> <h4><strong>Stopdown Startup</strong></h4> <br/><br/> <p>Sources:</p> <ul class="list-unstyled"> <li><a href="http://www.dreamersroadmap.com/"><p>DREAMer’s Roadmap</p></a> </li> <li><a href="http://www.usatoday.com/story/tech/personal/2013/08/09/app-for-finding-college-scholarships/2636505/"><p>Scholly</p></a> </li> </ul> <br> <hr class="small"> <p class="text-muted">Copyright &copy; Ilmurah 2016</p> </div> </div> </div> </footer> 
</div> </div> </div> <!-- jQuery --> <script src="js/jquery.js"></script> <!-- Bootstrap Core JavaScript --> <script src="js/bootstrap.min.js"></script> <script type="text/javascript"> $("#menu-toggle").hide(); // Closes the sidebar menu $("#menu-close").click(function(e) { e.preventDefault(); $("#sidebar-wrapper").toggleClass("active"); $("#menu-toggle").show(); }); // Opens the sidebar menu $("#menu-toggle").click(function(e) { e.preventDefault(); $("#sidebar-wrapper").toggleClass("active"); $("#menu-toggle").hide(); }); // Scrolls to the selected menu item on the page $(function() { $('a[href*=#]:not([href=#])').click(function() { if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') || location.hostname == this.hostname) { var target = $(this.hash); target = target.length ? target : $('[name=' + this.hash.slice(1) + ']'); if (target.length) { $('html,body').animate({ scrollTop: target.offset().top }, 1000); return false; } } }); }); </script> </body> </html>
afghifari/ilmurahIMK2016
build/my_profile.html
HTML
apache-2.0
6,239
/* * Copyright (C) 2016 Mkhytar Mkhoian * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.justplay1.shoppist.interactor.units; import com.justplay1.shoppist.executor.PostExecutionThread; import com.justplay1.shoppist.executor.ThreadExecutor; import com.justplay1.shoppist.models.UnitModel; import com.justplay1.shoppist.repository.UnitsRepository; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.util.Collections; import java.util.List; import static com.justplay1.shoppist.ModelUtil.createFakeUnitModel; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; public class UpdateUnitsTest { private UpdateUnits useCase; @Mock private ThreadExecutor mockThreadExecutor; @Mock private PostExecutionThread mockPostExecutionThread; @Mock private UnitsRepository mockUnitsRepository; private List<UnitModel> models; @Before public void setUp() { MockitoAnnotations.initMocks(this); useCase = new UpdateUnits(mockUnitsRepository, mockThreadExecutor, mockPostExecutionThread); models = Collections.singletonList(createFakeUnitModel()); useCase.init(models); } @Test public void updateUnitsUseCase_HappyCase() { useCase.buildUseCaseObservable().subscribe(); verify(mockUnitsRepository).update(models); verifyNoMoreInteractions(mockUnitsRepository); verifyZeroInteractions(mockThreadExecutor); verifyZeroInteractions(mockPostExecutionThread); } }
justplay1/Shoppist
domain/src/test/java/com/justplay1/shoppist/interactor/units/UpdateUnitsTest.java
Java
apache-2.0
2,190
package fi.rivermouth.talous.auth;

import java.util.ArrayList;
import java.util.List;

import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;

import fi.rivermouth.talous.domain.User;

public class UserAuthenticationManager implements AuthenticationManager {

    @Override
    public Authentication authenticate(Authentication authentication) {
        List<GrantedAuthority> grantedAuths = new ArrayList<GrantedAuthority>();
        grantedAuths.add(new SimpleGrantedAuthority(User.ROLE));
        return new UsernamePasswordAuthenticationToken(authentication.getName(), authentication.getCredentials(), grantedAuths);
    }

}
Rivermouth/Rivermouth-Talous
src/main/java/fi/rivermouth/talous/auth/UserAuthenticationManager.java
Java
apache-2.0
906
#!/usr/bin/python3
################################################################################
#
# Copyright 2014 Stjepan Henc <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################

import scipy.io.wavfile as wav
import numpy as np
import copy

class Signal:
    # Data loaders
    def LoadFromFile(self, file):
        self.fs, self.s = wav.read(file)
        self.sLength, self.nChans = self.s.shape

    def LoadWF(self, waveform, fs):
        self.s = waveform
        self.fs = fs
        self.sLength, self.nChans = self.s.shape

    def __init__(self, *args):
        # signal properties
        self.singlePrecision = 0
        self.s = np.array([])
        self.fs = 44100
        self.sLength = 0
        self.nChans = 0
        self.weightingFunction = np.hamming #FIXME

        # STFT properties
        self.S = np.array([])
        self.windowLength = 60
        self.nfft = 0
        self.nfftUtil = 0
        self.overlapRatio = 0.5
        self.framesPositions = np.array([])
        self.nFrames = 0
        self.weightingWindow = np.array([])
        self.overlap = 0

        # Windowing properties
        self.sWin = np.array([])
        self.sWeights = np.array([])

        if len(args) == 1:
            if type(args[0]) == type(''):  # it's a filename
                self.LoadFromFile(args[0])
            elif type(args[0]) == type(self):  # copy data from other signal
                self.__dict__ = copy.deepcopy(args[0].__dict__)
        elif len(args) == 2:  # args[0] is a waveform, args[1] is sample freq.
            self.LoadWF(args[0], args[1])
sthenc/pyKAM
Signal.py
Python
apache-2.0
2,314
USB Armory
==========

In this package is support for using the [USB Armory](https://inversepath.com/usbarmory.html)
hardware with the firmware transparency demo.

Since the SoC on the hardware already has ROM we can't patch that to root our trust
there, so for now we'll simply use a first-stage EL3 bootloader to "enforce" the
correctness of the proof bundle before chaining to something (e.g. a full linux
image based app) that represents the firmware being made transparent.

The enforcement code not being in the masked ROM is an obvious shortcoming of this
demo, however given the interesting array of security hardware on this board it
should be possible to use some of that as an alternative trust base.

Storage
-------

> :warning: these are scratch notes, not yet reflective of reality, and so may
> change drastically!

We'll use the µSD card slot of the USB Armory for our purposes.

The SD card will contain:
- our "enforcing" [bootloader](./bootloader)
- the firmware being made discoverable
- a [proof bundle](/binary_transparency/firmware/api/update_package.go) for the
  firmware which convinces the bootloader that it _is_ discoverable and
  therefore ok to launch.

> :info: the USB Armory is built around an NXP i.MX6 SoC. When booting, the ROM
> loader on this SoC expects to find the first-stage bootloader at the
> 1024th byte of the external storage.
> This allows sufficient space beforehand to store a partition table.

The on-disk partition layout will be:

index | name     | size  | format | notes
------|----------|-------|--------|-----------------------------------------------
1     | boot     | 10M   | raw    | Must cover disk bytes 1024 onwards as we'll directly write the bootloader here.
2     | proof    | 512KB | ext4   | EXT4 filesystem for storing a serialised proof bundle
3     | firmware | 64MB+ | ext4   | EXT4 filesystem containing the bootable firmware image, armory boot config, etc.

### Preparing the SD Card

> :warning: When following the instructions below, be *very sure* you know which
> device is your SD card - if performed with an incorrect device, the instructions below
> can cause data loss!

#### Linux

##### Partition & file-systems

First use the `parted -l` command to figure out which device corresponds to your SD card.

> :tip: you can run the `parted -l` command twice, once with your SD card
> reader plugged in, and once without to help identify the device.

`/dev/my_sdcard` is used as a placeholder below, you should replace that with
the path for your SD card device.

```bash
sudo parted /dev/my_sdcard
# double (triple!) check we've got the right device:
(parted) print
...
(parted) mklabel msdos
# Create space for the bootloader
(parted) mkpart primary 1KB 10240KB
# Create a partition for the proofs
(parted) mkpart primary ext4 10241KB 10753KB
# Create a partition for the firmware
(parted) mkpart primary ext4 10754KB 100MB
# Check our work:
(parted) unit b
(parted) print
Model: Generic- Micro SD/M2 (scsi)
Disk /dev/sdc: 15931539456B
Sector size (logical/physical): 512B/512B
Partition Table: msdos
Disk Flags:

Number  Start      End         Size       Type     File system  Flags
 1      512B       10240511B   10240000B  primary               lba
 2      10240512B  10753023B   512512B    primary  ext4         lba
 3      10753536B  100000255B  89246720B  primary  ext4         lba
```

Finally, create filesystems on the 2nd and 3rd partitions of our SDCard:

```bash
$ sudo mkfs.ext4 /dev/my_sdcard2 -L proof
$ sudo mkfs.ext4 /dev/my_sdcard3 -L firmware
```

Next you'll build and install the bootloader on the card.
Compiling the bootloader
------------------------

Follow the instructions on the
[tamago-example](https://github.com/f-secure-foundry/tamago-example#Compiling)
site to set up your tool-chain and environment variables.

To compile the bootloader itself, run the following command in the `bootloader`
directory:

```bash
# Note that START_KERNEL corresponds to the offset of the firmware partition,
# and START_PROOF is the offset of the proof partition
make CROSS_COMPILE=arm-none-eabi- TARGET=usbarmory imx BOOT=uSD START_KERNEL=10753536 START_PROOF=10240512 LEN_KERNEL=89246720
```

If successful, this will create a few files - the one we're interested in is
`armory-boot.imx`, this is the "native format" bootloader code for the NXP SoC.

##### Install bootloader

Finally, we'll write the bootloader we just built onto the SD card in the right place:

```bash
# Note that we're writing to the raw device here NOT the boot partition.
$ sudo dd if=armory-boot.imx of=/dev/my_sdcard bs=512 seek=2 conv=fsync,notrunc
```

Firmware images
---------------

Currently, the bootloader can only chain to either a Linux kernel, or a
bare-metal ELF unikernel (only tested with tamago-example thus far).

There are some invariants which must hold for this chain to work:
1. The `firmware` partition MUST be located at the precise offset mentioned above.
2. The `firmware` partition MUST be formatted with ext4.
3. The `firmware` partition MUST contain a `/boot` directory with at least the
   following contents:
   * `armory-boot.conf` - a JSON file which tells the bootloader which files to load
   * Either:
      * to boot a Linux Kernel:
         * a valid ARM linux Kernel image
         * a valid DTB file
      * to boot ELF unikernel:
         * a valid ARM bare-metal ELF binary/unikernel

Note that the `armory-boot.conf` file also contains SHA256 hashes of all files
referenced, and these MUST be correct.

To aid in the creation of valid firmware images, use the
[cmd/usbarmory/image_builder/build.sh](/binary_transparency/firmware/cmd/usbarmory/image_builder/build.sh)
script, e.g.:

```bash
$ ./cmd/usbarmory/image_builder/build.sh -u ./testdata/firmware/usbarmory/example/tamago-example -o /tmp/armory.ext4
/tmp/armory.ext4: Writing to the journal is not supported.
Created image in /tmp/armory.ext4:
-rw-rw-r-- 1 al al 13M Nov 30 10:39 /tmp/armory.ext4
```

This image can be written to the target partition using the following commands:

```bash
# first, log the image
$ go run ./cmd/publisher/ --logtostderr --binary_path /tmp/armory.ext4 --output_path /tmp/update.ota --device="armory"

# then flash the device firmware
$ sudo $(which go) run ./cmd/flash_tool \
    --logtostderr \
    --device=armory \
    --update_file /tmp/update.ota \
    --armory_proof_mount_point /path/to/mounted/proof/partition \
    --armory_unikernel_dev /dev/my_sdcard3
```

<details>
<summary>Alternative approach using regular shell commands</summary>

Alternatively, if you prefer to see what's going on, you can currently achieve
a similar goal with the following:

```bash
$ sudo dd if=/tmp/armory.ext4 of=/dev/my_sdcard3 bs=1M conv=fsync

# finally, copy over the proof bundle (assumes /dev/my_sdcard2 is mounted on /mnt/proof)
$ jq '.ProofBundle|@base64d|fromjson' /tmp/update.ota > /tmp/bundle.proof
$ sudo mv /tmp/bundle.proof /mnt/proof/bundle.json
```

</details>

### Linux

> :frog: The [Armory Debian Base Image](https://github.com/f-secure-foundry/usbarmory-debian-base_image/releases)
> is a good source for the kernel (zImage) and dtb files.
>
> You can decompress and mount the image to access the files like so:
> ```bash
> # decompress image
> $ xz -d usbarmory-mark-two-usd-debian_buster-base_image-20200714.raw.xz
> # mount image with loopback:
> # note the offset parameter below - the raw file is a complete disk image, this
> # offset is the first byte of the root partition (you can use fdisk or parted
> # on the raw file to view this yourself)
> $ sudo mount -o loop,ro,offset=5242880 /home/al/Downloads/usbarmory-mark-two-usd-debian_buster-base_image-20201020.raw /mnt
> # the files we're interested in are now visible in /mnt/boot:
> $ ls -l /mnt/boot
> total 8148
> -rw-r--r-- 1 root root   99319 Oct 20 17:13 config-5.4.72-0-usbarmory
> lrwxrwxrwx 1 root root      21 Oct 20 17:14 imx6ull-usbarmory.dtb -> imx6ulz-usbarmory.dtb
> -rw-r--r-- 1 root root   19938 Oct 20 17:14 imx6ulz-usbarmory-default-5.4.72-0.dtb
> lrwxrwxrwx 1 root root      38 Oct 20 17:14 imx6ulz-usbarmory.dtb -> imx6ulz-usbarmory-default-5.4.72-0.dtb
> -rw-r--r-- 1 root root 1488951 Oct 20 17:13 System.map-5.4.72-0-usbarmory
> lrwxrwxrwx 1 root root      25 Oct 20 17:14 zImage -> zImage-5.4.72-0-usbarmory
> -rwxr-xr-x 1 root root 6726952 Oct 20 17:13 zImage-5.4.72-0-usbarmory
> ```

An example `armory-boot.conf` file configured to boot a Linux kernel is:

```json
{
  "kernel": [
    "/boot/zImage-5.4.51-0-usbarmory",
    "aceb3514d5ba6ac591a7d5f2cad680e83a9f848d19763563da8024f003e927c7"
  ],
  "dtb": [
    "/boot/imx6ulz-usbarmory-default-5.4.51-0.dtb",
    "60d4fe465ef60042293f5723bf4a001d8e75f26e517af2b55e6efaef9c0db1f6"
  ],
  "cmdline": "console=ttymxc1,115200 root=/dev/sdc3 rootwait rw"
}
```

TODO(al): Consider wrapping this up into a script.

### ELF unikernel

> :frog: A good sample unikernel is the
> [tamago-example](https://github.com/f-secure-foundry/tamago-example)
> application.

An example `armory-boot.conf` file configured to boot an ELF unikernel is:

```json
{
  "unikernel": [
    "/boot/tamago-example",
    "aceb3514d5ba6ac591a7d5f2cad680e83a9f848d19763563da8024f003e927c7"
  ]
}
```

Booting
-------

If all is well, booting the USB Armory using the debug accessory will show
console output like so:

```
Terminal ready
armory-boot: starting kernel image@80800000 params@87000000
Booting Linux on physical CPU 0x0
Linux version 5.4.72-0 (usbarmory@f-secure-foundry) (gcc version 7.5.0 (Ubuntu/Linaro 7.5.0-3ubuntu1~18.04)) #1 PREEMPT Tue Oct 20 16:03:37 UTC 2020
CPU: ARMv7 Processor [410fc075] revision 5 (ARMv7), cr=10c53c7d
CPU: div instructions available: patching division code
CPU: PIPT / VIPT nonaliasing data cache, VIPT aliasing instruction cache
OF: fdt: Machine model: F-Secure USB armory Mk II
Memory policy: Data cache writeback
CPU: All CPU(s) started in SVC mode.
Built 1 zonelists, mobility grouping on.  Total pages: 130048
Kernel command line: console=ttymxc1,115200 root=/dev/sda3 rootwait rw
Dentry cache hash table entries: 65536 (order: 6, 262144 bytes, linear)
...
```

Firmware Measurement
--------------------

The 'firmware measurement' hash for the USB Armory is defined to be the SHA256
hash of the raw bytes of the `ext4` **filesystem image** stored in the
'firmware' partition of the SD Card.

Note that this _may well_ be a different size than the partition itself.
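
As a rough, hypothetical sketch only (this is not part of the repository's
tooling; the file name and the `/tmp/armory.ext4` path are simply carried over
from the image-builder example above), the measurement described here could be
computed like so:

```go
// firmware_measurement.go - minimal sketch, assuming the image built by
// build.sh above is at /tmp/armory.ext4.
package main

import (
	"crypto/sha256"
	"fmt"
	"io"
	"os"
)

func main() {
	f, err := os.Open("/tmp/armory.ext4")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// The measurement is the SHA256 over the raw bytes of the ext4 image file,
	// which may be smaller than the partition it is eventually written to.
	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		panic(err)
	}
	fmt.Printf("%x\n", h.Sum(nil))
}
```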
google/trillian-examples
binary_transparency/firmware/devices/usbarmory/README.md
Markdown
apache-2.0
10,495
#include <assert.h> #include <pthread.h> #include <stdint.h> #include <time.h> #include "../lib/includes.h" #include "../lib/blacklist.h" #include "../lib/logger.h" #include "../lib/xalloc.h" #include "iterator.h" #include "aesrand.h" #include "shard.h" #include "state.h" struct iterator { cycle_t cycle; uint8_t num_threads; shard_t *thread_shards; uint8_t *complete; pthread_mutex_t mutex; uint32_t curr_threads; }; void shard_complete(uint8_t thread_id, void *arg) { iterator_t *it = (iterator_t *) arg; assert(thread_id < it->num_threads); pthread_mutex_lock(&it->mutex); it->complete[thread_id] = 1; it->curr_threads--; shard_t *s = &it->thread_shards[thread_id]; zsend.sent += s->state.sent; zsend.blacklisted += s->state.blacklisted; zsend.whitelisted += s->state.whitelisted; zsend.sendto_failures += s->state.failures; uint8_t done = 1; for (uint8_t i = 0; done && (i < it->num_threads); ++i) { done = done && it->complete[i]; } if (done) { zsend.finish = now(); zsend.complete = 1; zsend.first_scanned = it->thread_shards[0].state.first_scanned; } pthread_mutex_unlock(&it->mutex); } iterator_t* iterator_init(uint8_t num_threads, uint8_t shard, uint8_t num_shards) { uint64_t num_addrs = blacklist_count_allowed(); iterator_t *it = xmalloc(sizeof(struct iterator)); const cyclic_group_t *group = get_group(num_addrs); if (num_addrs > (1LL << 32)) { zsend.max_index = 0xFFFFFFFF; } else { zsend.max_index = (uint32_t) num_addrs; } it->cycle = make_cycle(group, zconf.aes); it->num_threads = num_threads; it->curr_threads = num_threads; it->thread_shards = xcalloc(num_threads, sizeof(shard_t)); it->complete = xcalloc(it->num_threads, sizeof(uint8_t)); pthread_mutex_init(&it->mutex, NULL); for (uint8_t i = 0; i < num_threads; ++i) { shard_init(&it->thread_shards[i], shard, num_shards, i, num_threads, &it->cycle, shard_complete, it ); } zconf.generator = it->cycle.generator; return it; } uint32_t iterator_get_sent(iterator_t *it) { uint32_t sent = 0; for (uint8_t i = 0; i < it->num_threads; ++i) { sent += it->thread_shards[i].state.sent; } return sent; } shard_t* get_shard(iterator_t *it, uint8_t thread_id) { assert(thread_id < it->num_threads); return &it->thread_shards[thread_id]; } uint32_t iterator_get_curr_send_threads(iterator_t *it) { assert(it); return it->curr_threads; }
scarito/zmap-android
src/iterator.c
C
apache-2.0
2,428
/* * Copyright 2012 Rackspace * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ #include "virgo.h" #include "virgo_paths.h" #include "virgo_error.h" #include "virgo__types.h" #include "virgo__conf.h" #include "virgo__util.h" #include <assert.h> #include <ctype.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #ifndef _WIN32 #include <limits.h> #include <unistd.h> #include <errno.h> #endif virgo_error_t* virgo_conf_service_name(virgo_t *v, const char *name) { if (v->service_name) { free((void*)v->service_name); } v->service_name = strdup(name); return VIRGO_SUCCESS; } virgo_error_t* virgo_conf_lua_bundle_path(virgo_t *v, const char *path) { if (v->lua_bundle_path) { free((void*)v->lua_bundle_path); } v->lua_bundle_path = strdup(path); return VIRGO_SUCCESS; } virgo_error_t* virgo_conf_lua_load_path(virgo_t *v, const char *path) { if (v->lua_load_path) { free((void*)v->lua_load_path); } v->lua_load_path = strdup(path); return VIRGO_SUCCESS; } virgo_error_t* virgo_conf_args(virgo_t *v) { virgo_error_t *err; const char *arg; char path[VIRGO_PATH_MAX]; short forced_zip = FALSE; arg = virgo__argv_get_value(v, "-z", "--zip"); if (arg != NULL) { err = virgo_conf_lua_load_path(v, arg); if (err) { return err; } forced_zip = TRUE; v->try_upgrade = FALSE; } if (virgo__argv_has_flag(v, "-o", "--no-upgrade")) { v->try_upgrade = FALSE; } if (virgo__argv_has_flag(v, "-r", "--exit-on-upgrade")) { v->exit_on_upgrade = TRUE; } if (virgo__argv_has_flag(v, "-p", "--restart-sysv-on-upgrade")) { v->restart_on_upgrade = TRUE; } arg = virgo__argv_get_value(v, "-l", "--logfile"); if (arg != NULL) { v->log_path = strdup(arg); } if (!forced_zip) { arg = virgo__argv_get_value(v, "-b", "--bundle-path"); if (arg) { virgo_conf_lua_bundle_path(v, arg); } /* bundle filename */ err = virgo__paths_get(v, VIRGO_PATH_BUNDLE, path, sizeof(path)); if (err) { return err; } err = virgo_conf_lua_load_path(v, path); if (err) { return err; } } return VIRGO_SUCCESS; } const char* virgo_conf_get(virgo_t *v, const char *key) { return virgo__conf_get(v, key); } static void nuke_newlines(char *p) { size_t i; size_t l = strlen(p); for (i = 0; i < l; i++) { if (p[i] == '\n') { p[i] = '\0'; } if (p[i] == '\r') { p[i] = '\0'; } } } static char* next_chunk(char **x_p) { char *p = *x_p; while (isspace(p[0])) { p++;}; nuke_newlines(p); *x_p = p; return strdup(p); } static void conf_insert_node_to_table(virgo_t *v, const char *key, const char *value) { lua_pushstring(v->L, key); lua_pushstring(v->L, value); lua_settable(v->L, -3); } static void conf_parse(virgo_t *v, FILE *fp) { char buf[8096]; char *p = NULL; while ((p = fgets(buf, sizeof(buf), fp)) != NULL) { char *key; virgo_conf_t *node; /* comment lines */ if (p[0] == '#') { continue; } while (isspace(p[0])) { p++;}; if (strlen(p) == 0) { continue; } /* Insert into list */ node = calloc(1, sizeof(*node)); node->next = v->config; v->config = node; /* calculate key/value pairs */ key = next_chunk(&p); p = key; while(!isspace(p[0])) { p++;}; *p = '\0'; /* null terminate key */ node->key = 
strdup(key); p++; while(isspace(p[0])) { p++;}; node->value = strdup(p); free(key); conf_insert_node_to_table(v, node->key, node->value); } } const char* virgo__conf_get(virgo_t *v, const char *key) { virgo_conf_t *p = v->config; if (strcmp("lua_load_path", key) == 0) { return v->lua_load_path; } while (p) { if (strcmp(p->key, key) == 0) { return p->value; } p = p->next; } return NULL; } void virgo__conf_destroy(virgo_t *v) { virgo_conf_t *p = v->config, *t; while (p) { t = p->next; free((void*)p->key); free((void*)p->value); free(p); p = t; } v->config = NULL; } static virgo_error_t* virgo__conf_get_path(virgo_t *v, const char **p_path) { #ifdef _WIN32 char *programdata; const char *path; path = virgo__argv_get_value(v, "-c", "--config"); if (path == NULL) { char gen_path[512]; programdata = getenv("ProgramData"); if (programdata == NULL) { return virgo_error_create(VIRGO_EINVAL, "Unable to get environment variable: \"ProgramData\"\n"); } sprintf(gen_path, "%s\\%s\\config\\%s", programdata, VIRGO_DEFAULT_CONFIG_WINDOWS_DIRECTORY, VIRGO_DEFAULT_CONFIG_FILENAME); *p_path = strdup(gen_path); return VIRGO_SUCCESS; } *p_path = strdup(path); return VIRGO_SUCCESS; #else /* !_WIN32 */ char *path; char buffer[PATH_MAX]; int count; path = (char*) virgo__argv_get_value(v, "-c", "--config"); if (path == NULL) { virgo__paths_get(v, VIRGO_PATH_CONFIG_DIR, buffer, sizeof(buffer)); count = strlen(buffer) + strlen(VIRGO_DEFAULT_CONFIG_FILENAME) + strlen(SEP) + 1; *p_path = (char*) malloc(count); snprintf((char*) *p_path, count, "%s%s%s", buffer, SEP, VIRGO_DEFAULT_CONFIG_FILENAME); return VIRGO_SUCCESS; } *p_path = strdup(path); return VIRGO_SUCCESS; #endif } virgo_error_t* virgo__conf_init(virgo_t *v) { virgo_error_t* err; FILE *fp; const char *path; err = virgo__conf_get_path(v, &path); if (err) { return err; } /* destroy config if already read */ if (v->config) { virgo__conf_destroy(v); } /* put config in virgo.config table */ fp = fopen(path, "r"); if (fp) { lua_getglobal(v->L, "virgo"); lua_pushstring(v->L, "config"); lua_newtable(v->L); conf_parse(v, fp); lua_settable(v->L, -3); fclose(fp); } lua_pushstring(v->L, "config_path"); lua_pushstring(v->L, path); lua_settable(v->L, -3); free((void*)path); return VIRGO_SUCCESS; }
cp16net/virgo-base
lib/virgo_conf.c
C
apache-2.0
6,480
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.rest.action.admin.indices; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.DataStreamAlias; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestBuilderListener; import java.io.IOException; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; /** * The REST handler for get alias and head alias APIs. 
*/ public class RestGetAliasesAction extends BaseRestHandler { @Override public List<Route> routes() { return List.of( new Route(GET, "/_alias"), new Route(GET, "/_aliases"), new Route(GET, "/_alias/{name}"), new Route(HEAD, "/_alias/{name}"), new Route(GET, "/{index}/_alias"), new Route(HEAD, "/{index}/_alias"), new Route(GET, "/{index}/_alias/{name}"), new Route(HEAD, "/{index}/_alias/{name}")); } @Override public String getName() { return "get_aliases_action"; } static RestResponse buildRestResponse(boolean aliasesExplicitlyRequested, String[] requestedAliases, ImmutableOpenMap<String, List<AliasMetadata>> responseAliasMap, Map<String, List<DataStreamAlias>> dataStreamAliases, XContentBuilder builder) throws Exception { final Set<String> indicesToDisplay = new HashSet<>(); final Set<String> returnedAliasNames = new HashSet<>(); for (final ObjectObjectCursor<String, List<AliasMetadata>> cursor : responseAliasMap) { for (final AliasMetadata aliasMetadata : cursor.value) { if (aliasesExplicitlyRequested) { // only display indices that have aliases indicesToDisplay.add(cursor.key); } returnedAliasNames.add(aliasMetadata.alias()); } } // compute explicitly requested aliases that have are not returned in the result final SortedSet<String> missingAliases = new TreeSet<>(); // first wildcard index, leading "-" as an alias name after this index means // that it is an exclusion int firstWildcardIndex = requestedAliases.length; for (int i = 0; i < requestedAliases.length; i++) { if (Regex.isSimpleMatchPattern(requestedAliases[i])) { firstWildcardIndex = i; break; } } for (int i = 0; i < requestedAliases.length; i++) { if (Metadata.ALL.equals(requestedAliases[i]) || Regex.isSimpleMatchPattern(requestedAliases[i]) || (i > firstWildcardIndex && requestedAliases[i].charAt(0) == '-')) { // only explicitly requested aliases will be called out as missing (404) continue; } // check if aliases[i] is subsequently excluded int j = Math.max(i + 1, firstWildcardIndex); for (; j < requestedAliases.length; j++) { if (requestedAliases[j].charAt(0) == '-') { // this is an exclude pattern if (Regex.simpleMatch(requestedAliases[j].substring(1), requestedAliases[i]) || Metadata.ALL.equals(requestedAliases[j].substring(1))) { // aliases[i] is excluded by aliases[j] break; } } } if (j == requestedAliases.length) { // explicitly requested aliases[i] is not excluded by any subsequent "-" wildcard in expression if (false == returnedAliasNames.contains(requestedAliases[i])) { // aliases[i] is not in the result set missingAliases.add(requestedAliases[i]); } } } final RestStatus status; builder.startObject(); { if (missingAliases.isEmpty()) { status = RestStatus.OK; } else { status = RestStatus.NOT_FOUND; final String message; if (missingAliases.size() == 1) { message = String.format(Locale.ROOT, "alias [%s] missing", Strings.collectionToCommaDelimitedString(missingAliases)); } else { message = String.format(Locale.ROOT, "aliases [%s] missing", Strings.collectionToCommaDelimitedString(missingAliases)); } builder.field("error", message); builder.field("status", status.getStatus()); } for (final var entry : responseAliasMap) { if (aliasesExplicitlyRequested == false || (aliasesExplicitlyRequested && indicesToDisplay.contains(entry.key))) { builder.startObject(entry.key); { builder.startObject("aliases"); { for (final AliasMetadata alias : entry.value) { AliasMetadata.Builder.toXContent(alias, builder, ToXContent.EMPTY_PARAMS); } } builder.endObject(); } builder.endObject(); } } for (var entry : dataStreamAliases.entrySet()) { 
builder.startObject(entry.getKey()); { builder.startObject("aliases"); { for (DataStreamAlias alias : entry.getValue()) { builder.startObject(alias.getName()); builder.endObject(); } } builder.endObject(); } builder.endObject(); } } builder.endObject(); return new BytesRestResponse(status, builder); } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { // The TransportGetAliasesAction was improved do the same post processing as is happening here. // We can't remove this logic yet to support mixed clusters. We should be able to remove this logic here // in when 8.0 becomes the new version in the master branch. final boolean namesProvided = request.hasParam("name"); final String[] aliases = request.paramAsStringArrayOrEmptyIfAll("name"); final GetAliasesRequest getAliasesRequest = new GetAliasesRequest(aliases); final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); getAliasesRequest.indices(indices); getAliasesRequest.indicesOptions(IndicesOptions.fromRequest(request, getAliasesRequest.indicesOptions())); getAliasesRequest.local(request.paramAsBoolean("local", getAliasesRequest.local())); //we may want to move this logic to TransportGetAliasesAction but it is based on the original provided aliases, which will //not always be available there (they may get replaced so retrieving request.aliases is not quite the same). return channel -> client.admin().indices().getAliases(getAliasesRequest, new RestBuilderListener<GetAliasesResponse>(channel) { @Override public RestResponse buildResponse(GetAliasesResponse response, XContentBuilder builder) throws Exception { return buildRestResponse(namesProvided, aliases, response.getAliases(), response.getDataStreamAliases(), builder); } }); } }
robin13/elasticsearch
server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetAliasesAction.java
Java
apache-2.0
9,110
#
# Sample : put() : Put a single request message to a queue
#
require 'wmq'

WMQ::QueueManager.connect(q_mgr_name: 'REID') do |qmgr|
  message = WMQ::Message.new
  message.data = 'Hello World'
  message.descriptor = {
    msg_type:   WMQ::MQMT_REQUEST,
    reply_to_q: 'TEST.REPLY.QUEUE'
  }
  qmgr.put(q_name: 'TEST.QUEUE', message: message)
end
reidmorrison/rubywmq
examples/put1_c.rb
Ruby
apache-2.0
355
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import axios from 'axios';
import { AxiosResponse } from 'axios';

export function getDruidErrorMessage(e: any) {
  const data: any = ((e.response || {}).data || {});
  return [data.error, data.errorMessage, data.errorClass].filter(Boolean).join(' / ') || e.message;
}

export async function queryDruidRune(runeQuery: Record<string, any>): Promise<any> {
  let runeResultResp: AxiosResponse<any>;
  try {
    runeResultResp = await axios.post("/druid/v2", runeQuery);
  } catch (e) {
    throw new Error(getDruidErrorMessage(e));
  }
  return runeResultResp.data;
}

export async function queryDruidSql(sqlQuery: Record<string, any>): Promise<any[]> {
  let sqlResultResp: AxiosResponse<any>;
  try {
    sqlResultResp = await axios.post("/druid/v2/sql", sqlQuery);
  } catch (e) {
    throw new Error(getDruidErrorMessage(e));
  }
  return sqlResultResp.data;
}
liquidm/druid
web-console/src/utils/druid-query.tsx
TypeScript
apache-2.0
1,670
using System; using System.Threading; using System.Threading.Tasks; using Moq; using Moq.Protected; using Riganti.Utils.Infrastructure.Core; using Xunit; #if EFCORE using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; using Riganti.Utils.Infrastructure.EntityFrameworkCore.Transactions; #else using System.Data.Entity; using Riganti.Utils.Infrastructure.EntityFramework.Transactions; #endif #if EFCORE namespace Riganti.Utils.Infrastructure.EntityFrameworkCore.Tests.UnitOfWork #else namespace Riganti.Utils.Infrastructure.EntityFramework.Tests.UnitOfWork #endif { public class EntityFrameworkUnitOfWorkTests { [Fact] public void Commit_CallCommitCoreOnlyIfHasOwnDbContext() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); var unitOfWorkChildMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkChild = unitOfWorkChildMock.Object) { unitOfWorkChild.Commit(); } unitOfWorkChildMock.Protected().Verify("CommitCore", Times.Never()); unitOfWorkParent.Commit(); } unitOfWorkParentMock.Protected().Verify("CommitCore", Times.Once()); } [Fact] public void Commit_CorrectChildRequestIgnoredBehavior() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); Assert.Throws<ChildCommitPendingException>(() => { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (var unitOfWorkChild = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { unitOfWorkChild.Commit(); } } }); // test that unit of work provider keeps working after caught exception using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } } } [Fact] public void Commit_CorrectMultipleLayeredReuseParentBehavior() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var unitOfWorkParent = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 1st level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 2nd level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 3rd level, context 2 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 4th level, context 2 using (var unitOfWorkParent3 = unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 5th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 6th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } using (var unitOfWorkChild3 = 
unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 7th level, context 3 commit requested unitOfWorkChild3.Commit(); } } // commit mandatory, context 3 commit pending unitOfWorkParent3.Commit(); } } } } // commit optional, no reusing child commit pending unitOfWorkParent.Commit(); } } [Fact] public void Commit_UOWHasNotParrent_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkParent.Commit(); } unitOfWorkParentMock.Protected().Verify("CommitCore", Times.Once()); } [Fact] public void CommitAsync_UOWHasChild_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); using (var unitOfWorkChild = new EntityFrameworkUnitOfWork(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext)) { unitOfWorkChild.Commit(); } unitOfWorkParent.Commit(); } unitOfWorkParentMock.Protected().Verify("CommitCore", Times.Once()); } [Fact] public async Task CommitAsync_CallCommitCoreOnlyIfHasOwnDbContext() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); var unitOfWorkChildMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkChild = unitOfWorkChildMock.Object) { await unitOfWorkChild.CommitAsync(); } unitOfWorkChildMock.Protected().Verify("CommitAsyncCore", Times.Never(), new CancellationToken()); await unitOfWorkParent.CommitAsync(); } unitOfWorkParentMock.Protected().Verify("CommitAsyncCore", Times.Once(), new CancellationToken()); } [Fact] public async Task CommitAsync_ThrowIfChildCommitRequestedNotFulfilledByRoot() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); await Assert.ThrowsAsync<ChildCommitPendingException>(async () => { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (var unitOfWorkChild = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { await unitOfWorkChild.CommitAsync(); } } }); // test that unit of work provider keeps working after caught 
exception using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } } } [Fact] public async Task CommitAsync_CorrectMultipleLayeredReuseParentBehavior() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var unitOfWorkParent = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 1st level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 2nd level, context 1 using (unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 3rd level, context 2 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 4th level, context 2 using (var unitOfWorkParent3 = unitOfWorkProvider.Create(DbContextOptions.AlwaysCreateOwnContext)) { // 5th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 6th level, context 3 using (unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { } using (var unitOfWorkChild3 = unitOfWorkProvider.Create(DbContextOptions.ReuseParentContext)) { // 7th level, context 3 commit requested await unitOfWorkChild3.CommitAsync(); } } // commit mandatory, context 3 commit pending await unitOfWorkParent3.CommitAsync(); } } } } // commit optional, no reusing child commit pending await unitOfWorkParent.CommitAsync(); } } [Fact] public async Task CommitAsync_UOWHasNotParrent_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { await unitOfWorkParent.CommitAsync(); } unitOfWorkParentMock.Protected().Verify("CommitAsyncCore", Times.Once(), new CancellationToken()); } [Fact] public async Task Commit_UOWHasChild_CallCommitCore() { Func<DbContext> dbContextFactory = () => new Mock<DbContext>().Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWorkParentMock = new Mock<EntityFrameworkUnitOfWork>(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext) { CallBase = true }; using (var unitOfWorkParent = unitOfWorkParentMock.Object) { unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWorkParent); using (var unitOfWorkChild = new EntityFrameworkUnitOfWork(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext)) { await unitOfWorkChild.CommitAsync(); } await unitOfWorkParent.CommitAsync(); } unitOfWorkParentMock.Protected().Verify("CommitAsyncCore", Times.Once(), new CancellationToken()); } [Fact] public async Task Commit_Transaction_CallRollback() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = 
scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); using (var uowChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChild.CommitAsync(); Assert.Equal(1, uowChild.CommitsCount); Assert.False(uowChild.CommitPending); Assert.Equal(2, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); } throw Assert.Throws<RollbackRequestedException>(() => { uowParent.RollbackTransaction(); }); }); scopeMock.Protected().Verify("AfterRollback", Times.Once()); } [Fact] public async Task Commit_Transaction_CallRollback_UserCatch() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); using (var uowChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChild.CommitAsync(); try { uowParent.RollbackTransaction(); } catch (Exception) { // user catches any exceptions } } }); scopeMock.Protected().Verify("AfterRollback", Times.Once()); } [Fact] public async Task Commit_Transaction_CallRollback_OnException() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scope = unitOfWorkProvider.CreateTransactionScope(); var exceptionKey = Guid.NewGuid().ToString(); try { await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); throw new Exception(exceptionKey); }); } catch (Exception e) when (e.Message == exceptionKey) { // test exception caught, passed } } [Fact] public async Task Commit_Transaction_CallCommit() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); }); scopeMock.Protected().Verify("AfterCommit", Times.Once()); } [Fact] public async Task Commit_Transaction_CallCommit_Nesting() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; await scope.ExecuteAsync(async uowParent => { Assert.True(uowParent.IsInTransaction); await uowParent.CommitAsync(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); using (var uowChild = 
(EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChild.CommitAsync(); Assert.Equal(1, uowChild.CommitsCount); Assert.False(uowChild.CommitPending); Assert.Equal(2, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); using (var uowChildChild = (EntityFrameworkUnitOfWork<InMemoryDbContext>)unitOfWorkProvider.Create()) { await uowChildChild.CommitAsync(); } Assert.Equal(2, uowChild.CommitsCount); Assert.False(uowChild.CommitPending); Assert.Equal(3, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); } }); scopeMock.Protected().Verify("AfterCommit", Times.Once()); } [Fact] public void Commit_Transaction_CallCommit_Sync() { var dbContextFactory = GetContextFactory(); var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider<InMemoryDbContext>(unitOfWorkRegistryStub, dbContextFactory); var scopeMock = new Mock<UnitOfWorkTransactionScope<InMemoryDbContext>>(unitOfWorkProvider); var scope = scopeMock.Object; scope.Execute(uowParent => { Assert.True(uowParent.IsInTransaction); uowParent.Commit(); Assert.Equal(1, uowParent.CommitsCount); Assert.False(uowParent.CommitPending); }); scopeMock.Protected().Verify("AfterCommit", Times.Once()); } [Fact] public void TryGetDbContext_UnitOfWorkRegistryHasUnitOfWork_ReturnCorrectDbContext() { var dbContext = new Mock<DbContext>().Object; Func<DbContext> dbContextFactory = () => dbContext; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var unitOfWork = new EntityFrameworkUnitOfWork(unitOfWorkProvider, dbContextFactory, DbContextOptions.ReuseParentContext); unitOfWorkRegistryStub.RegisterUnitOfWork(unitOfWork); var uowDbContext = EntityFrameworkUnitOfWork.TryGetDbContext(unitOfWorkProvider); Assert.NotNull(uowDbContext); Assert.Same(dbContext, uowDbContext); } [Fact] public void TryGetDbContext_UnitOfWorkRegistryHasNotUnitOfWork_ReturnsNull() { var dbContext = new Mock<DbContext>().Object; Func<DbContext> dbContextFactory = () => dbContext; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); var value = EntityFrameworkUnitOfWork.TryGetDbContext(unitOfWorkProvider); Assert.Null(value); } [Fact] public async Task CommitAsync_WithCancellationTokenInNestedUow_SavedChangesInParentUow() { var dbContext = new Mock<DbContext>(); Func<DbContext> dbContextFactory = () => dbContext.Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var uow = unitOfWorkProvider.Create()) { using (var nested = unitOfWorkProvider.Create()) { await nested.CommitAsync(new CancellationToken()); // verify, that method has NEVER been called dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Never); } await uow.CommitAsync(new CancellationToken()); // verify, that method has been called ONCE dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Once); } } [Fact] public async Task CommitAsync_WithoutCancellationTokenInNestedUow_SavedChangesInParentUow() { var dbContext = new Mock<DbContext>(); Func<DbContext> dbContextFactory = () => dbContext.Object; var unitOfWorkRegistryStub = new ThreadLocalUnitOfWorkRegistry(); var unitOfWorkProvider = 
new EntityFrameworkUnitOfWorkProvider(unitOfWorkRegistryStub, dbContextFactory); using (var uow = unitOfWorkProvider.Create()) { using (var nested = unitOfWorkProvider.Create()) { await nested.CommitAsync(); // verify, that method has NEVER been called dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Never); } await uow.CommitAsync(); // verify, that method has been called ONCE dbContext.Verify(x => x.SaveChangesAsync(It.IsAny<CancellationToken>()), Times.Once); } } public class InMemoryDbContext : DbContext { #if EFCORE protected override void OnConfiguring(DbContextOptionsBuilder optionsBuilder) { if (!optionsBuilder.IsConfigured) { optionsBuilder .UseInMemoryDatabase(Guid.NewGuid().ToString()) .ConfigureWarnings(w => w.Ignore(InMemoryEventId.TransactionIgnoredWarning)); } } #endif } private static Func<InMemoryDbContext> GetContextFactory() { return () => #if EFCORE new InMemoryDbContext(); #else new Mock<InMemoryDbContext>().Object; #endif } } }
riganti/infrastructure
src/Infrastructure/Tests/Riganti.Utils.Infrastructure.EntityFramework.Tests/UnitOfWork/EntityFrameworkUnitOfWorkTests.cs
C#
apache-2.0
25,742
<?php
/**
 * Created by PhpStorm.
 * User: vjcspy
 * Date: 28/05/2016
 * Time: 12:56
 */

namespace Modules\IzCore\Repositories;

use Modules\IzCore\Repositories\Object\DataObject;
use Modules\IzCore\Repositories\Theme\View\AdditionViewInterface;
use Pingpong\Modules\Repository;

/**
 * Theme management.
 * Includes:
 * - Theme data: merging data contributed by external modules
 * - Managing the current theme
 *
 * @package Modules\IzCore\Repositories
 */
class Theme extends DataObject {

    /**
     * @var
     */
    protected $currentPath;

    /**
     * [
     *'path'=>['Modules\IzCore\Repositories\Theme\View\AdditionView]
     * ]
     *
     * @var array
     */
    protected $additionData = [];

    /**
     * @var \Teepluss\Theme\Contracts\Theme
     */
    protected $theme;

    /**
     * @var string
     */
    protected $_currentThemeName;

    /**
     * @var string
     */
    protected $_currentLayoutName;

    /**
     * [
     *'path'=>[]
     * ]
     *
     * @var array
     */
    protected $data = [];

    /**
     * @var \Pingpong\Modules\Repository
     */
    protected $module;

    /**
     * All Asset
     *
     * @var
     */
    private $assets;

    /**
     * @var \Modules\IzCore\Entities\Theme
     */
    private $themeModel;

    /**
     * Theme constructor.
     *
     * @param \Pingpong\Modules\Repository $module
     * @param array $data
     */
    public function __construct(
        Repository $module,
        \Modules\IzCore\Entities\Theme $themeModel,
        array $data = []
    ) {
        $this->themeModel = $themeModel;
        $this->module = $module;
        parent::__construct($data);
    }

    /**
     * For external modules that want to add data to a given path
     *
     * @param $path
     * @param $data
     */
    public function addAdditionData($path, $data) {
        foreach ($data as $item) {
            if (!isset($this->additionData[$path]))
                $this->additionData[$path] = [];
            $this->additionData[$path][] = $item;
        }
    }

    /**
     * Merge data from other modules into the current path
     *
     * @param null $path
     *
     * @return $this
     */
    public function initAdditionData($path = null) {
        if (is_null($path))
            $path = $this->getCurrentPath();
        if (isset($this->additionData[$path])) {
            foreach ($this->additionData[$path] as $item) {
                /** @var AdditionViewInterface $item */
                $item = app()->make($item);
                if (!isset($this->data[$path]))
                    $this->data[$path] = [];
                $this->data[$path] = array_merge($this->data[$path], $item->handle());
            }
        }

        return $this;
    }

    /**
     * @return mixed
     */
    public function getCurrentPath() {
        return $this->currentPath;
    }

    /**
     * @param mixed $currentPath
     *
     * @return $this
     */
    public function setCurrentPath($currentPath) {
        $this->currentPath = $currentPath;

        return $this;
    }

    /**
     * Set data on the view of the current path
     *
     * @param \Teepluss\Theme\Theme $theme
     * @param $path
     *
     * @return $this
     */
    public function initViewData(\Teepluss\Theme\Theme $theme, $path) {
        /* Merge from other modules */
        $this->initAdditionData($path);
        if (isset($this->data[$path])) {
            foreach ($this->data[$path] as $items) {
                foreach ($items as $k => $item) {
                    $theme->set($k, $item);
                }
            }
        }

        return $this;
    }

    /**
     * Get all assets in each theme in each module
     *
     * @return array
     * @throws \Exception
     */
    public function getAssetsTree() {
        if (is_null($this->assets)) {
            $this->assets = [];
            $pathModules = $this->module->getPath();
            $moduleDirs = scandir($pathModules);
            foreach ($moduleDirs as $moduleDir) {
                if (!in_array($moduleDir, [".", ".."])) {
                    /* Config/Vendor path of the current module */
                    $currentModuleThemePaths = $pathModules . '/' . $moduleDir . '/themes';

                    /* Check whether the current module has a themes directory */
                    if (!file_exists($currentModuleThemePaths))
                        continue;

                    $themePath = scandir($currentModuleThemePaths);
                    foreach ($themePath as $themDir) {
                        if (!in_array($themDir, [".", ".."])) {
                            $currentThemeDir = $currentModuleThemePaths . '/' . $themDir . '/config.php';

                            // Check that config.php exists
                            if (!file_exists($currentThemeDir))
                                continue;

                            $themeConfig = (include $currentThemeDir);
                            if (isset($themeConfig['assets'])) {
                                $assetWithThemeName = [];
                                foreach ($themeConfig['assets'] as $k => $asset) {
                                    $asset['theme_name'] = $themDir;
                                    $assetWithThemeName[$k] = $asset;
                                }
                                $this->assets = array_merge($this->assets, $assetWithThemeName);
                            }
                        }
                    }
                }
            }
        }

        return $this->assets;
    }

    /**
     * Retrieve current theme name
     *
     * @return string
     */
    public function getCurrentThemeName() {
        if (is_null($this->_currentThemeName))
            $this->_currentThemeName = $this->getTheme()->getThemeName();

        return $this->_currentThemeName;
    }

    /**
     * @param string $currentThemeName
     *
     * @return $this
     */
    public function setCurrentThemeName($currentThemeName) {
        $this->_currentThemeName = $currentThemeName;

        return $this;
    }

    /**
     * @return string
     */
    public function getCurrentLayoutName() {
        if (is_null($this->_currentLayoutName))
            $this->_currentLayoutName = $this->getTheme()->getLayoutName();

        return $this->_currentLayoutName;
    }

    /**
     * Set the current layout used in the theme
     *
     * @param string $currentLayoutName
     *
     * @return $this
     */
    public function setCurrentLayoutName($currentLayoutName) {
        $this->_currentLayoutName = $currentLayoutName;

        return $this;
    }

    /**
     * @return \Teepluss\Theme\Theme
     * @throws \Exception
     */
    public function getTheme() {
        if (is_null($this->theme)) {
            $this->theme = app()->make('\Teepluss\Theme\Contracts\Theme');
        }

        return $this->theme;
    }

    /**
     * Register the theme's existence in the app.
     * Used to know which theme a view belongs to, and whether it is of type admin or frontend.
     *
     * @param $themeName
     * @param bool $isAdmin
     *
     * @return $this
     */
    public function registerTheme($themeName, $isAdmin = true) {
        /* FIXME: need cache here */
        $theme = $this->themeModel->query()->firstOrNew(['name' => $themeName]);
        $theme->type = $isAdmin == true
            ? \Modules\IzCore\Entities\Theme::TYPE_ADMIN
            : \Modules\IzCore\Entities\Theme::TYPE_FRONTEND;
        $theme->save();

        return $this;
    }
}
vjcspy/IzCore
Repositories/Theme.php
PHP
apache-2.0
7,569
package com.ejlchina.searcher.implement;

import com.ejlchina.searcher.*;
import com.ejlchina.searcher.bean.InheritType;

import java.lang.reflect.Field;
import java.util.*;
import java.lang.reflect.Modifier;
import java.util.concurrent.ConcurrentHashMap;

/***
 * Default metadata resolver
 * @author Troy.Zhou @ 2021-10-30
 * @since v3.0.0
 */
public class DefaultMetaResolver implements MetaResolver {

    private final Map<Class<?>, BeanMeta<?>> cache = new ConcurrentHashMap<>();

    private SnippetResolver snippetResolver = new DefaultSnippetResolver();

    private DbMapping dbMapping;

    public DefaultMetaResolver() {
        this(new DefaultDbMapping());
    }

    public DefaultMetaResolver(DbMapping dbMapping) {
        this.dbMapping = dbMapping;
    }

    @Override
    public <T> BeanMeta<T> resolve(Class<T> beanClass) {
        @SuppressWarnings("unchecked")
        BeanMeta<T> beanMeta = (BeanMeta<T>) cache.get(beanClass);
        if (beanMeta != null) {
            return beanMeta;
        }
        synchronized (cache) {
            beanMeta = resolveMetadata(beanClass);
            cache.put(beanClass, beanMeta);
            return beanMeta;
        }
    }

    protected <T> BeanMeta<T> resolveMetadata(Class<T> beanClass) {
        DbMapping.Table table = dbMapping.table(beanClass);
        if (table == null) {
            throw new SearchException("The class [" + beanClass.getName()
                    + "] can not be searched, because it can not be resolved by " + dbMapping.getClass());
        }
        BeanMeta<T> beanMeta = new BeanMeta<>(beanClass, table.getDataSource(),
                snippetResolver.resolve(table.getTables()),
                snippetResolver.resolve(table.getJoinCond()),
                snippetResolver.resolve(table.getGroupBy()),
                table.isDistinct());
        // Field resolution
        Field[] fields = getBeanFields(beanClass);
        for (int index = 0; index < fields.length; index++) {
            Field field = fields[index];
            if (Modifier.isStatic(field.getModifiers())) {
                continue;
            }
            DbMapping.Column column = dbMapping.column(beanClass, fields[index]);
            if (column == null) {
                continue;
            }
            field.setAccessible(true);
            SqlSnippet snippet = snippetResolver.resolve(column.getFieldSql());
            // Note: Oracle database aliases must not start with an underscore
            FieldMeta fieldMeta = new FieldMeta(beanMeta, field, snippet, "c_" + index,
                    column.isConditional(), column.getOnlyOn());
            beanMeta.addFieldMeta(field.getName(), fieldMeta);
        }
        if (beanMeta.getFieldCount() == 0) {
            throw new SearchException("[" + beanClass.getName()
                    + "] is not a valid SearchBean, because there is no field mapping to database.");
        }
        return beanMeta;
    }

    protected Field[] getBeanFields(Class<?> beanClass) {
        InheritType iType = dbMapping.inheritType(beanClass);
        List<Field> fieldList = new ArrayList<>();
        Set<String> fieldNames = new HashSet<>();
        while (beanClass != Object.class) {
            for (Field field : beanClass.getDeclaredFields()) {
                String name = field.getName();
                int modifiers = field.getModifiers();
                if (field.isSynthetic() || Modifier.isStatic(modifiers)
                        || Modifier.isTransient(modifiers) || fieldNames.contains(name)) {
                    continue;
                }
                fieldList.add(field);
                fieldNames.add(name);
            }
            if (iType != InheritType.FIELD && iType != InheritType.ALL) {
                break;
            }
            beanClass = beanClass.getSuperclass();
        }
        return fieldList.toArray(new Field[0]);
    }

    public SnippetResolver getSnippetResolver() {
        return snippetResolver;
    }

    public void setSnippetResolver(SnippetResolver snippetResolver) {
        this.snippetResolver = Objects.requireNonNull(snippetResolver);
    }

    public DbMapping getDbMapping() {
        return dbMapping;
    }

    public void setDbMapping(DbMapping dbMapping) {
        this.dbMapping = Objects.requireNonNull(dbMapping);
    }
}
ejlchina/bean-searcher
bean-searcher/src/main/java/com/ejlchina/searcher/implement/DefaultMetaResolver.java
Java
apache-2.0
4,320
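A minimal usage sketch for the DefaultMetaResolver shown above. Only DefaultMetaResolver, its constructors, resolve(), and BeanMeta#getFieldCount() appear in the source; the UserBean class and the printed message are hypothetical illustrations, and the sketch assumes the default DefaultDbMapping conventions (mapping the class and its non-static, non-transient fields by name).

import com.ejlchina.searcher.BeanMeta;
import com.ejlchina.searcher.implement.DefaultMetaResolver;

public class MetaResolverSketch {

    // Hypothetical search bean: with the default DbMapping, its non-static,
    // non-transient fields are assumed to map to database columns by convention.
    public static class UserBean {
        private Long id;
        private String name;
        // getters/setters omitted for brevity
    }

    public static void main(String[] args) {
        // Uses DefaultDbMapping and DefaultSnippetResolver by default
        DefaultMetaResolver resolver = new DefaultMetaResolver();

        // resolve() builds the BeanMeta once and caches it per bean class,
        // so repeated calls return the cached instance.
        BeanMeta<UserBean> meta = resolver.resolve(UserBean.class);

        System.out.println("mapped fields: " + meta.getFieldCount());
    }
}

If no field of the bean can be mapped, resolve() throws a SearchException, as the implementation above shows.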
# AUTOGENERATED FILE
FROM balenalib/generic-amd64-debian:bullseye-run

ENV NODE_VERSION 14.18.3
ENV YARN_VERSION 1.22.4

RUN buildDeps='curl libatomic1' \
  && set -x \
  && for key in \
  6A010C5166006599AA17F08146C2130DFD2497F5 \
  ; do \
    gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
    gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
    gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
  done \
  && apt-get update && apt-get install -y $buildDeps --no-install-recommends \
  && rm -rf /var/lib/apt/lists/* \
  && curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-x64.tar.gz" \
  && echo "bd96f88e054801d1368787f7eaf77b49cd052b9543c56bd6bc0bfc90310e2756  node-v$NODE_VERSION-linux-x64.tar.gz" | sha256sum -c - \
  && tar -xzf "node-v$NODE_VERSION-linux-x64.tar.gz" -C /usr/local --strip-components=1 \
  && rm "node-v$NODE_VERSION-linux-x64.tar.gz" \
  && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
  && curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
  && gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
  && mkdir -p /opt/yarn \
  && tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
  && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
  && ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
  && rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
  && npm config set unsafe-perm true -g --unsafe-perm \
  && rm -rf /tmp/*

CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
  && echo "Running test-stack@node" \
  && chmod +x [email protected] \
  && bash [email protected] \
  && rm -rf [email protected]

RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Debian Bullseye \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.18.3, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
  && chmod +x /bin/sh-shim \
  && cp /bin/sh /bin/sh.real \
  && mv /bin/sh-shim /bin/sh
resin-io-library/base-images
balena-base-images/node/generic-amd64/debian/bullseye/14.18.3/run/Dockerfile
Dockerfile
apache-2.0
2,946
'use strict';
// https://github.com/tc39/proposal-iterator-helpers
var aCallable = require('../internals/a-callable');
var anObject = require('../internals/an-object');
var getBuiltIn = require('../internals/get-built-in');
var getMethod = require('../internals/get-method');

var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;

var createMethod = function (TYPE) {
  var IS_TO_ARRAY = TYPE == 0;
  var IS_FOR_EACH = TYPE == 1;
  var IS_EVERY = TYPE == 2;
  var IS_SOME = TYPE == 3;
  return function (iterator, fn, target) {
    anObject(iterator);
    var Promise = getBuiltIn('Promise');
    var next = aCallable(iterator.next);
    var index = 0;
    var MAPPING = fn !== undefined;
    if (MAPPING || !IS_TO_ARRAY) aCallable(fn);

    return new Promise(function (resolve, reject) {
      var closeIteration = function (method, argument) {
        try {
          var returnMethod = getMethod(iterator, 'return');
          if (returnMethod) {
            return Promise.resolve(returnMethod.call(iterator)).then(function () {
              method(argument);
            }, function (error) {
              reject(error);
            });
          }
        } catch (error2) {
          return reject(error2);
        }
        method(argument);
      };

      var onError = function (error) {
        closeIteration(reject, error);
      };

      var loop = function () {
        try {
          if (IS_TO_ARRAY && (index > MAX_SAFE_INTEGER) && MAPPING) {
            throw TypeError('The allowed number of iterations has been exceeded');
          }
          Promise.resolve(anObject(next.call(iterator))).then(function (step) {
            try {
              if (anObject(step).done) {
                if (IS_TO_ARRAY) {
                  target.length = index;
                  resolve(target);
                } else resolve(IS_SOME ? false : IS_EVERY || undefined);
              } else {
                var value = step.value;
                if (MAPPING) {
                  Promise.resolve(IS_TO_ARRAY ? fn(value, index) : fn(value)).then(function (result) {
                    if (IS_FOR_EACH) {
                      loop();
                    } else if (IS_EVERY) {
                      result ? loop() : closeIteration(resolve, false);
                    } else if (IS_TO_ARRAY) {
                      target[index++] = result;
                      loop();
                    } else {
                      result ? closeIteration(resolve, IS_SOME || value) : loop();
                    }
                  }, onError);
                } else {
                  target[index++] = value;
                  loop();
                }
              }
            } catch (error) { onError(error); }
          }, onError);
        } catch (error2) { onError(error2); }
      };

      loop();
    });
  };
};

module.exports = {
  toArray: createMethod(0),
  forEach: createMethod(1),
  every: createMethod(2),
  some: createMethod(3),
  find: createMethod(4)
};
cloudfoundry-community/asp.net5-buildpack
fixtures/node_apps/angular_dotnet/ClientApp/node_modules/core-js/internals/async-iterator-iteration.js
JavaScript
apache-2.0
2,973
---
layout: blog-archive
title: Observatory
permalink: /blog/category/observatory/
archive-name: observatory
archive-type: Category
breadcrumb: blog
---
andrew-m-lab/andrew-m-lab.github.io
_pages/categories/observatory.md
Markdown
apache-2.0
153
/*! * ${copyright} */ sap.ui.require([ "jquery.sap.global", "sap/ui/base/SyncPromise", "sap/ui/model/BindingMode", "sap/ui/model/ChangeReason", "sap/ui/model/ClientListBinding", "sap/ui/model/Context", "sap/ui/model/ContextBinding", "sap/ui/model/Filter", "sap/ui/model/MetaModel", "sap/ui/model/PropertyBinding", "sap/ui/model/Sorter", "sap/ui/model/odata/OperationMode", "sap/ui/model/odata/type/Int64", "sap/ui/model/odata/type/Raw", "sap/ui/model/odata/v4/AnnotationHelper", "sap/ui/model/odata/v4/Context", "sap/ui/model/odata/v4/lib/_Helper", "sap/ui/model/odata/v4/ODataMetaModel", "sap/ui/model/odata/v4/ODataModel", "sap/ui/model/odata/v4/ValueListType", "sap/ui/test/TestUtils", "sap/ui/thirdparty/URI" ], function (jQuery, SyncPromise, BindingMode, ChangeReason, ClientListBinding, BaseContext, ContextBinding, Filter, MetaModel, PropertyBinding, Sorter, OperationMode, Int64, Raw, AnnotationHelper, Context, _Helper, ODataMetaModel, ODataModel, ValueListType, TestUtils, URI) { /*global QUnit, sinon */ /*eslint max-nested-callbacks: 0, no-loop-func: 0, no-warning-comments: 0 */ "use strict"; // Common := com.sap.vocabularies.Common.v1 // tea_busi := com.sap.gateway.default.iwbep.tea_busi.v0001 // tea_busi_product.v0001 := com.sap.gateway.default.iwbep.tea_busi_product.v0001 // tea_busi_supplier.v0001 := com.sap.gateway.default.iwbep.tea_busi_supplier.v0001 // UI := com.sap.vocabularies.UI.v1 var mMostlyEmptyScope = { "$EntityContainer" : "empty.DefaultContainer", "$Version" : "4.0", "empty." : { "$kind" : "Schema" }, "empty.DefaultContainer" : { "$kind" : "EntityContainer" } }, sODataMetaModel = "sap.ui.model.odata.v4.ODataMetaModel", mProductScope = { "$EntityContainer" : "tea_busi_product.v0001.DefaultContainer", "$Reference" : { "../../../../default/iwbep/tea_busi_supplier/0001/$metadata" : { "$Include" : [ "tea_busi_supplier.v0001." ] } }, "$Version" : "4.0", "tea_busi_product.v0001." : { "$kind" : "Schema", "$Annotations" : { // Note: simulate result of _MetadataRequestor#read "tea_busi_product.v0001.Category/CategoryName" : { "@Common.Label" : "CategoryName from tea_busi_product.v0001." 
} } }, "tea_busi_product.v0001.Category" : { "$kind" : "EntityType", "CategoryName" : { "$kind" : "Property", "$Type" : "Edm.String" } }, "tea_busi_product.v0001.DefaultContainer" : { "$kind" : "EntityContainer" }, "tea_busi_product.v0001.Product" : { "$kind" : "EntityType", "Name" : { "$kind" : "Property", "$Type" : "Edm.String" }, "PRODUCT_2_CATEGORY" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi_product.v0001.Category" }, "PRODUCT_2_SUPPLIER" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi_supplier.v0001.Supplier" } } }, sSampleServiceUrl = "/sap/opu/odata4/sap/zui5_testv4/default/sap/zui5_epm_sample/0002/", mScope = { "$Annotations" : { "name.space.Id" : { "@Common.Label" : "ID" }, "tea_busi.DefaultContainer" : { "@DefaultContainer" : {} }, "tea_busi.DefaultContainer/T€AMS" : { "@T€AMS" : {} }, "tea_busi.TEAM" : { "@Common.Text" : { "$Path" : "Name" }, "@[email protected]" : { "$EnumMember" : "UI.TextArrangementType/TextLast" }, "@UI.Badge" : { "@Common.Label" : "Label inside", "$Type" : "UI.BadgeType", "HeadLine" : { "$Type" : "UI.DataField", "Value" : { "$Path" : "Name" } }, "Title" : { "$Type" : "UI.DataField", "Value" : { "$Path" : "Team_Id" } } }, "@[email protected]" : "Best Badge Ever!", "@UI.LineItem" : [{ "@UI.Importance" : { "$EnumMember" : "UI.ImportanceType/High" }, "$Type" : "UI.DataField", "Label" : "Team ID", "[email protected]" : "Team ID's Label", "Value" : { "$Path" : "Team_Id" } }] }, "tea_busi.TEAM/Team_Id" : { "@Common.Label" : "Team ID", "@Common.Text" : { "$Path" : "Name" }, "@[email protected]" : { "$EnumMember" : "UI.TextArrangementType/TextLast" } }, "tea_busi.Worker" : { "@UI.Facets" : [{ "$Type" : "UI.ReferenceFacet", "Target" : { // term cast "$AnnotationPath" : "@UI.LineItem" } }, { "$Type" : "UI.ReferenceFacet", "Target" : { // term cast at navigation property itself "$AnnotationPath" : "[email protected]" } }, { "$Type" : "UI.ReferenceFacet", "Target" : { // navigation property and term cast "$AnnotationPath" : "EMPLOYEE_2_TEAM/@UI.LineItem" } }, { "$Type" : "UI.ReferenceFacet", "Target" : { // type cast, navigation properties and term cast (at its type) "$AnnotationPath" : "tea_busi.TEAM/TEAM_2_EMPLOYEES/EMPLOYEE_2_TEAM/@UI.LineItem" } }], "@UI.LineItem" : [{ "$Type" : "UI.DataField", "Label" : "Team ID", "Value" : { "$Path" : "EMPLOYEE_2_TEAM/Team_Id" } }] }, "tea_busi.Worker/EMPLOYEE_2_TEAM" : { "@Common.Label" : "Employee's Team" } }, "$EntityContainer" : "tea_busi.DefaultContainer", "empty." : { "$kind" : "Schema" }, "name.space." : { "$kind" : "Schema" }, "tea_busi." 
: { "$kind" : "Schema", "@Schema" : {} }, "empty.Container" : { "$kind" : "EntityContainer" }, "name.space.BadContainer" : { "$kind" : "EntityContainer", "DanglingActionImport" : { "$kind" : "ActionImport", "$Action" : "not.Found" }, "DanglingFunctionImport" : { "$kind" : "FunctionImport", "$Function" : "not.Found" } }, "name.space.Broken" : { "$kind" : "Term", "$Type" : "not.Found" }, "name.space.BrokenFunction" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "not.Found" } }], "name.space.BrokenOverloads" : [{ "$kind" : "Operation" }], "name.space.DerivedPrimitiveFunction" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "name.space.Id" } }], "name.space.EmptyOverloads" : [], "name.space.Id" : { "$kind" : "TypeDefinition", "$UnderlyingType" : "Edm.String", "$MaxLength" : 10 }, "name.space.Term" : { // only case with a qualified name and a $Type "$kind" : "Term", "$Type" : "tea_busi.Worker" }, "name.space.OverloadedAction" : [{ "$kind" : "Action", "$IsBound" : true, "$Parameter" : [{ // "$Name" : "_it", "$Type" : "tea_busi.EQUIPMENT" }], "$ReturnType" : { "$Type" : "tea_busi.EQUIPMENT" } }, { "$kind" : "Action", "$IsBound" : true, "$Parameter" : [{ // "$Name" : "_it", "$Type" : "tea_busi.TEAM" }], "$ReturnType" : { "$Type" : "tea_busi.TEAM" } }, { // "An unbound action MAY have the same name as a bound action." "$kind" : "Action", "$ReturnType" : { "$Type" : "tea_busi.ComplexType_Salary" } }, { "$kind" : "Action", "$IsBound" : true, "$Parameter" : [{ // "$Name" : "_it", "$Type" : "tea_busi.Worker" }], "$ReturnType" : { "$Type" : "tea_busi.Worker" } }], "name.space.OverloadedFunction" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "Edm.String" } }, { "$kind" : "Function", "$ReturnType" : { "$Type" : "Edm.String" } }], "name.space.VoidAction" : [{ "$kind" : "Action" }], "tea_busi.AcChangeManagerOfTeam" : [{ "$kind" : "Action", "$ReturnType" : { "$Type" : "tea_busi.TEAM", "@Common.Label" : "Hail to the Chief" } }], "tea_busi.ComplexType_Salary" : { "$kind" : "ComplexType", "AMOUNT" : { "$kind" : "Property", "$Type" : "Edm.Decimal" }, "CURRENCY" : { "$kind" : "Property", "$Type" : "Edm.String" } }, "tea_busi.ContainedC" : { "$kind" : "EntityType", "$Key" : ["Id"], "Id" : { "$kind" : "Property", "$Type" : "Edm.String" }, "C_2_EMPLOYEE" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi.Worker" }, "C_2_S" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$Type" : "tea_busi.ContainedS" } }, "tea_busi.ContainedS" : { "$kind" : "EntityType", "$Key" : ["Id"], "Id" : { "$kind" : "Property", "$Type" : "Edm.String" }, "S_2_C" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$isCollection" : true, "$Type" : "tea_busi.ContainedC" }, "S_2_EMPLOYEE" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi.Worker" } }, "tea_busi.DefaultContainer" : { "$kind" : "EntityContainer", "ChangeManagerOfTeam" : { "$kind" : "ActionImport", "$Action" : "tea_busi.AcChangeManagerOfTeam" }, "EMPLOYEES" : { "$kind" : "EntitySet", "$NavigationPropertyBinding" : { "EMPLOYEE_2_TEAM" : "T€AMS", "EMPLOYEE_2_EQUIPM€NTS" : "EQUIPM€NTS" }, "$Type" : "tea_busi.Worker" }, "EQUIPM€NTS" : { "$kind" : "EntitySet", "$Type" : "tea_busi.EQUIPMENT" }, "GetEmployeeMaxAge" : { "$kind" : "FunctionImport", "$Function" : "tea_busi.FuGetEmployeeMaxAge" }, "Me" : { "$kind" : "Singleton", "$NavigationPropertyBinding" : { "EMPLOYEE_2_TEAM" : "T€AMS", "EMPLOYEE_2_EQUIPM€NTS" : "EQUIPM€NTS" }, "$Type" : "tea_busi.Worker" }, "OverloadedAction" : { "$kind" : "ActionImport", "$Action" : 
"name.space.OverloadedAction" }, "TEAMS" : { "$kind" : "EntitySet", "$NavigationPropertyBinding" : { "TEAM_2_EMPLOYEES" : "EMPLOYEES", "TEAM_2_CONTAINED_S/S_2_EMPLOYEE" : "EMPLOYEES" }, "$Type" : "tea_busi.TEAM" }, "T€AMS" : { "$kind" : "EntitySet", "$NavigationPropertyBinding" : { "TEAM_2_EMPLOYEES" : "EMPLOYEES" }, "$Type" : "tea_busi.TEAM" }, "VoidAction" : { "$kind" : "ActionImport", "$Action" : "name.space.VoidAction" } }, "tea_busi.EQUIPMENT" : { "$kind" : "EntityType", "$Key" : ["ID"], "ID" : { "$kind" : "Property", "$Type" : "Edm.Int32", "$Nullable" : false } }, "tea_busi.FuGetEmployeeMaxAge" : [{ "$kind" : "Function", "$ReturnType" : { "$Type" : "Edm.Int16" } }], "tea_busi.TEAM" : { "$kind" : "EntityType", "$Key" : ["Team_Id"], "Team_Id" : { "$kind" : "Property", "$Type" : "name.space.Id", "$Nullable" : false, "$MaxLength" : 10 }, "Name" : { "$kind" : "Property", "$Type" : "Edm.String", "$Nullable" : false, "$MaxLength" : 40 }, "TEAM_2_EMPLOYEES" : { "$kind" : "NavigationProperty", "$isCollection" : true, "$OnDelete" : "None", "[email protected]" : "None of my business", "$ReferentialConstraint" : { "foo" : "bar", "[email protected]" : "Just a Gigolo" }, "$Type" : "tea_busi.Worker" }, "TEAM_2_CONTAINED_S" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$Type" : "tea_busi.ContainedS" }, "TEAM_2_CONTAINED_C" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$isCollection" : true, "$Type" : "tea_busi.ContainedC" }, // Note: "value" is a symbolic name for an operation's return type iff. it is // primitive "value" : { "$kind" : "Property", "$Type" : "Edm.String" } }, "tea_busi.Worker" : { "$kind" : "EntityType", "$Key" : ["ID"], "ID" : { "$kind" : "Property", "$Type" : "Edm.String", "$Nullable" : false, "$MaxLength" : 4 }, "AGE" : { "$kind" : "Property", "$Type" : "Edm.Int16", "$Nullable" : false }, "EMPLOYEE_2_CONTAINED_S" : { "$ContainsTarget" : true, "$kind" : "NavigationProperty", "$Type" : "tea_busi.ContainedS" }, "EMPLOYEE_2_EQUIPM€NTS" : { "$kind" : "NavigationProperty", "$isCollection" : true, "$Type" : "tea_busi.EQUIPMENT", "$Nullable" : false }, "EMPLOYEE_2_TEAM" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi.TEAM", "$Nullable" : false }, "SALÃRY" : { "$kind" : "Property", "$Type" : "tea_busi.ComplexType_Salary" } }, "$$Loop" : "$$Loop/", // some endless loop "$$Term" : "name.space.Term" // replacement for any reference to the term }, oContainerData = mScope["tea_busi.DefaultContainer"], aOverloadedAction = mScope["name.space.OverloadedAction"], mSupplierScope = { "$Version" : "4.0", "tea_busi_supplier.v0001." : { "$kind" : "Schema" }, "tea_busi_supplier.v0001.Supplier" : { "$kind" : "EntityType", "Supplier_Name" : { "$kind" : "Property", "$Type" : "Edm.String" } } }, oTeamData = mScope["tea_busi.TEAM"], oTeamLineItem = mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"], oWorkerData = mScope["tea_busi.Worker"], mXServiceScope = { "$Version" : "4.0", "$Annotations" : {}, // simulate ODataMetaModel#_mergeAnnotations "$EntityContainer" : "tea_busi.v0001.DefaultContainer", "$Reference" : { // Note: Do not reference tea_busi_supplier directly from here! We want to test the // special case that it is only indirectly referenced. "../../../../default/iwbep/tea_busi_foo/0001/$metadata" : { "$Include" : [ "tea_busi_foo.v0001." ] }, "../../../../default/iwbep/tea_busi_product/0001/$metadata" : { "$Include" : [ "ignore.me.", "tea_busi_product.v0001." ] }, "/empty/$metadata" : { "$Include" : [ "empty.", "I.still.haven't.found.what.I'm.looking.for." 
] } }, "tea_busi.v0001." : { "$kind" : "Schema" }, "tea_busi.v0001.DefaultContainer" : { "$kind" : "EntityContainer", "EQUIPM€NTS" : { "$kind" : "EntitySet", "$Type" : "tea_busi.v0001.EQUIPMENT" } }, "tea_busi.v0001.EQUIPMENT" : { "$kind" : "EntityType", "EQUIPMENT_2_PRODUCT" : { "$kind" : "NavigationProperty", "$Type" : "tea_busi_product.v0001.Product" } } }, aAllScopes = [ mMostlyEmptyScope, mProductScope, mScope, mSupplierScope, mXServiceScope ]; /** * Checks the "get*" and "request*" methods corresponding to the named "fetch*" method, * using the given arguments. * * @param {object} oTestContext * the QUnit "this" object * @param {object} assert * the QUnit "assert" object * @param {string} sMethodName * method name "fetch*" * @param {object[]} aArguments * method arguments * @param {boolean} [bThrow=false] * whether the "get*" method throws if the promise is not fulfilled * @returns {Promise} * the "request*" method's promise */ function checkGetAndRequest(oTestContext, assert, sMethodName, aArguments, bThrow) { var oExpectation, sGetMethodName = sMethodName.replace("fetch", "get"), oMetaModel = oTestContext.oMetaModel, oReason = new Error("rejected"), oRejectedPromise = Promise.reject(oReason), sRequestMethodName = sMethodName.replace("fetch", "request"), oResult = {}, oSyncPromise = SyncPromise.resolve(oRejectedPromise); // resolve... oExpectation = oTestContext.mock(oMetaModel).expects(sMethodName).exactly(4); oExpectation = oExpectation.withExactArgs.apply(oExpectation, aArguments); oExpectation.returns(SyncPromise.resolve(oResult)); // get: fulfilled assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), oResult); // reject... oExpectation.returns(oSyncPromise); oTestContext.mock(Promise).expects("resolve") .withExactArgs(sinon.match.same(oSyncPromise)) .returns(oRejectedPromise); // return any promise (this is not unwrapping!) // request (promise still pending!) assert.strictEqual(oMetaModel[sRequestMethodName].apply(oMetaModel, aArguments), oRejectedPromise); // get: pending if (bThrow) { assert.throws(function () { oMetaModel[sGetMethodName].apply(oMetaModel, aArguments); }, new Error("Result pending")); } else { assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), undefined, "pending"); } return oSyncPromise.catch(function () { // get: rejected if (bThrow) { assert.throws(function () { oMetaModel[sGetMethodName].apply(oMetaModel, aArguments); }, oReason); } else { assert.strictEqual(oMetaModel[sGetMethodName].apply(oMetaModel, aArguments), undefined, "rejected"); } }); } /** * Returns a clone, that is a deep copy, of the given object. * * @param {object} o * any serializable object * @returns {object} * a deep copy of <code>o</code> */ function clone(o) { return JSON.parse(JSON.stringify(o)); } /** * Runs the given test for each name/value pair in the given fixture. The name is interpreted * as a path "[<sContextPath>'|']<sMetaPath>" and cut accordingly. The test is called with * an almost resolved sPath (just '|' replaced by '/'). 
* * @param {object} mFixture * map<string, any> * @param {function} fnTest * function(string sPath, any vResult, string sContextPath, string sMetaPath) */ function forEach(mFixture, fnTest) { var sPath; for (sPath in mFixture) { var i = sPath.indexOf("|"), sContextPath = "", sMetaPath = sPath.slice(i + 1), vValue = mFixture[sPath]; if (i >= 0) { sContextPath = sPath.slice(0, i); sPath = sContextPath + "/" + sMetaPath; } fnTest(sPath, vValue, sContextPath, sMetaPath); } } //********************************************************************************************* QUnit.module("sap.ui.model.odata.v4.ODataMetaModel", { // remember copy to ensure test isolation mOriginalScopes : clone(aAllScopes), afterEach : function (assert) { assert.deepEqual(aAllScopes, this.mOriginalScopes, "metadata unchanged"); }, /* * Allow warnings if told to; always suppress debug messages. */ allowWarnings : function (assert, bWarn) { this.mock(jQuery.sap.log).expects("isLoggable").atLeast(1) .withExactArgs(sinon.match.number, sODataMetaModel) .callsFake(function (iLogLevel) { switch (iLogLevel) { case jQuery.sap.log.Level.DEBUG: return false; case jQuery.sap.log.Level.WARNING: return bWarn; default: return true; } }); }, beforeEach : function () { var oMetadataRequestor = { read : function () { throw new Error(); } }, sUrl = "/a/b/c/d/e/$metadata"; this.oLogMock = this.mock(jQuery.sap.log); this.oLogMock.expects("warning").never(); this.oLogMock.expects("error").never(); this.oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl); this.oMetaModelMock = this.mock(this.oMetaModel); this.oModel = { reportError : function () { throw new Error("Unsupported operation"); }, resolve : ODataModel.prototype.resolve }; }, /* * Expect the given debug message with the given path, but only if debug level is on. */ expectDebug : function (bDebug, sMessage, sPath) { this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.DEBUG, sODataMetaModel).returns(bDebug); this.oLogMock.expects("debug").exactly(bDebug ? 1 : 0) .withExactArgs(sMessage, sPath, sODataMetaModel); }, /* * Expects "fetchEntityContainer" to be called at least once on the current meta model, * returning a clone of the given scope. * * @param {object} mScope */ expectFetchEntityContainer : function (mScope) { mScope = clone(mScope); this.oMetaModel.validate("n/a", mScope); // fill mSchema2MetadataUrl! 
this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope)); } }); //********************************************************************************************* QUnit.test("basics", function (assert) { var sAnnotationUri = "my/annotation.xml", aAnnotationUris = [ sAnnotationUri, "uri2.xml"], oModel = {}, oMetadataRequestor = this.oMetaModel.oRequestor, sUrl = "/~/$metadata", oMetaModel; // code under test assert.strictEqual(ODataMetaModel.prototype.$$valueAsPromise, true); // code under test oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl); assert.ok(oMetaModel instanceof MetaModel); assert.strictEqual(oMetaModel.aAnnotationUris, undefined); assert.ok(oMetaModel.hasOwnProperty("aAnnotationUris"), "own property aAnnotationUris"); assert.strictEqual(oMetaModel.oRequestor, oMetadataRequestor); assert.strictEqual(oMetaModel.sUrl, sUrl); assert.strictEqual(oMetaModel.getDefaultBindingMode(), BindingMode.OneTime); assert.strictEqual(oMetaModel.toString(), "sap.ui.model.odata.v4.ODataMetaModel: /~/$metadata"); // code under test oMetaModel.setDefaultBindingMode(BindingMode.OneWay); assert.strictEqual(oMetaModel.getDefaultBindingMode(), BindingMode.OneWay); // code under test oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl, aAnnotationUris); assert.strictEqual(oMetaModel.aAnnotationUris, aAnnotationUris, "arrays are passed"); // code under test oMetaModel = new ODataMetaModel(oMetadataRequestor, sUrl, sAnnotationUri); assert.deepEqual(oMetaModel.aAnnotationUris, [sAnnotationUri], "single annotation is wrapped"); // code under test oMetaModel = new ODataMetaModel(null, null, null, oModel); // code under test assert.strictEqual(oMetaModel.getAdapterFactoryModulePath(), "sap/ui/model/odata/v4/meta/ODataAdapterFactory"); }); //********************************************************************************************* QUnit.test("forbidden", function (assert) { assert.throws(function () { //TODO implement this.oMetaModel.bindTree(); }, new Error("Unsupported operation: v4.ODataMetaModel#bindTree")); assert.throws(function () { this.oMetaModel.getOriginalProperty(); }, new Error("Unsupported operation: v4.ODataMetaModel#getOriginalProperty")); assert.throws(function () { //TODO implement this.oMetaModel.isList(); }, new Error("Unsupported operation: v4.ODataMetaModel#isList")); assert.throws(function () { this.oMetaModel.refresh(); }, new Error("Unsupported operation: v4.ODataMetaModel#refresh")); assert.throws(function () { this.oMetaModel.setLegacySyntax(); // argument does not matter! 
}, new Error("Unsupported operation: v4.ODataMetaModel#setLegacySyntax")); assert.throws(function () { this.oMetaModel.setDefaultBindingMode(BindingMode.TwoWay); }); }); //********************************************************************************************* [ undefined, ["/my/annotation.xml"], ["/my/annotation.xml", "/another/annotation.xml"] ].forEach(function (aAnnotationURI) { var title = "fetchEntityContainer - " + JSON.stringify(aAnnotationURI); QUnit.test(title, function (assert) { var oRequestorMock = this.mock(this.oMetaModel.oRequestor), aReadResults, mRootScope = {}, oSyncPromise, that = this; function expectReads(bPrefetch) { oRequestorMock.expects("read") .withExactArgs(that.oMetaModel.sUrl, false, bPrefetch) .returns(Promise.resolve(mRootScope)); aReadResults = []; (aAnnotationURI || []).forEach(function (sAnnotationUrl) { var oAnnotationResult = {}; aReadResults.push(oAnnotationResult); oRequestorMock.expects("read") .withExactArgs(sAnnotationUrl, true, bPrefetch) .returns(Promise.resolve(oAnnotationResult)); }); } this.oMetaModel.aAnnotationUris = aAnnotationURI; this.oMetaModelMock.expects("_mergeAnnotations").never(); expectReads(true); // code under test assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), null); // bPrefetch => no caching expectReads(true); // code under test assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), null); // now test [bPrefetch=false] expectReads(); this.oMetaModelMock.expects("_mergeAnnotations") .withExactArgs(mRootScope, aReadResults); // code under test oSyncPromise = this.oMetaModel.fetchEntityContainer(); // pending assert.strictEqual(oSyncPromise.isPending(), true); // already caching assert.strictEqual(this.oMetaModel.fetchEntityContainer(), oSyncPromise); assert.strictEqual(this.oMetaModel.fetchEntityContainer(true), oSyncPromise, "now bPrefetch makes no difference"); return oSyncPromise.then(function (mRootScope0) { assert.strictEqual(mRootScope0, mRootScope); // still caching assert.strictEqual(that.oMetaModel.fetchEntityContainer(), oSyncPromise); }); }); }); //TODO later support "$Extends" : "<13.1.2 EntityContainer Extends>" //********************************************************************************************* QUnit.test("fetchEntityContainer: _mergeAnnotations fails", function (assert) { var oError = new Error(); this.mock(this.oMetaModel.oRequestor).expects("read") .withExactArgs(this.oMetaModel.sUrl, false, undefined) .returns(Promise.resolve({})); this.oMetaModelMock.expects("_mergeAnnotations").throws(oError); return this.oMetaModel.fetchEntityContainer().then(function () { assert.ok(false, "unexpected success"); }, function (oError0) { assert.strictEqual(oError0, oError); }); }); //********************************************************************************************* QUnit.test("getMetaContext", function (assert) { var oMetaContext; this.oMetaModelMock.expects("getMetaPath") .withExactArgs("/Foo/-1/bar") .returns("/Foo/bar"); // code under test oMetaContext = this.oMetaModel.getMetaContext("/Foo/-1/bar"); assert.strictEqual(oMetaContext.getModel(), this.oMetaModel); assert.strictEqual(oMetaContext.getPath(), "/Foo/bar"); }); //********************************************************************************************* QUnit.test("getMetaPath", function (assert) { var sMetaPath = {}, sPath = {}; this.mock(_Helper).expects("getMetaPath") .withExactArgs(sinon.match.same(sPath)).returns(sMetaPath); assert.strictEqual(this.oMetaModel.getMetaPath(sPath), sMetaPath); }); 
//********************************************************************************************* forEach({ // absolute path "/" : "/", "/foo/bar|/" : "/", // context is ignored // relative path "" : undefined, // w/o context --> important for MetaModel#createBindingContext etc. "|foo/bar" : undefined, // w/o context "/|" : "/", "/|foo/bar" : "/foo/bar", "/foo|bar" : "/foo/bar", "/foo/bar|" : "/foo/bar", "/foo/|bar" : "/foo/bar", // trailing slash is preserved "/foo/bar/" : "/foo/bar/", "/foo|bar/" : "/foo/bar/", // relative path that starts with a dot "/foo/bar|./" : "/foo/bar/", "/foo|./bar/" : "/foo/bar/", "/foo/|./bar/" : "/foo/bar/", // annotations "/foo|@bar" : "/foo@bar", "/foo/|@bar" : "/foo/@bar", "/foo|./@bar" : "/foo/@bar", "/foo/|./@bar" : "/foo/@bar", // technical properties "/foo|$kind" : "/foo/$kind", "/foo/|$kind" : "/foo/$kind", "/foo|./$kind" : "/foo/$kind", "/foo/|./$kind" : "/foo/$kind" }, function (sPath, sResolvedPath, sContextPath, sMetaPath) { QUnit.test("resolve: " + sContextPath + " > " + sMetaPath, function (assert) { var oContext = sContextPath && this.oMetaModel.getContext(sContextPath); assert.strictEqual(this.oMetaModel.resolve(sMetaPath, oContext), sResolvedPath); }); }); //TODO make sure that Context objects are only created for absolute paths?! //********************************************************************************************* [".bar", ".@bar", ".$kind"].forEach(function (sPath) { QUnit.test("resolve: unsupported relative path " + sPath, function (assert) { var oContext = this.oMetaModel.getContext("/foo"); assert.raises(function () { this.oMetaModel.resolve(sPath, oContext); }, new Error("Unsupported relative path: " + sPath)); }); }); //********************************************************************************************* QUnit.test("resolve: undefined", function (assert) { assert.strictEqual( this.oMetaModel.resolve(undefined, this.oMetaModel.getContext("/")), "/"); }); //********************************************************************************************* //TODO better map meta model path to pure JSON path (look up inside JsonModel)? // what about @sapui.name then, which requires a literal as expected result? // --> we could distinguish "/<path>" from "<literal>" forEach({ // "JSON" drill-down ---------------------------------------------------------------------- "/$EntityContainer" : "tea_busi.DefaultContainer", "/tea_busi./$kind" : "Schema", "/tea_busi.DefaultContainer/$kind" : "EntityContainer", // trailing slash: object vs. name -------------------------------------------------------- "/" : oContainerData, "/$EntityContainer/" : oContainerData, "/T€AMS/" : oTeamData, "/T€AMS/$Type/" : oTeamData, // scope lookup ("17.3 QualifiedName") ---------------------------------------------------- "/$EntityContainer/$kind" : "EntityContainer", "/$EntityContainer/T€AMS/$Type" : "tea_busi.TEAM", "/$EntityContainer/T€AMS/$Type/Team_Id" : oTeamData.Team_Id, // "17.3 QualifiedName", e.g. type cast --------------------------------------------------- "/tea_busi." : mScope["tea_busi."], // access to schema "/tea_busi.DefaultContainer/EMPLOYEES/tea_busi.Worker/AGE" : oWorkerData.AGE, // implicit $Type insertion --------------------------------------------------------------- "/T€AMS/Team_Id" : oTeamData.Team_Id, "/T€AMS/TEAM_2_EMPLOYEES" : oTeamData.TEAM_2_EMPLOYEES, "/T€AMS/TEAM_2_EMPLOYEES/AGE" : oWorkerData.AGE, // scope lookup, then implicit $Type insertion! 
"/$$Term/AGE" : oWorkerData.AGE, // "17.2 SimpleIdentifier": lookup inside current schema child ---------------------------- "/T€AMS" : oContainerData["T€AMS"], "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/" : oWorkerData, "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/$Type" : "tea_busi.Worker", "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/AGE" : oWorkerData.AGE, // operations ----------------------------------------------------------------------------- "/OverloadedAction" : oContainerData["OverloadedAction"], "/OverloadedAction/$Action" : "name.space.OverloadedAction", "/ChangeManagerOfTeam/" : oTeamData, //TODO mScope[mScope["..."][0].$ReturnType.$Type] is where the next OData simple identifier // would live in case of entity/complex type, but we would like to avoid warnings for // primitive types - how to tell the difference? // "/GetEmployeeMaxAge/" : "Edm.Int16", // Note: "value" is a symbolic name for the whole return type iff. it is primitive "/GetEmployeeMaxAge/value" : mScope["tea_busi.FuGetEmployeeMaxAge"][0].$ReturnType, "/GetEmployeeMaxAge/value/$Type" : "Edm.Int16", // path may continue! "/tea_busi.FuGetEmployeeMaxAge/value" : mScope["tea_busi.FuGetEmployeeMaxAge"][0].$ReturnType, "/name.space.DerivedPrimitiveFunction/value" //TODO merge facets of return type and type definition?! : mScope["name.space.DerivedPrimitiveFunction"][0].$ReturnType, "/ChangeManagerOfTeam/value" : oTeamData.value, // action overloads ----------------------------------------------------------------------- //TODO @$ui5.overload: support for split segments? etc. "/OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([aOverloadedAction[2]]), "/OverloadedAction/@$ui5.overload/0" : aOverloadedAction[2], // Note: trailing slash does not make a difference in "JSON" drill-down "/OverloadedAction/@$ui5.overload/0/$ReturnType/" : aOverloadedAction[2].$ReturnType, "/OverloadedAction/@$ui5.overload/0/$ReturnType/$Type" : "tea_busi.ComplexType_Salary", "/OverloadedAction/" : mScope["tea_busi.ComplexType_Salary"], "/name.space.OverloadedAction" : aOverloadedAction, "/T€AMS/NotFound/name.space.OverloadedAction" : aOverloadedAction, "/name.space.OverloadedAction/1" : aOverloadedAction[1], "/OverloadedAction/$Action/1" : aOverloadedAction[1], "/OverloadedAction/@$ui5.overload/AMOUNT" : mScope["tea_busi.ComplexType_Salary"].AMOUNT, "/OverloadedAction/AMOUNT" : mScope["tea_busi.ComplexType_Salary"].AMOUNT, "/T€AMS/name.space.OverloadedAction/Team_Id" : oTeamData.Team_Id, "/T€AMS/name.space.OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([aOverloadedAction[1]]), "/name.space.OverloadedAction/@$ui5.overload" : sinon.match.array.deepEquals([]), // only "Action" and "Function" is expected as $kind, but others are not filtered out! "/name.space.BrokenOverloads" : sinon.match.array.deepEquals(mScope["name.space.BrokenOverloads"]), // annotations ---------------------------------------------------------------------------- "/@DefaultContainer" : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/tea_busi.DefaultContainer@DefaultContainer" : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/tea_busi.DefaultContainer/@DefaultContainer" // w/o $Type, slash makes no difference! 
: mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/$EntityContainer@DefaultContainer" // Note: we could change this : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/$EntityContainer/@DefaultContainer" // w/o $Type, slash makes no difference! : mScope.$Annotations["tea_busi.DefaultContainer"]["@DefaultContainer"], "/T€AMS/$Type/@UI.LineItem" : oTeamLineItem, "/T€AMS/@UI.LineItem" : oTeamLineItem, "/T€AMS/@UI.LineItem/0/Label" : oTeamLineItem[0].Label, "/T€AMS/@UI.LineItem/0/@UI.Importance" : oTeamLineItem[0]["@UI.Importance"], "/T€AMS@T€AMS" : mScope.$Annotations["tea_busi.DefaultContainer/T€AMS"]["@T€AMS"], "/T€AMS/@Common.Text" : mScope.$Annotations["tea_busi.TEAM"]["@Common.Text"], "/T€AMS/@[email protected]" : mScope.$Annotations["tea_busi.TEAM"]["@[email protected]"], "/T€AMS/[email protected]" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"], "/T€AMS/[email protected]@UI.TextArrangement" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@[email protected]"], "/tea_busi./@Schema" : mScope["tea_busi."]["@Schema"], // inline annotations "/ChangeManagerOfTeam/$Action/0/$ReturnType/@Common.Label" : "Hail to the Chief", "/T€AMS/TEAM_2_EMPLOYEES/[email protected]" : "None of my business", "/T€AMS/TEAM_2_EMPLOYEES/$ReferentialConstraint/[email protected]" : "Just a Gigolo", "/T€AMS/@UI.LineItem/0/[email protected]" : "Team ID's Label", "/T€AMS/@[email protected]" : "Best Badge Ever!", // annotation of annotation "/T€AMS/@UI.Badge/@Common.Label" : "Label inside", // annotation of record // "@" to access to all annotations, e.g. for iteration "/T€AMS@" : mScope.$Annotations["tea_busi.DefaultContainer/T€AMS"], "/T€AMS/@" : mScope.$Annotations["tea_busi.TEAM"], "/T€AMS/Team_Id@" : mScope.$Annotations["tea_busi.TEAM/Team_Id"], // "14.5.12 Expression edm:Path" // Note: see integration test "{field>Value/[email protected]}" "/T€AMS/@UI.LineItem/0/Value/[email protected]" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"], "/T€AMS/@UI.LineItem/0/Value/$Path/@Common.Label" : mScope.$Annotations["name.space.Id"]["@Common.Label"], "/EMPLOYEES/@UI.LineItem/0/Value/[email protected]" : mScope.$Annotations["tea_busi.TEAM/Team_Id"]["@Common.Text"], // "14.5.2 Expression edm:AnnotationPath" "/EMPLOYEES/@UI.Facets/0/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.Worker"]["@UI.LineItem"], "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.Worker/EMPLOYEE_2_TEAM"]["@Common.Label"], "/EMPLOYEES/@UI.Facets/2/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"], "/EMPLOYEES/@UI.Facets/3/Target/$AnnotationPath/" : mScope.$Annotations["tea_busi.TEAM"]["@UI.LineItem"], // @sapui.name ---------------------------------------------------------------------------- "/@sapui.name" : "tea_busi.DefaultContainer", "/[email protected]" : "tea_busi.DefaultContainer", "/tea_busi.DefaultContainer/@sapui.name" : "tea_busi.DefaultContainer", // no $Type here! 
"/$EntityContainer/@sapui.name" : "tea_busi.DefaultContainer", "/T€[email protected]" : "T€AMS", "/T€AMS/@sapui.name" : "tea_busi.TEAM", "/T€AMS/[email protected]" : "Team_Id", "/T€AMS/[email protected]" : "TEAM_2_EMPLOYEES", "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/@sapui.name" : "tea_busi.Worker", "/T€AMS/$NavigationPropertyBinding/TEAM_2_EMPLOYEES/[email protected]" : "AGE", "/T€AMS@T€[email protected]" : "@T€AMS", "/T€AMS@/@T€[email protected]" : "@T€AMS", "/T€AMS@T€AMS/@sapui.name" : "@T€AMS", // no $Type inside @T€AMS, / makes no difference! "/T€AMS@/@T€AMS/@sapui.name" : "@T€AMS", // dito "/T€AMS/@UI.LineItem/0/@UI.Importance/@sapui.name" : "@UI.Importance", // in "JSON" mode "/T€AMS/Team_Id@/@[email protected]" : "@Common.Label" // avoid indirection here! }, function (sPath, vResult) { QUnit.test("fetchObject: " + sPath, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); if (vResult && typeof vResult === "object" && "test" in vResult) { // Sinon.JS matcher assert.ok(vResult.test(oSyncPromise.getResult()), vResult); } else { assert.strictEqual(oSyncPromise.getResult(), vResult); } // self-guard to avoid that a complex right-hand side evaluates to undefined assert.notStrictEqual(vResult, undefined, "use this test for defined results only!"); }); }); //TODO annotations at enum member ".../<10.2.1 Member Name>@..." (Note: "<10.2.2 Member Value>" // might be a string! Avoid indirection!) //TODO special cases where inline and external targeting annotations need to be merged! //TODO support also external targeting from a different schema! //TODO MySchema.MyFunction/MyParameter --> requires search in array?! //TODO $count? //TODO "For annotations targeting a property of an entity type or complex type, the path // expression is evaluated starting at the outermost entity type or complex type named in the // Target of the enclosing edm:Annotations element, i.e. an empty path resolves to the // outermost type, and the first segment of a non-empty path MUST be a property or navigation // property of the outermost type, a type cast, or a term cast." --> consequences for us? //********************************************************************************************* [ // "JSON" drill-down ---------------------------------------------------------------------- "/$missing", "/tea_busi.DefaultContainer/$missing", "/tea_busi.DefaultContainer/missing", // "17.2 SimpleIdentifier" treated like any property "/tea_busi.FuGetEmployeeMaxAge/0/tea_busi.FuGetEmployeeMaxAge", // "0" switches to JSON "/tea_busi.TEAM/$Key/this.is.missing", "/tea_busi.Worker/missing", // entity container (see above) treated like any schema child // scope lookup ("17.3 QualifiedName") ---------------------------------------------------- "/$EntityContainer/$missing", "/$EntityContainer/missing", // implicit $Type insertion --------------------------------------------------------------- "/T€AMS/$Key", // avoid $Type insertion for following $ segments "/T€AMS/missing", "/T€AMS/$missing", // annotations ---------------------------------------------------------------------------- "/tea_busi.Worker@missing", "/tea_busi.Worker/@missing", // "@" to access to all annotations, e.g. 
for iteration "/tea_busi.Worker/@/@missing", // operations ----------------------------------------------------------------------------- "/VoidAction/" ].forEach(function (sPath) { QUnit.test("fetchObject: " + sPath + " --> undefined", function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), undefined); }); }); //********************************************************************************************* QUnit.test("fetchObject: Invalid relative path w/o context", function (assert) { var sMetaPath = "some/relative/path", oSyncPromise; this.oLogMock.expects("error").withExactArgs("Invalid relative path w/o context", sMetaPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sMetaPath, null); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), null); }); //********************************************************************************************* ["/empty.Container/@", "/T€AMS/Name@"].forEach(function (sPath) { QUnit.test("fetchObject returns {} (anonymous empty object): " + sPath, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), {}); // strictEqual would not work! }); }); //********************************************************************************************* QUnit.test("fetchObject without $Annotations", function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mMostlyEmptyScope)); // code under test oSyncPromise = this.oMetaModel.fetchObject("/@DefaultContainer"); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); // strictEqual would not work! }); //TODO if no annotations exist for an external target, avoid {} internally unless "@" is used? //********************************************************************************************* [false, true].forEach(function (bWarn) { forEach({ "/$$Loop/" : "Invalid recursion at /$$Loop", // Invalid segment (warning) ---------------------------------------------------------- "//$Foo" : "Invalid empty segment", "/tea_busi./$Annotations" : "Invalid segment: $Annotations", // entrance forbidden! // Unknown ... ------------------------------------------------------------------------ "/not.Found" : "Unknown qualified name not.Found", "/Me/not.Found" : "Unknown qualified name not.Found", // no "at /.../undefined"! "/not.Found@missing" : "Unknown qualified name not.Found", "/." : "Unknown child . 
of tea_busi.DefaultContainer", "/Foo" : "Unknown child Foo of tea_busi.DefaultContainer", "/$EntityContainer/$kind/" : "Unknown child EntityContainer" + " of tea_busi.DefaultContainer at /$EntityContainer/$kind", // implicit $Action, $Function, $Type insertion "/name.space.BadContainer/DanglingActionImport/" : "Unknown qualified name not.Found" + " at /name.space.BadContainer/DanglingActionImport/$Action", "/name.space.BadContainer/DanglingFunctionImport/" : "Unknown qualified name not.Found" + " at /name.space.BadContainer/DanglingFunctionImport/$Function", "/name.space.Broken/" : "Unknown qualified name not.Found at /name.space.Broken/$Type", "/name.space.BrokenFunction/" : "Unknown qualified name not.Found" + " at /name.space.BrokenFunction/0/$ReturnType/$Type", //TODO align with "/GetEmployeeMaxAge/" : "Edm.Int16" "/GetEmployeeMaxAge/@sapui.name" : "Unknown qualified name Edm.Int16" + " at /tea_busi.FuGetEmployeeMaxAge/0/$ReturnType/$Type", "/GetEmployeeMaxAge/value/@sapui.name" : "Unknown qualified name Edm.Int16" + " at /tea_busi.FuGetEmployeeMaxAge/0/$ReturnType/$Type", // implicit scope lookup "/name.space.Broken/$Type/" : "Unknown qualified name not.Found at /name.space.Broken/$Type", "/tea_busi.DefaultContainer/$kind/@sapui.name" : "Unknown child EntityContainer" + " of tea_busi.DefaultContainer at /tea_busi.DefaultContainer/$kind", // Unsupported path before @sapui.name ------------------------------------------------ "/[email protected]" : "Unsupported path before @sapui.name", "/tea_busi.FuGetEmployeeMaxAge/[email protected]" : "Unsupported path before @sapui.name", "/tea_busi.TEAM/$Key/not.Found/@sapui.name" : "Unsupported path before @sapui.name", "/GetEmployeeMaxAge/[email protected]" : "Unsupported path before @sapui.name", // Unsupported path after @sapui.name ------------------------------------------------- "/@sapui.name/foo" : "Unsupported path after @sapui.name", "/$EntityContainer/T€AMS/@sapui.name/foo" : "Unsupported path after @sapui.name", // Unsupported path after @@... ------------------------------------------------------- "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath@@this.is.ignored/foo" : "Unsupported path after @@this.is.ignored", "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/@@this.is.ignored@foo" : "Unsupported path after @@this.is.ignored", "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath@@[email protected]" : "Unsupported path after @@this.is.ignored", // ...is not a function but... -------------------------------------------------------- "/@@sap.ui.model.odata.v4.AnnotationHelper.invalid" : "sap.ui.model.odata.v4.AnnotationHelper.invalid is not a function but: undefined", "/@@sap.ui.model.odata.v4.AnnotationHelper" : "sap.ui.model.odata.v4.AnnotationHelper is not a function but: " + sap.ui.model.odata.v4.AnnotationHelper, // Unsupported overloads -------------------------------------------------------------- "/name.space.EmptyOverloads/" : "Unsupported overloads", "/name.space.OverloadedAction/" : "Unsupported overloads", "/name.space.OverloadedFunction/" : "Unsupported overloads" }, function (sPath, sWarning) { QUnit.test("fetchObject fails: " + sPath + ", warn = " + bWarn, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 
1 : 0) .withExactArgs(sWarning, sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); }); }); }); //********************************************************************************************* [false, true].forEach(function (bDebug) { forEach({ // Invalid segment (debug) ------------------------------------------------------------ "/$Foo/@bar" : "Invalid segment: @bar", "/$Foo/$Bar" : "Invalid segment: $Bar", "/$Foo/$Bar/$Baz" : "Invalid segment: $Bar", "/$EntityContainer/T€AMS/Team_Id/$MaxLength/." : "Invalid segment: .", "/$EntityContainer/T€AMS/Team_Id/$Nullable/." : "Invalid segment: .", "/$EntityContainer/T€AMS/Team_Id/NotFound/Invalid" : "Invalid segment: Invalid" }, function (sPath, sMessage) { QUnit.test("fetchObject fails: " + sPath + ", debug = " + bDebug, function (assert) { var oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.DEBUG, sODataMetaModel).returns(bDebug); this.oLogMock.expects("debug").exactly(bDebug ? 1 : 0) .withExactArgs(sMessage, sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); }); }); }); //********************************************************************************************* [ "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath", "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath/" ].forEach(function (sPath) { QUnit.test("fetchObject: " + sPath + "@@...isMultiple", function (assert) { var oContext, oInput, fnIsMultiple = this.mock(AnnotationHelper).expects("isMultiple"), oResult = {}, oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) // see oInput .returns(SyncPromise.resolve(mScope)); oInput = this.oMetaModel.getObject(sPath); fnIsMultiple .withExactArgs(oInput, sinon.match({ context : sinon.match.object, schemaChildName : "tea_busi.Worker" })).returns(oResult); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath + "@@sap.ui.model.odata.v4.AnnotationHelper.isMultiple"); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), oResult); oContext = fnIsMultiple.args[0][1].context; assert.ok(oContext instanceof BaseContext); assert.strictEqual(oContext.getModel(), this.oMetaModel); assert.strictEqual(oContext.getPath(), sPath); assert.strictEqual(oContext.getObject(), oInput); }); }); //********************************************************************************************* (function () { var sPath, sPathPrefix, mPathPrefix2SchemaChildName = { "/EMPLOYEES/@UI.Facets/1/Target/$AnnotationPath" : "tea_busi.Worker", "/T€AMS/@UI.LineItem/0/Value/[email protected]" : "tea_busi.TEAM", "/T€AMS/@UI.LineItem/0/Value/$Path/@Common.Label" : "name.space.Id" }, sSchemaChildName; for (sPathPrefix in mPathPrefix2SchemaChildName) { sPath = sPathPrefix + "@@.computedAnnotation"; sSchemaChildName = mPathPrefix2SchemaChildName[sPathPrefix]; QUnit.test("fetchObject: " + sPath, function (assert) { var fnComputedAnnotation, oContext, oInput, oResult = {}, oScope = { computedAnnotation : function () {} }, oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) // see oInput .returns(SyncPromise.resolve(mScope)); oInput = 
this.oMetaModel.getObject(sPathPrefix); fnComputedAnnotation = this.mock(oScope).expects("computedAnnotation"); fnComputedAnnotation .withExactArgs(oInput, sinon.match({ context : sinon.match.object, schemaChildName : sSchemaChildName })).returns(oResult); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath, null, {scope : oScope}); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), oResult); oContext = fnComputedAnnotation.args[0][1].context; assert.ok(oContext instanceof BaseContext); assert.strictEqual(oContext.getModel(), this.oMetaModel); assert.strictEqual(oContext.getPath(), sPathPrefix); assert.strictEqual(oContext.getObject(), oInput); }); } }()); //********************************************************************************************* [false, true].forEach(function (bWarn) { QUnit.test("fetchObject: " + "...@@... throws", function (assert) { var oError = new Error("This call failed intentionally"), sPath = "/@@sap.ui.model.odata.v4.AnnotationHelper.isMultiple", oSyncPromise; this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); this.mock(AnnotationHelper).expects("isMultiple") .throws(oError); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0).withExactArgs( "Error calling sap.ui.model.odata.v4.AnnotationHelper.isMultiple: " + oError, sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), undefined); }); }); //********************************************************************************************* [false, true].forEach(function (bDebug) { QUnit.test("fetchObject: cross-service reference, bDebug = " + bDebug, function (assert) { var mClonedProductScope = clone(mProductScope), aPromises = [], oRequestorMock = this.mock(this.oMetaModel.oRequestor), that = this; /* * Expect the given debug message with the given path. */ function expectDebug(sMessage, sPath) { that.expectDebug(bDebug, sMessage, sPath); } /* * Code under test: ODataMetaModel#fetchObject with the given path should yield the * given expected result. */ function codeUnderTest(sPath, vExpectedResult) { aPromises.push(that.oMetaModel.fetchObject(sPath).then(function (vResult) { assert.strictEqual(vResult, vExpectedResult); })); } this.expectFetchEntityContainer(mXServiceScope); oRequestorMock.expects("read") .withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata") .returns(Promise.resolve(mClonedProductScope)); oRequestorMock.expects("read") .withExactArgs("/a/default/iwbep/tea_busi_supplier/0001/$metadata") .returns(Promise.resolve(mSupplierScope)); oRequestorMock.expects("read") .withExactArgs("/empty/$metadata") .returns(Promise.resolve(mMostlyEmptyScope)); expectDebug("Namespace tea_busi_product.v0001. found in $Include" + " of /a/default/iwbep/tea_busi_product/0001/$metadata" + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); expectDebug("Reading /a/default/iwbep/tea_busi_product/0001/$metadata" + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); expectDebug("Waiting for tea_busi_product.v0001." 
+ " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name", mClonedProductScope["tea_busi_product.v0001.Product"].Name); expectDebug("Waiting for tea_busi_product.v0001." + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_CATEGORY/CategoryName"); codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_CATEGORY/CategoryName", mClonedProductScope["tea_busi_product.v0001.Category"].CategoryName); expectDebug("Waiting for tea_busi_product.v0001.", "/tea_busi_product.v0001.Category/CategoryName"); codeUnderTest("/tea_busi_product.v0001.Category/CategoryName", mClonedProductScope["tea_busi_product.v0001.Category"].CategoryName); expectDebug("Waiting for tea_busi_product.v0001.", "/tea_busi_product.v0001.Category/[email protected]"); codeUnderTest("/tea_busi_product.v0001.Category/[email protected]", "CategoryName from tea_busi_product.v0001."); expectDebug("Waiting for tea_busi_product.v0001." + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); codeUnderTest("/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name", mSupplierScope["tea_busi_supplier.v0001.Supplier"].Supplier_Name); expectDebug("Namespace empty. found in $Include of /empty/$metadata", "/empty.DefaultContainer"); expectDebug("Reading /empty/$metadata", "/empty.DefaultContainer"); expectDebug("Waiting for empty.", "/empty.DefaultContainer"); codeUnderTest("/empty.DefaultContainer", mMostlyEmptyScope["empty.DefaultContainer"]); // Note: these are logged asynchronously! expectDebug("Including tea_busi_product.v0001." + " from /a/default/iwbep/tea_busi_product/0001/$metadata" + " at /tea_busi.v0001.EQUIPMENT/EQUIPMENT_2_PRODUCT/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/Name"); expectDebug("Including empty. from /empty/$metadata", "/empty.DefaultContainer"); expectDebug("Namespace tea_busi_supplier.v0001. found in $Include" + " of /a/default/iwbep/tea_busi_supplier/0001/$metadata" + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); expectDebug("Reading /a/default/iwbep/tea_busi_supplier/0001/$metadata" + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); expectDebug("Waiting for tea_busi_supplier.v0001." + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); expectDebug("Including tea_busi_supplier.v0001." + " from /a/default/iwbep/tea_busi_supplier/0001/$metadata" + " at /tea_busi_product.v0001.Product/PRODUCT_2_SUPPLIER/$Type", "/EQUIPM€NTS/EQUIPMENT_2_PRODUCT/PRODUCT_2_SUPPLIER/Supplier_Name"); return Promise.all(aPromises); }); }); //TODO Decision: It is an error if a namespace is referenced multiple times with different URIs. // This should be checked even when load-on-demand is used. // (It should not even be included multiple times with the same URI!) //TODO Check that no namespace is included which is already present! //TODO API to load "transitive closure" //TODO support for sync. 
XML Templating //********************************************************************************************* [false, true].forEach(function (bWarn) { var sTitle = "fetchObject: missing cross-service reference, bWarn = " + bWarn; QUnit.test(sTitle, function (assert) { var sPath = "/not.found", oSyncPromise; this.expectFetchEntityContainer(mMostlyEmptyScope); this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel).returns(bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("Unknown qualified name not.found", sPath, sODataMetaModel); // code under test oSyncPromise = this.oMetaModel.fetchObject(sPath); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.deepEqual(oSyncPromise.getResult(), undefined); }); }); //********************************************************************************************* [false, true].forEach(function (bWarn) { var sTitle = "fetchObject: referenced metadata does not contain included schema, bWarn = " + bWarn; QUnit.test(sTitle, function (assert) { var sSchemaName = "I.still.haven't.found.what.I'm.looking.for.", sQualifiedName = sSchemaName + "Child", sPath = "/" + sQualifiedName; this.expectFetchEntityContainer(mXServiceScope); this.mock(this.oMetaModel.oRequestor).expects("read") .withExactArgs("/empty/$metadata") .returns(Promise.resolve(mMostlyEmptyScope)); this.allowWarnings(assert, bWarn); this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("/empty/$metadata does not contain " + sSchemaName, sPath, sODataMetaModel); this.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("Unknown qualified name " + sQualifiedName, sPath, sODataMetaModel); // code under test return this.oMetaModel.fetchObject(sPath).then(function (vResult) { assert.deepEqual(vResult, undefined); }); }); }); //********************************************************************************************* [false, true].forEach(function (bWarn) { var sTitle = "fetchObject: cross-service reference, respect $Include; bWarn = " + bWarn; QUnit.test(sTitle, function (assert) { var mScope0 = { "$Version" : "4.0", "$Reference" : { "../../../../default/iwbep/tea_busi_product/0001/$metadata" : { "$Include" : [ "not.found.", "tea_busi_product.v0001.", "tea_busi_supplier.v0001." ] } } }, mReferencedScope = { "$Version" : "4.0", "must.not.be.included." : { "$kind" : "Schema" }, "tea_busi_product.v0001." : { "$kind" : "Schema" }, "tea_busi_supplier.v0001." : { "$kind" : "Schema" } }, oRequestorMock = this.mock(this.oMetaModel.oRequestor), that = this; this.expectFetchEntityContainer(mScope0); oRequestorMock.expects("read") .withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata") .returns(Promise.resolve(mReferencedScope)); this.allowWarnings(assert, bWarn); // code under test return this.oMetaModel.fetchObject("/tea_busi_product.v0001.").then(function (vResult) { var oSyncPromise; assert.strictEqual(vResult, mReferencedScope["tea_busi_product.v0001."]); assert.ok(that.oMetaModel.mSchema2MetadataUrl["tea_busi_product.v0001."] ["/a/default/iwbep/tea_busi_product/0001/$metadata"], "document marked as read"); that.oLogMock.expects("warning").exactly(bWarn ? 
1 : 0) .withExactArgs("Unknown qualified name must.not.be.included.", "/must.not.be.included.", sODataMetaModel); assert.strictEqual(that.oMetaModel.getObject("/must.not.be.included."), undefined, "must not include schemata which are not mentioned in edmx:Include"); assert.strictEqual(that.oMetaModel.getObject("/tea_busi_supplier.v0001."), mReferencedScope["tea_busi_supplier.v0001."]); // now check that "not.found." does not trigger another read(), // does finish synchronously and logs a warning that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("/a/default/iwbep/tea_busi_product/0001/$metadata" + " does not contain not.found.", "/not.found.", sODataMetaModel); that.oLogMock.expects("warning").exactly(bWarn ? 1 : 0) .withExactArgs("Unknown qualified name not.found.", "/not.found.", sODataMetaModel); // code under test oSyncPromise = that.oMetaModel.fetchObject("/not.found."); assert.strictEqual(oSyncPromise.isFulfilled(), true); assert.strictEqual(oSyncPromise.getResult(), undefined); }); }); }); //********************************************************************************************* QUnit.test("fetchObject: cross-service reference - validation failure", function (assert) { var oError = new Error(), mReferencedScope = {}, sUrl = "/a/default/iwbep/tea_busi_product/0001/$metadata"; this.expectFetchEntityContainer(mXServiceScope); this.mock(this.oMetaModel.oRequestor).expects("read").withExactArgs(sUrl) .returns(Promise.resolve(mReferencedScope)); this.oMetaModelMock.expects("validate") .withExactArgs(sUrl, mReferencedScope) .throws(oError); return this.oMetaModel.fetchObject("/tea_busi_product.v0001.Product").then(function () { assert.ok(false); }, function (oError0) { assert.strictEqual(oError0, oError); }); }); //********************************************************************************************* QUnit.test("fetchObject: cross-service reference - document loaded from different URI", function (assert) { var sMessage = "A schema cannot span more than one document: schema is referenced by" + " following URLs: /a/default/iwbep/tea_busi_product/0001/$metadata," + " /second/reference", sSchema = "tea_busi_product.v0001."; this.expectFetchEntityContainer(mXServiceScope); this.oLogMock.expects("error") .withExactArgs(sMessage, sSchema, sODataMetaModel); // simulate 2 references for a schema this.oMetaModel.mSchema2MetadataUrl["tea_busi_product.v0001."]["/second/reference"] = false; // code under test return this.oMetaModel.fetchObject("/tea_busi_product.v0001.Product").then(function () { assert.ok(false); }, function (oError0) { assert.strictEqual(oError0.message, sSchema + ": " + sMessage); }); }); //********************************************************************************************* QUnit.test("fetchObject: cross-service reference - duplicate include", function (assert) { var oRequestorMock = this.mock(this.oMetaModel.oRequestor), // root service includes both A and B, A also includes B mScope0 = { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "A." ] }, "/B/$metadata" : { "$Include" : [ "B." ] } } }, mScopeA = { "$Version" : "4.0", "$Reference" : { "/B/$metadata" : { "$Include" : [ "B.", "B.B." // includes additional namespace from already read document ] } }, "A." : { "$kind" : "Schema" } }, mScopeB = { "$Version" : "4.0", "B." : { "$kind" : "Schema" }, "B.B." 
: { "$kind" : "Schema" } }, that = this; this.expectFetchEntityContainer(mScope0); oRequestorMock.expects("read").withExactArgs("/A/$metadata") .returns(Promise.resolve(mScopeA)); oRequestorMock.expects("read").withExactArgs("/B/$metadata") .returns(Promise.resolve(mScopeB)); return this.oMetaModel.fetchObject("/B.") .then(function (vResult) { assert.strictEqual(vResult, mScopeB["B."]); // code under test - we must not overwrite our "$ui5.read" promise! return that.oMetaModel.fetchObject("/A.") .then(function (vResult) { assert.strictEqual(vResult, mScopeA["A."]); // Note: must not trigger read() again! return that.oMetaModel.fetchObject("/B.B.") .then(function (vResult) { assert.strictEqual(vResult, mScopeB["B.B."]); }); }); }); }); //TODO Implement consistency checks that the same namespace is always included from the same // reference URI, no matter which referencing document. //********************************************************************************************* [undefined, false, true].forEach(function (bSupportReferences) { var sTitle = "fetchObject: cross-service reference - supportReferences: " + bSupportReferences; QUnit.test(sTitle, function (assert) { var mClonedProductScope = clone(mProductScope), oModel = new ODataModel({ // code under test serviceUrl : "/a/b/c/d/e/", supportReferences : bSupportReferences, synchronizationMode : "None" }), sPath = "/tea_busi_product.v0001.Product", sUrl = "/a/default/iwbep/tea_busi_product/0001/$metadata"; this.oMetaModel = oModel.getMetaModel(); this.oMetaModelMock = this.mock(this.oMetaModel); bSupportReferences = bSupportReferences !== false; // default is true! assert.strictEqual(this.oMetaModel.bSupportReferences, bSupportReferences); this.expectFetchEntityContainer(mXServiceScope); this.mock(this.oMetaModel.oRequestor).expects("read") .exactly(bSupportReferences ? 1 : 0) .withExactArgs(sUrl) .returns(Promise.resolve(mClonedProductScope)); this.allowWarnings(assert, true); this.oLogMock.expects("warning").exactly(bSupportReferences ? 0 : 1) .withExactArgs("Unknown qualified name " + sPath.slice(1), sPath, sODataMetaModel); // code under test return this.oMetaModel.fetchObject(sPath).then(function (vResult) { assert.strictEqual(vResult, bSupportReferences ? 
				mClonedProductScope["tea_busi_product.v0001.Product"] : undefined);
			});
		});
	});

	//*********************************************************************************************
	QUnit.test("getObject, requestObject", function (assert) {
		return checkGetAndRequest(this, assert, "fetchObject", ["sPath", {/*oContext*/}]);
	});

	//*********************************************************************************************
	[{
		$Type : "Edm.Boolean"
	}, {
		$Type : "Edm.Byte"
	}, {
		$Type : "Edm.Date"
	}, {
		$Type : "Edm.DateTimeOffset"
	}, {
		$Precision : 7,
		$Type : "Edm.DateTimeOffset",
		__constraints : {precision : 7}
	}, {
		$Type : "Edm.Decimal"
	}, {
		$Precision : 20,
		$Scale : 5,
		$Type : "Edm.Decimal",
		__constraints : {maximum : "100.00", maximumExclusive : true, minimum : "0.00",
			precision : 20, scale : 5}
	}, {
		$Precision : 20,
		$Scale : "variable",
		$Type : "Edm.Decimal",
		__constraints : {precision : 20, scale : Infinity}
	}, {
		$Type : "Edm.Double"
	}, {
		$Type : "Edm.Guid"
	}, {
		$Type : "Edm.Int16"
	}, {
		$Type : "Edm.Int32"
	}, {
		$Type : "Edm.Int64"
	}, {
		$Type : "Edm.SByte"
	}, {
		$Type : "Edm.Single"
	}, {
		$Type : "Edm.Stream"
	}, {
		$Type : "Edm.String"
	}, {
		$MaxLength : 255,
		$Type : "Edm.String",
		__constraints : {maxLength : 255}
	}, {
		$Type : "Edm.String",
		__constraints : {isDigitSequence : true}
	}, {
		$Type : "Edm.TimeOfDay"
	}, {
		$Precision : 3,
		$Type : "Edm.TimeOfDay",
		__constraints : {precision : 3}
	}].forEach(function (oProperty0) {
		// Note: take care not to modify oProperty0, clone it first!
		[false, true].forEach(function (bNullable) {
			// Note: JSON.parse(JSON.stringify(...)) cannot clone Infinity!
			var oProperty = jQuery.extend(true, {}, oProperty0),
				oConstraints = oProperty.__constraints;

			delete oProperty.__constraints;
			if (!bNullable) {
				oProperty.$Nullable = false;
				oConstraints = oConstraints || {};
				oConstraints.nullable = false;
			}

			QUnit.test("fetchUI5Type: " + JSON.stringify(oProperty), function (assert) {
				// Note: just spy on fetchModule() to make sure that the real types are used
				// which check correctness of constraints
				var fnFetchModuleSpy = this.spy(this.oMetaModel, "fetchModule"),
					sPath = "/EMPLOYEES/0/ENTRYDATE",
					oMetaContext = this.oMetaModel.getMetaContext(sPath),
					that = this;

				this.oMetaModelMock.expects("fetchObject").twice()
					.withExactArgs(undefined, oMetaContext)
					.returns(SyncPromise.resolve(oProperty));
				if (oProperty.$Type === "Edm.String") { // simulate annotation for strings
					this.oMetaModelMock.expects("fetchObject")
						.withExactArgs("@com.sap.vocabularies.Common.v1.IsDigitSequence",
							oMetaContext)
						.returns(
							SyncPromise.resolve(oConstraints && oConstraints.isDigitSequence));
				} else if (oProperty.$Type === "Edm.Decimal") { // simulate annotation for decimals
					this.oMetaModelMock.expects("fetchObject")
						.withExactArgs("@Org.OData.Validation.V1.Minimum/$Decimal",
							oMetaContext)
						.returns(
							SyncPromise.resolve(oConstraints && oConstraints.minimum));
					this.oMetaModelMock.expects("fetchObject")
						.withExactArgs(
							"@Org.OData.Validation.V1.Minimum@Org.OData.Validation.V1.Exclusive",
							oMetaContext)
						.returns(
							SyncPromise.resolve(oConstraints && oConstraints.minimumExclusive));
					this.oMetaModelMock.expects("fetchObject")
						.withExactArgs("@Org.OData.Validation.V1.Maximum/$Decimal",
							oMetaContext)
						.returns(
							SyncPromise.resolve(oConstraints && oConstraints.maximum));
					this.oMetaModelMock.expects("fetchObject")
						.withExactArgs(
							"@Org.OData.Validation.V1.Maximum@Org.OData.Validation.V1.Exclusive",
							oMetaContext)
						.returns(
							SyncPromise.resolve(oConstraints && oConstraints.maximumExclusive));
				}

				// code under test
				return
this.oMetaModel.fetchUI5Type(sPath).then(function (oType) { var sExpectedTypeName = "sap.ui.model.odata.type." + oProperty.$Type.slice(4)/*cut off "Edm."*/; assert.strictEqual(fnFetchModuleSpy.callCount, 1); assert.ok(fnFetchModuleSpy.calledOn(that.oMetaModel)); assert.ok(fnFetchModuleSpy.calledWithExactly(sExpectedTypeName), fnFetchModuleSpy.printf("%C")); assert.strictEqual(oType.getName(), sExpectedTypeName); assert.deepEqual(oType.oConstraints, oConstraints); assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached"); }); }); }); }); //TODO later: support for facet DefaultValue? //********************************************************************************************* QUnit.test("fetchUI5Type: $count", function (assert) { var sPath = "/T€AMS/$count", oType; // code under test oType = this.oMetaModel.fetchUI5Type(sPath).getResult(); assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Int64"); assert.strictEqual(this.oMetaModel.getUI5Type(sPath), oType, "cached"); }); //********************************************************************************************* QUnit.test("fetchUI5Type: collection", function (assert) { var sPath = "/EMPLOYEES/0/foo", that = this; this.oMetaModelMock.expects("fetchObject").thrice() .withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath)) .returns(SyncPromise.resolve({ $isCollection : true, $Nullable : false, // must not be turned into a constraint for Raw! $Type : "Edm.String" })); this.oLogMock.expects("warning").withExactArgs( "Unsupported collection type, using sap.ui.model.odata.type.Raw", sPath, sODataMetaModel); return Promise.all([ // code under test this.oMetaModel.fetchUI5Type(sPath).then(function (oType) { assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw"); assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached"); }), // code under test this.oMetaModel.fetchUI5Type(sPath).then(function (oType) { assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw"); }) ]); }); //********************************************************************************************* //TODO make Edm.Duration work with OData V4 ["acme.Type", "Edm.Duration", "Edm.GeographyPoint"].forEach(function (sQualifiedName) { QUnit.test("fetchUI5Type: unsupported type " + sQualifiedName, function (assert) { var sPath = "/EMPLOYEES/0/foo", that = this; this.oMetaModelMock.expects("fetchObject").twice() .withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath)) .returns(SyncPromise.resolve({ $Nullable : false, // must not be turned into a constraint for Raw! 
$Type : sQualifiedName })); this.oLogMock.expects("warning").withExactArgs( "Unsupported type '" + sQualifiedName + "', using sap.ui.model.odata.type.Raw", sPath, sODataMetaModel); // code under test return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) { assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw"); assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "cached"); }); }); }); //********************************************************************************************* QUnit.test("fetchUI5Type: invalid path", function (assert) { var sPath = "/EMPLOYEES/0/invalid", that = this; this.oMetaModelMock.expects("fetchObject").twice() .withExactArgs(undefined, this.oMetaModel.getMetaContext(sPath)) .returns(SyncPromise.resolve(/*no property metadata for path*/)); this.oLogMock.expects("warning").twice().withExactArgs( "No metadata for path '" + sPath + "', using sap.ui.model.odata.type.Raw", undefined, sODataMetaModel); // code under test return this.oMetaModel.fetchUI5Type(sPath).then(function (oType) { assert.strictEqual(oType.getName(), "sap.ui.model.odata.type.Raw"); // code under test assert.strictEqual(that.oMetaModel.getUI5Type(sPath), oType, "Type is cached"); }); }); //********************************************************************************************* QUnit.test("getUI5Type, requestUI5Type", function (assert) { return checkGetAndRequest(this, assert, "fetchUI5Type", ["sPath"], true); }); //********************************************************************************************* [{ // simple entity from a set dataPath : "/TEAMS/0", canonicalUrl : "/TEAMS(~1)", requests : [{ entityType : "tea_busi.TEAM", predicate : "(~1)" }] }, { // simple entity in transient context dataPath : "/TEAMS/-1", canonicalUrl : "/TEAMS(~1)", requests : [{ entityType : "tea_busi.TEAM", // TODO a transient entity does not necessarily have all key properties, but this is // required to create a dependent cache predicate : "(~1)" }] }, { // simple entity by key predicate dataPath : "/TEAMS('4%3D2')", canonicalUrl : "/TEAMS('4%3D2')", requests : [] }, { // simple singleton dataPath : "/Me", canonicalUrl : "/Me", requests : [] }, { // navigation to root entity dataPath : "/TEAMS/0/TEAM_2_EMPLOYEES/1", canonicalUrl : "/EMPLOYEES(~1)", requests : [{ entityType : "tea_busi.Worker", predicate : "(~1)" }] }, { // navigation to root entity dataPath : "/TEAMS('42')/TEAM_2_EMPLOYEES/1", canonicalUrl : "/EMPLOYEES(~1)", requests : [{ entityType : "tea_busi.Worker", predicate : "(~1)" }] }, { // navigation to root entity with key predicate dataPath : "/TEAMS('42')/TEAM_2_EMPLOYEES('23')", canonicalUrl : "/EMPLOYEES('23')", requests : [] }, { // multiple navigation to root entity dataPath : "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM", canonicalUrl : "/T%E2%82%ACAMS(~1)", requests : [{ entityType : "tea_busi.TEAM", predicate : "(~1)" }] }, { // navigation from entity set to single contained entity dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S", canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_S", requests : [{ entityType : "tea_busi.TEAM", path : "/TEAMS/0", predicate : "(~1)" }] }, { // navigation from singleton to single contained entity dataPath : "/Me/EMPLOYEE_2_CONTAINED_S", canonicalUrl : "/Me/EMPLOYEE_2_CONTAINED_S", requests : [] }, { // navigation to contained entity within a collection dataPath : "/TEAMS/0/TEAM_2_CONTAINED_C/1", canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_C(~2)", requests : [{ entityType : "tea_busi.TEAM", path : "/TEAMS/0", predicate : "(~1)" }, { 
entityType : "tea_busi.ContainedC", path : "/TEAMS/0/TEAM_2_CONTAINED_C/1", predicate : "(~2)" }] }, { // navigation to contained entity with a key predicate dataPath : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')", canonicalUrl : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')", requests : [] }, { // navigation from contained entity to contained entity dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1", canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_S/S_2_C(~2)", requests : [{ entityType : "tea_busi.TEAM", path : "/TEAMS/0", predicate : "(~1)" }, { entityType : "tea_busi.ContainedC", path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1", predicate : "(~2)" }] }, { // navigation from contained to root entity // must be appended nevertheless since we only have a type, but no set dataPath : "/TEAMS/0/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE", canonicalUrl : "/TEAMS(~1)/TEAM_2_CONTAINED_C(~2)/C_2_EMPLOYEE", requests : [{ entityType : "tea_busi.TEAM", path : "/TEAMS/0", predicate : "(~1)" }, { entityType : "tea_busi.ContainedC", path : "/TEAMS/0/TEAM_2_CONTAINED_C/5", predicate : "(~2)" }] }, { // navigation from entity w/ key predicate to contained to root entity dataPath : "/TEAMS('42')/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE", canonicalUrl : "/TEAMS('42')/TEAM_2_CONTAINED_C(~1)/C_2_EMPLOYEE", requests : [{ entityType : "tea_busi.ContainedC", path : "/TEAMS('42')/TEAM_2_CONTAINED_C/5", predicate : "(~1)" }] }, { // decode entity set initially, encode it finally dataPath : "/T%E2%82%ACAMS/0", canonicalUrl : "/T%E2%82%ACAMS(~1)", requests : [{ entityType : "tea_busi.TEAM", predicate : "(~1)" }] }, { // decode navigation property, encode entity set when building sCandidate dataPath : "/EMPLOYEES('7')/EMPLOYEE_2_EQUIPM%E2%82%ACNTS(42)", canonicalUrl : "/EQUIPM%E2%82%ACNTS(42)", requests : [] }].forEach(function (oFixture) { QUnit.test("fetchCanonicalPath: " + oFixture.dataPath, function (assert) { var oContext = Context.create(this.oModel, undefined, oFixture.dataPath), oContextMock = this.mock(oContext), oPromise; this.oMetaModelMock.expects("getMetaPath").withExactArgs(oFixture.dataPath) .returns("metapath"); this.oMetaModelMock.expects("fetchObject").withExactArgs("metapath") .returns(SyncPromise.resolve()); this.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); oFixture.requests.forEach(function (oRequest) { var oEntityInstance = {"@$ui5._" : {"predicate" : oRequest.predicate}}; oContextMock.expects("fetchValue") .withExactArgs(oRequest.path || oFixture.dataPath) .returns(SyncPromise.resolve(oEntityInstance)); }); // code under test oPromise = this.oMetaModel.fetchCanonicalPath(oContext); assert.ok(!oPromise.isRejected()); return oPromise.then(function (sCanonicalUrl) { assert.strictEqual(sCanonicalUrl, oFixture.canonicalUrl); }); }); }); //********************************************************************************************* [{ // simple singleton path : "/Me|ID", editUrl : "Me" }, { // simple entity by key predicate path : "/TEAMS('42')|Name", editUrl : "TEAMS('42')" }, { // simple entity from a set path : "/TEAMS/0|Name", fetchPredicates : { "/TEAMS/0" : "tea_busi.TEAM" }, editUrl : "TEAMS(~0)" }, { // simple entity from a set, complex property path : "/EMPLOYEES/0|SAL%C3%83RY/CURRENCY", fetchPredicates : { "/EMPLOYEES/0" : "tea_busi.Worker" }, editUrl : "EMPLOYEES(~0)" }, { // navigation to root entity path : "/TEAMS/0/TEAM_2_EMPLOYEES/1|ID", fetchPredicates : { "/TEAMS/0/TEAM_2_EMPLOYEES/1" : "tea_busi.Worker" }, editUrl : "EMPLOYEES(~0)" }, { // navigation to root entity path : 
"/TEAMS('42')/TEAM_2_EMPLOYEES/1|ID", fetchPredicates : { "/TEAMS('42')/TEAM_2_EMPLOYEES/1" : "tea_busi.Worker" }, editUrl : "EMPLOYEES(~0)" }, { // navigation to root entity with key predicate path : "/TEAMS('42')/TEAM_2_EMPLOYEES('23')|ID", editUrl : "EMPLOYEES('23')" }, { // multiple navigation to root entity path : "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM|Name", fetchPredicates : { "/TEAMS/0/TEAM_2_EMPLOYEES/1/EMPLOYEE_2_TEAM" : "tea_busi.TEAM" }, editUrl : "T%E2%82%ACAMS(~0)" }, { // navigation from entity set to single contained entity path : "/TEAMS/0/TEAM_2_CONTAINED_S|Id", fetchPredicates : { "/TEAMS/0" : "tea_busi.TEAM" }, editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_S" }, { // navigation from singleton to single contained entity path : "/Me/EMPLOYEE_2_CONTAINED_S|Id", editUrl : "Me/EMPLOYEE_2_CONTAINED_S" }, { // navigation to contained entity within a collection path : "/TEAMS/0/TEAM_2_CONTAINED_C/1|Id", fetchPredicates : { "/TEAMS/0" : "tea_busi.TEAM", "/TEAMS/0/TEAM_2_CONTAINED_C/1" : "tea_busi.ContainedC" }, editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_C(~1)" }, { // navigation to contained entity with a key predicate path : "/TEAMS('42')/TEAM_2_CONTAINED_C('foo')|Id", editUrl : "TEAMS('42')/TEAM_2_CONTAINED_C('foo')" }, { // navigation from contained entity to contained entity path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1|Id", fetchPredicates : { "/TEAMS/0" : "tea_busi.TEAM", "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_C/1" : "tea_busi.ContainedC" }, editUrl : "TEAMS(~0)/TEAM_2_CONTAINED_S/S_2_C(~1)" }, { // navigation from contained to root entity, resolved via navigation property binding path path : "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_EMPLOYEE|ID", fetchPredicates : { "/TEAMS/0/TEAM_2_CONTAINED_S/S_2_EMPLOYEE" : "tea_busi.Worker" }, editUrl : "EMPLOYEES(~0)" }, { // navigation from entity w/ key predicate to contained to root entity path : "/TEAMS('42')/TEAM_2_CONTAINED_C/5/C_2_EMPLOYEE|ID", fetchPredicates : { "/TEAMS('42')/TEAM_2_CONTAINED_C/5" : "tea_busi.ContainedC" }, editUrl : "TEAMS('42')/TEAM_2_CONTAINED_C(~0)/C_2_EMPLOYEE" }, { // decode entity set initially, encode it finally path : "/T%E2%82%ACAMS/0|Name", fetchPredicates : { "/T%E2%82%ACAMS/0" : "tea_busi.TEAM" }, editUrl : "T%E2%82%ACAMS(~0)" }, { // decode navigation property, encode entity set path : "/EMPLOYEES('7')/EMPLOYEE_2_EQUIPM%E2%82%ACNTS(42)|ID", editUrl : "EQUIPM%E2%82%ACNTS(42)" }].forEach(function (oFixture) { QUnit.test("fetchUpdateData: " + oFixture.path, function (assert) { var i = oFixture.path.indexOf("|"), sContextPath = oFixture.path.slice(0, i), sPropertyPath = oFixture.path.slice(i + 1), oContext = Context.create(this.oModel, undefined, sContextPath), oContextMock = this.mock(oContext), oPromise, that = this; this.oMetaModelMock.expects("getMetaPath") .withExactArgs(oFixture.path.replace("|", "/")).returns("~"); this.oMetaModelMock.expects("fetchObject").withExactArgs("~") .returns(SyncPromise.resolve(Promise.resolve()).then(function () { that.oMetaModelMock.expects("fetchEntityContainer") .returns(SyncPromise.resolve(mScope)); Object.keys(oFixture.fetchPredicates || {}).forEach(function (sPath, i) { var oEntityInstance = {"@$ui5._" : {"predicate" : "(~" + i + ")"}}; // Note: the entity instance is delivered asynchronously oContextMock.expects("fetchValue") .withExactArgs(sPath) .returns(SyncPromise.resolve(Promise.resolve(oEntityInstance))); }); })); // code under test oPromise = this.oMetaModel.fetchUpdateData(sPropertyPath, oContext); assert.ok(!oPromise.isRejected()); return oPromise.then(function (oResult) 
{ assert.strictEqual(oResult.editUrl, oFixture.editUrl); assert.strictEqual(oResult.entityPath, sContextPath); assert.strictEqual(oResult.propertyPath, sPropertyPath); }); }); }); //TODO support collection properties (-> path containing index not leading to predicate) //TODO prefer instance annotation at payload for "odata.editLink"?! //TODO target URLs like "com.sap.gateway.default.iwbep.tea_busi_product.v0001.Container/Products(...)"? //TODO type casts, operations? //********************************************************************************************* QUnit.test("fetchUpdateData: transient entity", function(assert) { var oContext = Context.create(this.oModel, undefined, "/TEAMS/-1"), sPropertyPath = "Name"; this.oMetaModelMock.expects("fetchEntityContainer").twice() .returns(SyncPromise.resolve(mScope)); this.mock(oContext).expects("fetchValue").withExactArgs("/TEAMS/-1") .returns(SyncPromise.resolve({"@$ui5._" : {"transient" : "update"}})); // code under test return this.oMetaModel.fetchUpdateData(sPropertyPath, oContext).then(function (oResult) { assert.deepEqual(oResult, { entityPath : "/TEAMS/-1", editUrl : undefined, propertyPath : "Name" }); }); }); //********************************************************************************************* QUnit.test("fetchUpdateData: fetchObject fails", function(assert) { var oModel = this.oModel, oContext = { getModel : function () { return oModel; } }, oExpectedError = new Error(), oMetaModelMock = this.mock(this.oMetaModel), sPath = "some/invalid/path/to/a/property"; this.mock(oModel).expects("resolve") .withExactArgs(sPath, sinon.match.same(oContext)) .returns("~1"); oMetaModelMock.expects("getMetaPath").withExactArgs("~1").returns("~2"); oMetaModelMock.expects("fetchObject").withExactArgs("~2") .returns(Promise.reject(oExpectedError)); // code under test return this.oMetaModel.fetchUpdateData(sPath, oContext).then(function () { assert.ok(false); }, function (oError) { assert.strictEqual(oError, oExpectedError); }); }); //********************************************************************************************* [{ dataPath : "/Foo/Bar", message : "Not an entity set: Foo", warning : "Unknown child Foo of tea_busi.DefaultContainer" }, { dataPath : "/TEAMS/0/Foo/Bar", message : "Not a (navigation) property: Foo" }, { dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S", instance : undefined, message : "No instance to calculate key predicate at /TEAMS/0" }, { dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S", instance : {}, message : "No key predicate known at /TEAMS/0" }, { dataPath : "/TEAMS/0/TEAM_2_CONTAINED_S", instance : new Error("failed to load team"), message : "failed to load team at /TEAMS/0" }].forEach(function (oFixture) { QUnit.test("fetchUpdateData: " + oFixture.message, function (assert) { var oContext = Context.create(this.oModel, undefined, oFixture.dataPath), oPromise; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope)); if ("instance" in oFixture) { this.mock(oContext).expects("fetchValue") .returns(oFixture.instance instanceof Error ? 
SyncPromise.reject(oFixture.instance) : SyncPromise.resolve(oFixture.instance)); } if (oFixture.warning) { this.oLogMock.expects("isLoggable") .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel) .returns(true); this.oLogMock.expects("warning") .withExactArgs(oFixture.warning, oFixture.dataPath, sODataMetaModel); } this.mock(this.oModel).expects("reportError") .withExactArgs(oFixture.message, sODataMetaModel, sinon.match({ message : oFixture.dataPath + ": " + oFixture.message, name : "Error" })); oPromise = this.oMetaModel.fetchUpdateData("", oContext); assert.ok(oPromise.isRejected()); assert.strictEqual(oPromise.getResult().message, oFixture.dataPath + ": " + oFixture.message); oPromise.caught(); // avoid "Uncaught (in promise)" }); }); //********************************************************************************************* QUnit.test("fetchCanonicalPath: success", function(assert) { var oContext = {}; this.mock(this.oMetaModel).expects("fetchUpdateData") .withExactArgs("", sinon.match.same(oContext)) .returns(SyncPromise.resolve(Promise.resolve({ editUrl : "edit('URL')", propertyPath : "" }))); // code under test return this.oMetaModel.fetchCanonicalPath(oContext).then(function (oCanonicalPath) { assert.strictEqual(oCanonicalPath, "/edit('URL')"); }); }); //********************************************************************************************* QUnit.test("fetchCanonicalPath: not an entity", function(assert) { var oContext = { getPath : function () { return "/TEAMS('4711')/Name"; } }; this.mock(this.oMetaModel).expects("fetchUpdateData") .withExactArgs("", sinon.match.same(oContext)) .returns(SyncPromise.resolve(Promise.resolve({ entityPath : "/TEAMS('4711')", editUrl : "TEAMS('4711')", propertyPath : "Name" }))); // code under test return this.oMetaModel.fetchCanonicalPath(oContext).then(function () { assert.ok(false); }, function (oError) { assert.strictEqual(oError.message, "Context " + oContext.getPath() + " does not point to an entity. 
It should be " + "/TEAMS('4711')"); }); }); //********************************************************************************************* QUnit.test("fetchCanonicalPath: fetchUpdateData fails", function(assert) { var oContext = {}, oExpectedError = new Error(); this.mock(this.oMetaModel).expects("fetchUpdateData") .withExactArgs("", sinon.match.same(oContext)) .returns(SyncPromise.resolve(Promise.reject(oExpectedError))); // code under test return this.oMetaModel.fetchCanonicalPath(oContext).then(function () { assert.ok(false); }, function (oError) { assert.strictEqual(oError, oExpectedError); }); }); //********************************************************************************************* QUnit.test("getProperty = getObject", function (assert) { assert.strictEqual(this.oMetaModel.getProperty, this.oMetaModel.getObject); }); //********************************************************************************************* QUnit.test("bindProperty", function (assert) { var oBinding, oContext = {}, mParameters = {}, sPath = "foo"; // code under test oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters); assert.ok(oBinding instanceof PropertyBinding); assert.ok(oBinding.hasOwnProperty("vValue")); assert.strictEqual(oBinding.getContext(), oContext); assert.strictEqual(oBinding.getModel(), this.oMetaModel); assert.strictEqual(oBinding.getPath(), sPath); assert.strictEqual(oBinding.mParameters, mParameters, "mParameters available internally"); assert.strictEqual(oBinding.getValue(), undefined); // code under test: must not call getProperty() again! assert.strictEqual(oBinding.getExternalValue(), undefined); // code under test assert.throws(function () { oBinding.setExternalValue("foo"); }, /Unsupported operation: ODataMetaPropertyBinding#setValue/); }); //********************************************************************************************* [undefined, {}, {$$valueAsPromise : false}].forEach(function (mParameters, i) { QUnit.test("ODataMetaPropertyBinding#checkUpdate: " + i, function (assert) { var oBinding, oContext = {}, sPath = "foo", oValue = {}, oPromise = SyncPromise.resolve(Promise.resolve(oValue)); oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters); this.oMetaModelMock.expects("fetchObject") .withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters)) .returns(oPromise); this.mock(oBinding).expects("_fireChange") .withExactArgs({reason : ChangeReason.Change}); // code under test oBinding.checkUpdate(); assert.strictEqual(oBinding.getValue(), undefined); oPromise.then(function () { assert.strictEqual(oBinding.getValue(), oValue); }); return oPromise; }); }); //********************************************************************************************* QUnit.test("ODataMetaPropertyBinding#checkUpdate: $$valueAsPromise=true, sync", function (assert) { var oBinding, oContext = {}, mParameters = {$$valueAsPromise : true}, sPath = "foo", oValue = {}, oPromise = SyncPromise.resolve(oValue); oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters); this.oMetaModelMock.expects("fetchObject") .withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters)) .returns(oPromise); this.mock(oBinding).expects("_fireChange").withExactArgs({reason : ChangeReason.Change}); // code under test oBinding.checkUpdate(); assert.strictEqual(oBinding.getValue(), oValue, "Value sync"); return oPromise; }); //********************************************************************************************* 
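
	// Illustrative sketch (kept as a comment, not executed by this suite): one way an application
	// might consume an ODataMetaPropertyBinding together with the "$$valueAsPromise" parameter
	// exercised by the surrounding tests. The meta path "/EMPLOYEES/AGE/$Type", the variable name
	// oMetaModel (any v4 model's meta model) and the log output are assumptions made up for this
	// example only, not requirements of the API.
	//
	//   var oTypeBinding = oMetaModel.bindProperty("/EMPLOYEES/AGE/$Type", null,
	//           {$$valueAsPromise : true});
	//
	//   oTypeBinding.attachChange(function () {
	//       // with $$valueAsPromise the first "change" may still deliver a pending SyncPromise;
	//       // a further "change" follows once the metadata value has been read
	//       Promise.resolve(oTypeBinding.getValue()).then(function (sEdmType) {
	//           jQuery.sap.log.info("Edm type of AGE: " + sEdmType);
	//       });
	//   });
	//   oTypeBinding.checkUpdate();
	//*********************************************************************************************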
QUnit.test("ODataMetaPropertyBinding#checkUpdate: no event", function (assert) { var oBinding, oContext = {}, mParameters = {}, sPath = "foo", oValue = {}, oPromise = SyncPromise.resolve(Promise.resolve(oValue)); oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters); oBinding.vValue = oValue; this.oMetaModelMock.expects("fetchObject") .withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters)) .returns(oPromise); this.mock(oBinding).expects("_fireChange").never(); // code under test oBinding.checkUpdate(); return oPromise; }); //********************************************************************************************* QUnit.test("ODataMetaPropertyBinding#checkUpdate: bForceUpdate, sChangeReason", function (assert) { var oBinding, oContext = {}, mParameters = {}, sPath = "foo", oValue = {}, oPromise = SyncPromise.resolve(Promise.resolve(oValue)); oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters); oBinding.vValue = oValue; this.oMetaModelMock.expects("fetchObject") .withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters)) .returns(oPromise); this.mock(oBinding).expects("_fireChange").withExactArgs({reason : "Foo"}); // code under test oBinding.checkUpdate(true, "Foo"); return oPromise; }); //********************************************************************************************* QUnit.test("ODataMetaPropertyBinding#checkUpdate: $$valueAsPromise = true", function (assert) { var oBinding, oContext = {}, mParameters = { $$valueAsPromise : true }, sPath = "foo", oValue = {}, oPromise = SyncPromise.resolve(Promise.resolve(oValue)); oBinding = this.oMetaModel.bindProperty(sPath, oContext, mParameters); oBinding.vValue = oValue; this.oMetaModelMock.expects("fetchObject") .withExactArgs(sPath, sinon.match.same(oContext), sinon.match.same(mParameters)) .returns(oPromise); this.mock(oBinding).expects("_fireChange") .withExactArgs({reason : "Foo"}) .twice() .onFirstCall().callsFake(function () { assert.ok(oBinding.getValue().isPending(), "Value is still a pending SyncPromise"); }) .onSecondCall().callsFake(function () { assert.strictEqual(oBinding.getValue(), oValue, "Value resolved"); }); // code under test oBinding.checkUpdate(false, "Foo"); assert.ok(oBinding.getValue().isPending(), "Value is a pending SyncPromise"); return oBinding.getValue().then(function (oResult) { assert.strictEqual(oResult, oValue); assert.strictEqual(oBinding.getValue(), oValue); }); }); //********************************************************************************************* QUnit.test("ODataMetaPropertyBinding#setContext", function (assert) { var oBinding, oBindingMock, oContext = {}; oBinding = this.oMetaModel.bindProperty("Foo", oContext); oBindingMock = this.mock(oBinding); oBindingMock.expects("checkUpdate").never(); // code under test oBinding.setContext(oContext); oBindingMock.expects("checkUpdate").withExactArgs(false, ChangeReason.Context); // code under test oBinding.setContext(undefined); assert.strictEqual(oBinding.getContext(), undefined); oBinding = this.oMetaModel.bindProperty("/Foo"); this.mock(oBinding).expects("checkUpdate").never(); // code under test oBinding.setContext(oContext); }); //********************************************************************************************* ["ENTRYDATE", "/EMPLOYEES/ENTRYDATE"].forEach(function (sPath) { QUnit.test("bindContext: " + sPath, function (assert) { var bAbsolutePath = sPath[0] === "/", oBinding, oBoundContext, iChangeCount = 0, oContext = 
this.oMetaModel.getMetaContext("/EMPLOYEES"), oContextCopy = this.oMetaModel.getMetaContext("/EMPLOYEES"), oNewContext = this.oMetaModel.getMetaContext("/T€AMS"); // without context oBinding = this.oMetaModel.bindContext(sPath, null); assert.ok(oBinding instanceof ContextBinding); assert.strictEqual(oBinding.getModel(), this.oMetaModel); assert.strictEqual(oBinding.getPath(), sPath); assert.strictEqual(oBinding.getContext(), null); assert.strictEqual(oBinding.isInitial(), true); assert.strictEqual(oBinding.getBoundContext(), null); // with context oBinding = this.oMetaModel.bindContext(sPath, oContextCopy); assert.ok(oBinding instanceof ContextBinding); assert.strictEqual(oBinding.getModel(), this.oMetaModel); assert.strictEqual(oBinding.getPath(), sPath); assert.strictEqual(oBinding.getContext(), oContextCopy); assert.strictEqual(oBinding.isInitial(), true); assert.strictEqual(oBinding.getBoundContext(), null); // setContext ********** oBinding.attachChange(function (oEvent) { assert.strictEqual(oEvent.getId(), "change"); iChangeCount += 1; }); // code under test oBinding.setContext(oContext); assert.strictEqual(iChangeCount, 0, "still initial"); assert.strictEqual(oBinding.isInitial(), true); assert.strictEqual(oBinding.getBoundContext(), null); assert.strictEqual(oBinding.getContext(), oContext); // code under test oBinding.initialize(); assert.strictEqual(iChangeCount, 1, "ManagedObject relies on 'change' event!"); assert.strictEqual(oBinding.isInitial(), false); oBoundContext = oBinding.getBoundContext(); assert.strictEqual(oBoundContext.getModel(), this.oMetaModel); assert.strictEqual(oBoundContext.getPath(), bAbsolutePath ? sPath : oContext.getPath() + "/" + sPath); // code under test - same context oBinding.setContext(oContext); assert.strictEqual(iChangeCount, 1, "context unchanged"); assert.strictEqual(oBinding.getBoundContext(), oBoundContext); // code under test oBinding.setContext(oContextCopy); assert.strictEqual(iChangeCount, 1, "context unchanged"); assert.strictEqual(oBinding.getBoundContext(), oBoundContext); // code under test // Note: checks equality on resolved path, not simply object identity of context! 
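			// Descriptive note: oNewContext resolves to "/T€AMS" while oContext and oContextCopy
			// both resolve to "/EMPLOYEES", so only a relative binding sees a new resolved path
			// here (and fires one more "change"); an absolute binding keeps its bound context.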
oBinding.setContext(oNewContext); if (bAbsolutePath) { assert.strictEqual(iChangeCount, 1, "context unchanged"); assert.strictEqual(oBinding.getBoundContext(), oBoundContext); } else { assert.strictEqual(iChangeCount, 2, "context changed"); oBoundContext = oBinding.getBoundContext(); assert.strictEqual(oBoundContext.getModel(), this.oMetaModel); assert.strictEqual(oBoundContext.getPath(), oNewContext.getPath() + "/" + sPath); } // code under test oBinding.setContext(null); if (bAbsolutePath) { assert.strictEqual(iChangeCount, 1, "context unchanged"); assert.strictEqual(oBinding.getBoundContext(), oBoundContext); } else { assert.strictEqual(iChangeCount, 3, "context changed"); assert.strictEqual(oBinding.isInitial(), false); assert.strictEqual(oBinding.getBoundContext(), null); } }); }); //********************************************************************************************* QUnit.test("bindList", function (assert) { var oBinding, oContext = this.oMetaModel.getContext("/EMPLOYEES"), aFilters = [], sPath = "@", aSorters = []; // avoid request to backend during initialization this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve()); // code under test oBinding = this.oMetaModel.bindList(sPath, oContext, aSorters, aFilters); assert.ok(oBinding instanceof ClientListBinding); assert.strictEqual(oBinding.getModel(), this.oMetaModel); assert.strictEqual(oBinding.getPath(), sPath); assert.strictEqual(oBinding.getContext(), oContext); assert.strictEqual(oBinding.aSorters, aSorters); assert.strictEqual(oBinding.aApplicationFilters, aFilters); }); //********************************************************************************************* QUnit.test("ODataMetaListBinding#setContexts", function (assert) { var oBinding, oBindingMock, oContext = this.oMetaModel.getContext("/EMPLOYEES"), aContexts = [], sPath = "path"; // avoid request to backend during initialization this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve()); oBinding = this.oMetaModel.bindList(sPath, oContext); oBindingMock = this.mock(oBinding); oBindingMock.expects("updateIndices").withExactArgs(); oBindingMock.expects("applyFilter").withExactArgs(); oBindingMock.expects("applySort").withExactArgs(); oBindingMock.expects("_getLength").withExactArgs().returns(42); // code under test oBinding.setContexts(aContexts); assert.strictEqual(oBinding.oList, aContexts); assert.strictEqual(oBinding.iLength, 42); }); //********************************************************************************************* QUnit.test("ODataMetaListBinding#update (sync)", function (assert) { var oBinding, oBindingMock, oContext = this.oMetaModel.getContext("/EMPLOYEES"), aContexts = [{}], sPath = "path"; // avoid request to backend during initialization this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve()); oBinding = this.oMetaModel.bindList(sPath, oContext); oBindingMock = this.mock(oBinding); oBindingMock.expects("fetchContexts").withExactArgs() .returns(SyncPromise.resolve(aContexts)); oBindingMock.expects("setContexts").withExactArgs(sinon.match.same(aContexts)); oBindingMock.expects("_fireChange").never(); // code under test oBinding.update(); }); //********************************************************************************************* QUnit.test("ODataMetaListBinding#update (async)", function (assert) { var oBinding, oBindingMock, oContext = this.oMetaModel.getContext("/EMPLOYEES"), aContexts = [{}], sPath = "path", oFetchPromise = SyncPromise.resolve(Promise.resolve()).then(function 
() { // This is expected to happen after the promise is resolved oBindingMock.expects("setContexts").withExactArgs(sinon.match.same(aContexts)); oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change}); return aContexts; }); // avoid request to backend during initialization this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve()); oBinding = this.oMetaModel.bindList(sPath, oContext); oBindingMock = this.mock(oBinding); oBindingMock.expects("fetchContexts").withExactArgs().returns(oFetchPromise); oBindingMock.expects("setContexts").withExactArgs([]); oBindingMock.expects("_fireChange").never(); // initially // code under test oBinding.update(); return oFetchPromise; }); //********************************************************************************************* QUnit.test("ODataMetaListBinding#checkUpdate", function (assert) { var oBinding, oBindingMock, oContext = this.oMetaModel.getContext("/"), sPath = ""; // avoid request to backend during initialization this.oMetaModelMock.expects("fetchObject").returns(SyncPromise.resolve()); oBinding = this.oMetaModel.bindList(sPath, oContext); oBindingMock = this.mock(oBinding); this.mock(oBinding).expects("update").thrice().callsFake(function () { this.oList = [{/*a context*/}]; }); oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change}); // code under test oBinding.checkUpdate(); // code under test: The second call must call update, but not fire an event oBinding.checkUpdate(); oBindingMock.expects("_fireChange").withExactArgs({reason : ChangeReason.Change}); // code under test: Must fire a change event oBinding.checkUpdate(true); }); //********************************************************************************************* QUnit.test("ODataMetaListBinding#getContexts, getCurrentContexts", function (assert) { var oBinding, oMetaModel = this.oMetaModel, // instead of "that = this" oContext = oMetaModel.getMetaContext("/EMPLOYEES"), sPath = ""; function assertContextPaths(aContexts, aPaths) { assert.notOk("diff" in aContexts, "extended change detection is ignored"); assert.deepEqual(aContexts.map(function (oContext) { assert.strictEqual(oContext.getModel(), oMetaModel); return oContext.getPath().replace("/EMPLOYEES/", ""); }), aPaths); assert.deepEqual(oBinding.getCurrentContexts(), aContexts); } this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope)); oBinding = oMetaModel.bindList(sPath, oContext); // code under test: should be ignored oBinding.enableExtendedChangeDetection(); assertContextPaths(oBinding.getContexts(0, 2), ["ID", "AGE"]); assertContextPaths(oBinding.getContexts(1, 2), ["AGE", "EMPLOYEE_2_CONTAINED_S"]); assertContextPaths(oBinding.getContexts(), ["ID", "AGE", "EMPLOYEE_2_CONTAINED_S", "EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "SALÃRY"]); assertContextPaths(oBinding.getContexts(0, 10), ["ID", "AGE", "EMPLOYEE_2_CONTAINED_S", "EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "SALÃRY"]); oMetaModel.setSizeLimit(2); assertContextPaths(oBinding.getContexts(), ["ID", "AGE"]); oBinding.attachEvent("sort", function () { assert.ok(false, "unexpected sort event"); }); oMetaModel.setSizeLimit(100); oBinding.sort(new Sorter("@sapui.name")); assertContextPaths(oBinding.getContexts(), ["AGE", "EMPLOYEE_2_CONTAINED_S", "EMPLOYEE_2_EQUIPM€NTS", "EMPLOYEE_2_TEAM", "ID", "SALÃRY"]); oBinding.attachEvent("filter", function () { assert.ok(false, "unexpected filter event"); }); oBinding.filter(new Filter("$kind", "EQ", "Property")); 
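		// Descriptive note: the filter keeps only structural properties ($kind === "Property"),
		// so the navigation properties EMPLOYEE_2_* drop out of the sorted list and only
		// AGE, ID and SALÃRY remain in the result checked below.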
assertContextPaths(oBinding.getContexts(), ["AGE", "ID", "SALÃRY"]); }); //********************************************************************************************* [{ contextPath : undefined, metaPath : "@", result : [] }, { // <template:repeat list="{entitySet>}" ...> // Iterate all OData path segments, i.e. (navigation) properties. // Implicit $Type insertion happens here! //TODO support for $BaseType contextPath : "/EMPLOYEES", metaPath : "", result : [ "/EMPLOYEES/ID", "/EMPLOYEES/AGE", "/EMPLOYEES/EMPLOYEE_2_CONTAINED_S", "/EMPLOYEES/EMPLOYEE_2_EQUIPM€NTS", "/EMPLOYEES/EMPLOYEE_2_TEAM", "/EMPLOYEES/SALÃRY" ] }, { // <template:repeat list="{meta>EMPLOYEES}" ...> // same as before, but with non-empty path contextPath : "/", metaPath : "EMPLOYEES", result : [ "/EMPLOYEES/ID", "/EMPLOYEES/AGE", "/EMPLOYEES/EMPLOYEE_2_CONTAINED_S", "/EMPLOYEES/EMPLOYEE_2_EQUIPM€NTS", "/EMPLOYEES/EMPLOYEE_2_TEAM", "/EMPLOYEES/SALÃRY" ] }, { // <template:repeat list="{meta>/}" ...> // Iterate all OData path segments, i.e. entity sets and imports. // Implicit scope lookup happens here! metaPath : "/", result :[ "/ChangeManagerOfTeam", "/EMPLOYEES", "/EQUIPM€NTS", "/GetEmployeeMaxAge", "/Me", "/OverloadedAction", "/TEAMS", "/T€AMS", "/VoidAction" ] }, { // <template:repeat list="{property>@}" ...> // Iterate all external targeting annotations. contextPath : "/T€AMS/Team_Id", metaPath : "@", result : [ "/T€AMS/[email protected]", "/T€AMS/[email protected]", "/T€AMS/[email protected]@UI.TextArrangement" ] }, { // <template:repeat list="{property>@}" ...> // Iterate all external targeting annotations. contextPath : "/T€AMS/Name", metaPath : "@", result : [] }, { // <template:repeat list="{field>./@}" ...> // Iterate all inline annotations. contextPath : "/T€AMS/$Type/@UI.LineItem/0", metaPath : "./@", result : [ "/T€AMS/$Type/@UI.LineItem/0/@UI.Importance" ] }, { // <template:repeat list="{at>}" ...> // Iterate all inline annotations (edge case with empty relative path). contextPath : "/T€AMS/$Type/@UI.LineItem/0/@", metaPath : "", result : [ "/T€AMS/$Type/@UI.LineItem/0/@UI.Importance" ] }, { contextPath : undefined, metaPath : "/Unknown", result : [], warning : ["Unknown child Unknown of tea_busi.DefaultContainer", "/Unknown/"] }].forEach(function (oFixture) { var sPath = oFixture.contextPath ? 
oFixture.contextPath + "|"/*make cut more visible*/ + oFixture.metaPath : oFixture.metaPath; QUnit.test("ODataMetaListBinding#fetchContexts (sync): " + sPath, function (assert) { var oBinding, oMetaModel = this.oMetaModel, // instead of "that = this" oContext = oFixture.contextPath && oMetaModel.getContext(oFixture.contextPath); if (oFixture.warning) { // Note that _getContexts is called twice in this test: once from bindList via the // constructor, once directly from the test this.oLogMock.expects("isLoggable").twice() .withExactArgs(jQuery.sap.log.Level.WARNING, sODataMetaModel) .returns(true); this.oLogMock.expects("warning").twice() .withExactArgs(oFixture.warning[0], oFixture.warning[1], sODataMetaModel); } this.oMetaModelMock.expects("fetchEntityContainer").atLeast(0) .returns(SyncPromise.resolve(mScope)); oBinding = this.oMetaModel.bindList(oFixture.metaPath, oContext); // code under test assert.deepEqual(oBinding.fetchContexts().getResult().map(function (oContext) { assert.strictEqual(oContext.getModel(), oMetaModel); return oContext.getPath(); }), oFixture.result); }); }); //********************************************************************************************* QUnit.test("ODataMetaListBinding#fetchContexts (async)", function (assert) { var oBinding, oMetaModel = this.oMetaModel, sPath = "/foo"; // Note that fetchObject is called twice in this test: once from bindList via the // constructor, once from fetchContexts this.oMetaModelMock.expects("fetchObject").twice() .withExactArgs(sPath + "/") .returns(SyncPromise.resolve(Promise.resolve({bar: "", baz: ""}))); oBinding = this.oMetaModel.bindList(sPath); return oBinding.fetchContexts().then(function (oResult) { assert.deepEqual(oResult.map(function (oContext) { assert.strictEqual(oContext.getModel(), oMetaModel); return oContext.getPath(); }), ["/foo/bar", "/foo/baz"]); }); }); //TODO iterate mix of inline and external targeting annotations //TODO iterate annotations like "foo@..." for our special cases, e.g. annotations of annotation //********************************************************************************************* QUnit.test("events", function (assert) { assert.throws(function () { this.oMetaModel.attachParseError(); }, new Error("Unsupported event 'parseError': v4.ODataMetaModel#attachEvent")); assert.throws(function () { this.oMetaModel.attachRequestCompleted(); }, new Error("Unsupported event 'requestCompleted': v4.ODataMetaModel#attachEvent")); assert.throws(function () { this.oMetaModel.attachRequestFailed(); }, new Error("Unsupported event 'requestFailed': v4.ODataMetaModel#attachEvent")); assert.throws(function () { this.oMetaModel.attachRequestSent(); }, new Error("Unsupported event 'requestSent': v4.ODataMetaModel#attachEvent")); }); //********************************************************************************************* QUnit.test("validate: mSchema2MetadataUrl", function (assert) { var mScope = { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "A.", "A.A." ] }, "/B/$metadata" : { "$Include" : [ "B.", "B.B." ] }, "/C/$metadata" : { "$Include" : ["C."] }, "../../../../default/iwbep/tea_busi_product/0001/$metadata" : { "$Include" : [ "tea_busi_product." ] } } }, sUrl = "/~/$metadata"; assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {}); // simulate a previous reference to a schema with the _same_ reference URI --> allowed! 
this.oMetaModel.mSchema2MetadataUrl["A."] = {"/A/$metadata" : false}; // simulate a previous reference to a schema with the _different_ reference URI // --> allowed as long as the document is not yet read (and will never be read) this.oMetaModel.mSchema2MetadataUrl["B.B."] = {"/B/V2/$metadata" : false}; // simulate a previous reference to a schema with the _same_ reference URI, already loaded this.oMetaModel.mSchema2MetadataUrl["C."] = {"/C/$metadata" : true}; // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mScope), mScope); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, { "A." : {"/A/$metadata" : false}, "A.A." : {"/A/$metadata" : false}, "B." : {"/B/$metadata" : false}, "B.B." : { "/B/$metadata" : false, "/B/V2/$metadata" : false }, "C." : {"/C/$metadata" : true}, "tea_busi_product." : {"/a/default/iwbep/tea_busi_product/0001/$metadata" : false} }); }); //********************************************************************************************* QUnit.test("getLastModified", function (assert) { var mEmptyScope = { "$Version" : "4.0" }, mNewScope = { "$Version" : "4.0", "$Date" : "Tue, 18 Apr 2017 14:40:29 GMT" }, iNow = Date.now(), mOldScope = { "$Version" : "4.0", "$Date" : "Tue, 18 Apr 2017 14:40:29 GMT", // $LastModified wins! "$LastModified" : "Fri, 07 Apr 2017 11:21:50 GMT" }, mOldScopeClone = clone(mOldScope), sUrl = "/~/$metadata"; // Note: in real life, each URL is read at most once! // code under test (together with c'tor) assert.strictEqual(this.oMetaModel.getLastModified().getTime(), 0, "initial value"); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mOldScope), mOldScope); assert.strictEqual(this.oMetaModel.getLastModified().toISOString(), "2017-04-07T11:21:50.000Z", "old $LastModified is used"); assert.notOk("$LastModified" in mOldScope); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mNewScope), mNewScope); assert.strictEqual(this.oMetaModel.getLastModified().toISOString(), "2017-04-18T14:40:29.000Z", "new $Date is used"); assert.notOk("$Date" in mNewScope); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mOldScopeClone), mOldScopeClone); assert.strictEqual(this.oMetaModel.getLastModified().toISOString(), "2017-04-18T14:40:29.000Z", "new $Date wins, old $LastModified is ignored"); assert.notOk("$LastModified" in mOldScopeClone); // code under test assert.strictEqual(this.oMetaModel.validate(sUrl, mEmptyScope), mEmptyScope); assert.ok(this.oMetaModel.getLastModified().getTime() >= iNow, "missing $Date/$LastModified is like 'now': " + this.oMetaModel.getLastModified()); }); //********************************************************************************************* QUnit.test("getETags", function (assert) { var sETag = 'W/"..."', mETags, that = this; function codeUnderTest(sUrl, mScope) { // code under test assert.strictEqual(that.oMetaModel.validate(sUrl, mScope), mScope); assert.notOk("$ETag" in mScope); assert.notOk("$LastModified" in mScope); } // code under test (together with c'tor) assert.deepEqual(this.oMetaModel.getETags(), {}, "initial value"); codeUnderTest("/~/A", { "$Version" : "4.0", "$LastModified" : "Fri, 07 Apr 2017 11:21:50 GMT" }); codeUnderTest("/~/B", { "$Version" : "4.0", "$LastModified" : "Tue, 18 Apr 2017 14:40:29 GMT" }); codeUnderTest("/~/C", { "$Version" : "4.0" }); codeUnderTest("/~/D", { "$Version" : "4.0", "$ETag" : sETag }); // code under test mETags = this.oMetaModel.getETags(); assert.deepEqual(mETags, { "/~/A" : new Date(Date.UTC(2017, 3, 7, 11, 
21, 50)), "/~/B" : new Date(Date.UTC(2017, 3, 18, 14, 40, 29)), "/~/C" : null, "/~/D" : sETag // wins over null! }); }); //********************************************************************************************* [{ message : "Unsupported IncludeAnnotations", scope : { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "A." ] }, "/B/$metadata" : { "$IncludeAnnotations" : [{ "$TermNamespace" : "com.sap.vocabularies.Common.v1" }] } } } }, { message : "A schema cannot span more than one document: tea_busi." + " - is both included and defined", scope : { "$Version" : "4.0", "$Reference" : { "/B/$metadata" : { "$Include" : [ "foo.", "tea_busi." ] } }, "tea_busi." : { "$kind" : "Schema" } } }, { message : "A schema cannot span more than one document: existing." + " - expected reference URI /B/v1/$metadata but instead saw /B/v2/$metadata", scope : { "$Version" : "4.0", "$Reference" : { "/A/$metadata" : { "$Include" : [ "foo.", "bar." ] }, "/B/v2/$metadata" : { "$Include" : [ "baz.", "existing." ] } } } }].forEach(function (oFixture) { [false, true].forEach(function (bSupportReferences) { var sMessage = oFixture.message, sTitle = "validate: " + sMessage + ", supportReferences: " + bSupportReferences; QUnit.test(sTitle, function (assert) { var sUrl = "/~/$metadata", that = this; function codeUnderTest() { var oResult = that.oMetaModel.validate(sUrl, oFixture.scope); assert.strictEqual(oResult, oFixture.scope); } this.oMetaModel.bSupportReferences = bSupportReferences; // simulate a schema that has been loaded or referenced before this.oMetaModel.mSchema2MetadataUrl = { // simulate schema that is already read "existing." : {"/B/v1/$metadata" : true} }; if (bSupportReferences) { this.oLogMock.expects("error") .withExactArgs(sMessage, sUrl, sODataMetaModel); } if (bSupportReferences) { assert.throws(codeUnderTest, new Error(sUrl + ": " + sMessage)); } else { codeUnderTest(); } }); }); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: without annotation files", function (assert) { // Note: target elements have been omitted for brevity var mExpectedAnnotations = { "same.target" : { "@Common.Description" : "", "@Common.Label" : { "old" : true // Note: no aggregation of properties here! }, "@Common.Text" : "" }, "another.target" : { "@Common.Label" : "" } }, mScope = { "A." : { "$kind" : "Schema", "$Annotations" : { "same.target" : { "@Common.Label" : { "old" : true }, "@Common.Text" : "" } } }, "B." : { "$kind" : "Schema", "$Annotations" : { "same.target" : { "@Common.Description" : "", "@Common.Label" : { // illegal overwrite within $metadata, ignored! "new" : true } }, "another.target" : { "@Common.Label" : "" } } }, "B.B" : {} }; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {}); // code under test this.oMetaModel._mergeAnnotations(mScope, []); assert.deepEqual(mScope.$Annotations, mExpectedAnnotations, "$Annotations have been shifted and merged from schemas to root"); assert.notOk("$Annotations" in mScope["A."], "$Annotations removed from schema"); assert.notOk("$Annotations" in mScope["B."], "$Annotations removed from schema"); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, { "A." : {"/a/b/c/d/e/$metadata" : false}, "B." 
: {"/a/b/c/d/e/$metadata" : false} }); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: validation failure for $metadata", function (assert) { var oError = new Error(), mScope = {}; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope) .throws(oError); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(mScope, []); }, oError); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: validation failure in annotation file", function (assert) { var oError = new Error(), mScope = {}, mAnnotationScope1 = {}, mAnnotationScope2 = {}; this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"]; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope); this.oMetaModelMock.expects("validate") .withExactArgs("n/a", mAnnotationScope1); this.oMetaModelMock.expects("validate") .withExactArgs("/my/annotation.xml", mAnnotationScope2) .throws(oError); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(mScope, [mAnnotationScope1, mAnnotationScope2]); }, oError); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: with annotation files (legacy)", function (assert) { var sNamespace = "com.sap.gateway.default.iwbep.tea_busi.v0001.", sWorker = sNamespace + "Worker/", sBasicSalaryCurr = sWorker + "SALARY/BASIC_SALARY_CURR", sBasicSalaryCurr2 = "another.schema.2.SALARY/BASIC_SALARY_CURR", sBonusCurr = sWorker + "SALARY/BONUS_CURR", sCommonLabel = "@com.sap.vocabularies.Common.v1.Label", sCommonQuickInfo = "@com.sap.vocabularies.Common.v1.QuickInfo", sCommonText = "@com.sap.vocabularies.Common.v1.Text", sBaseUrl = "/" + window.location.pathname.split("/")[1] + "/test-resources/sap/ui/core/qunit/odata/v4/data/", oMetadata = jQuery.sap.sjax({url : sBaseUrl + "metadata.json", dataType : 'json'}).data, oExpectedResult = clone(oMetadata), oAnnotation = jQuery.sap.sjax({ url : sBaseUrl + "legacy_annotations.json", dataType : 'json' }).data, oAnnotationCopy = clone(oAnnotation); // the examples are unrealistic and only need to work in 'legacy mode' this.oMetaModel.bSupportReferences = false; this.oMetaModel.aAnnotationUris = ["n/a"]; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, oMetadata); this.oMetaModelMock.expects("validate") .withExactArgs("n/a", oAnnotation); oExpectedResult.$Annotations = oMetadata[sNamespace].$Annotations; delete oExpectedResult[sNamespace].$Annotations; // all entries with $kind are merged oExpectedResult["my.schema.2.FuGetEmployeeMaxAge"] = oAnnotationCopy["my.schema.2.FuGetEmployeeMaxAge"]; oExpectedResult["my.schema.2.Entity"] = oAnnotationCopy["my.schema.2.Entity"]; oExpectedResult["my.schema.2.DefaultContainer"] = oAnnotationCopy["my.schema.2.DefaultContainer"]; oExpectedResult["my.schema.2."] = oAnnotationCopy["my.schema.2."]; oExpectedResult["another.schema.2."] = oAnnotationCopy["another.schema.2."]; // update annotations oExpectedResult.$Annotations[sBasicSalaryCurr][sCommonLabel] = oAnnotationCopy["my.schema.2."].$Annotations[sBasicSalaryCurr][sCommonLabel]; oExpectedResult.$Annotations[sBasicSalaryCurr][sCommonQuickInfo] = oAnnotationCopy["my.schema.2."].$Annotations[sBasicSalaryCurr][sCommonQuickInfo]; oExpectedResult.$Annotations[sBonusCurr][sCommonText] = 
oAnnotationCopy["my.schema.2."].$Annotations[sBonusCurr][sCommonText]; oExpectedResult.$Annotations[sBasicSalaryCurr2] = oAnnotationCopy["another.schema.2."].$Annotations[sBasicSalaryCurr2]; delete oExpectedResult["my.schema.2."].$Annotations; delete oExpectedResult["another.schema.2."].$Annotations; // code under test this.oMetaModel._mergeAnnotations(oMetadata, [oAnnotation]); assert.deepEqual(oMetadata, oExpectedResult, "merged metadata as expected"); }); //********************************************************************************************* QUnit.test("_mergeAnnotations: with annotation files", function (assert) { var mScope0 = { "$EntityContainer" : "tea_busi.DefaultContainer", "$Reference" : { "../../../../default/iwbep/tea_busi_foo/0001/$metadata" : { "$Include" : [ "tea_busi_foo.v0001." ] } }, "$Version" : "4.0", "tea_busi." : { "$kind" : "Schema", "$Annotations" : { "tea_busi.DefaultContainer" : { "@A" : "from $metadata", "@B" : "from $metadata", "@C" : "from $metadata" }, "tea_busi.TEAM" : { "@D" : ["from $metadata"], "@E" : ["from $metadata"], "@F" : ["from $metadata"] } } }, "tea_busi.DefaultContainer" : { "$kind" : "EntityContainer" }, "tea_busi.EQUIPMENT" : { "$kind" : "EntityType" }, "tea_busi.TEAM" : { "$kind" : "EntityType" }, "tea_busi.Worker" : { "$kind" : "EntityType" } }, mScope1 = { "$Version" : "4.0", "tea_busi_foo.v0001." : { "$kind" : "Schema", "$Annotations" : { "tea_busi_foo.v0001.Product/Name" : { "@Common.Label" : "from $metadata" } } }, "tea_busi_foo.v0001.Product" : { "$kind" : "EntityType", "Name" : { "$kind" : "Property", "$Type" : "Edm.String" } } }, mAnnotationScope1 = { "$Version" : "4.0", "foo." : { "$kind" : "Schema", "$Annotations" : { "tea_busi.DefaultContainer" : { "@B" : "from annotation #1", "@C" : "from annotation #1" }, "tea_busi.TEAM" : { "@E" : ["from annotation #1"], "@F" : ["from annotation #1"] }, "tea_busi.Worker" : { "@From.Annotation" : { "$Type" : "some.Record", "Label" : "from annotation #1" }, "@From.Annotation1" : "from annotation #1" } } } }, mAnnotationScope2 = { "$Version" : "4.0", "bar." : { "$kind" : "Schema", "$Annotations" : { "tea_busi.DefaultContainer" : { "@C" : "from annotation #2" }, "tea_busi.EQUIPMENT" : { "@From.Annotation2" : "from annotation #2" }, "tea_busi.TEAM" : { "@F" : ["from annotation #2"] }, "tea_busi.Worker" : { "@From.Annotation" : { "$Type" : "some.Record", "Value" : "from annotation #2" } }, "tea_busi_foo.v0001.Product/Name" : { "@Common.Label" : "from annotation #2" } } } }, mExpectedScope = { "$Annotations" : { "tea_busi.DefaultContainer" : { "@A" : "from $metadata", "@B" : "from annotation #1", "@C" : "from annotation #2" }, "tea_busi.EQUIPMENT" : { "@From.Annotation2" : "from annotation #2" }, "tea_busi.TEAM" : { // Note: no aggregation of array elements here! "@D" : ["from $metadata"], "@E" : ["from annotation #1"], "@F" : ["from annotation #2"] }, "tea_busi.Worker" : { "@From.Annotation" : { "$Type" : "some.Record", // Note: no "Label" here! "Value" : "from annotation #2" }, "@From.Annotation1" : "from annotation #1" }, "tea_busi_foo.v0001.Product/Name" : { "@Common.Label" : "from annotation #2" } }, "$EntityContainer" : "tea_busi.DefaultContainer", "$Reference" : { "../../../../default/iwbep/tea_busi_foo/0001/$metadata" : { "$Include" : [ "tea_busi_foo.v0001." ] } }, "$Version" : "4.0", "bar." : { "$kind" : "Schema" }, "foo." : { "$kind" : "Schema" }, "tea_busi." 
: { "$kind" : "Schema" }, "tea_busi.DefaultContainer" : { "$kind" : "EntityContainer" }, "tea_busi.EQUIPMENT" : { "$kind" : "EntityType" }, "tea_busi.TEAM" : { "$kind" : "EntityType" }, "tea_busi.Worker" : { "$kind" : "EntityType" } }; this.oMetaModel.aAnnotationUris = ["/URI/1", "/URI/2"]; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, mScope0); this.oMetaModelMock.expects("validate") .withExactArgs("/URI/1", mAnnotationScope1); this.oMetaModelMock.expects("validate") .withExactArgs("/URI/2", mAnnotationScope2); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, {}); // code under test this.oMetaModel._mergeAnnotations(mScope0, [mAnnotationScope1, mAnnotationScope2]); assert.deepEqual(mScope0, mExpectedScope); assert.strictEqual(mScope0["tea_busi."].$Annotations, undefined); assert.strictEqual(mAnnotationScope1["foo."].$Annotations, undefined); assert.strictEqual(mAnnotationScope2["bar."].$Annotations, undefined); assert.deepEqual(this.oMetaModel.mSchema2MetadataUrl, { "bar." : {"/URI/2" : false}, "foo." : {"/URI/1" : false}, "tea_busi." : {"/a/b/c/d/e/$metadata" : false} }); // prepare to load "cross-service reference" // simulate #validate of mScope0 this.oMetaModel.mSchema2MetadataUrl["tea_busi_foo.v0001."] = {"/a/default/iwbep/tea_busi_foo/0001/$metadata" : false}; this.oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope0)); this.mock(this.oMetaModel.oRequestor).expects("read") .withExactArgs("/a/default/iwbep/tea_busi_foo/0001/$metadata") .returns(Promise.resolve(mScope1)); this.oMetaModelMock.expects("validate") .withExactArgs("/a/default/iwbep/tea_busi_foo/0001/$metadata", mScope1) .returns(mScope1); // code under test return this.oMetaModel.fetchObject("/tea_busi_foo.v0001.Product/[email protected]") .then(function (sLabel) { assert.strictEqual(sLabel, "from annotation #2", "not overwritten by $metadata"); }); }); //********************************************************************************************* QUnit.test("_mergeAnnotations - error (legacy)", function (assert) { var oAnnotation1 = { "tea_busi.NewType1" : { "$kind" : "EntityType" } }, oAnnotation2 = { "tea_busi.NewType2" : { "$kind" : "EntityType" }, "tea_busi.ExistingType" : { "$kind" : "EntityType" } }, sMessage = "A schema cannot span more than one document: tea_busi.ExistingType", oMetadata = { "tea_busi.ExistingType" : { "$kind" : "EntityType" } }; this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"]; // legacy behavior: $Version is not checked, tea_busi.NewType2 is allowed this.oMetaModel.bSupportReferences = false; this.oMetaModelMock.expects("validate") .withExactArgs(this.oMetaModel.sUrl, oMetadata); this.oMetaModelMock.expects("validate") .withExactArgs("n/a", oAnnotation1); this.oMetaModelMock.expects("validate") .withExactArgs("/my/annotation.xml", oAnnotation2); this.oLogMock.expects("error") .withExactArgs(sMessage, "/my/annotation.xml", sODataMetaModel); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(oMetadata, [oAnnotation1, oAnnotation2]); }, new Error("/my/annotation.xml: " + sMessage)); }); //********************************************************************************************* QUnit.test("_mergeAnnotations - a schema cannot span more than one document", function (assert) { var oAnnotation = { "$Version" : "4.0", "tea_busi." : { "$kind" : "Schema" } }, sMessage = "A schema cannot span more than one document: tea_busi.", oMetadata = { "$Version" : "4.0", "tea_busi." 
: { "$kind" : "Schema" } }; this.oMetaModel.aAnnotationUris = ["n/a", "/my/annotation.xml"]; this.oLogMock.expects("error") .withExactArgs(sMessage, "/my/annotation.xml", sODataMetaModel); assert.throws(function () { // code under test this.oMetaModel._mergeAnnotations(oMetadata, [{"$Version" : "4.0"}, oAnnotation]); }, new Error("/my/annotation.xml: " + sMessage)); } ); //********************************************************************************************* QUnit.test("getOrCreateValueListModel", function (assert) { var oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModel = oModel.getMetaModel(), oValueListModel; oModel.oRequestor.mHeaders["X-CSRF-Token"] = "xyz"; // code under test oValueListModel = oMetaModel.getOrCreateValueListModel("../ValueListService/$metadata"); assert.ok(oValueListModel instanceof ODataModel); assert.strictEqual(oValueListModel.sServiceUrl, "/Foo/ValueListService/"); assert.strictEqual(oValueListModel.getDefaultBindingMode(), BindingMode.OneWay); assert.strictEqual(oValueListModel.sOperationMode, OperationMode.Server); assert.strictEqual(oValueListModel.oRequestor.mHeaders["X-CSRF-Token"], "xyz"); // code under test assert.strictEqual(oMetaModel.getOrCreateValueListModel("/Foo/ValueListService/$metadata"), oValueListModel); // code under test assert.strictEqual(oValueListModel.getMetaModel() .getOrCreateValueListModel("/Foo/ValueListService/$metadata"), oValueListModel); // code under test assert.strictEqual(oValueListModel.getMetaModel().getOrCreateValueListModel("$metadata"), oValueListModel); oModel = new ODataModel({ serviceUrl : "/Foo/DataService2/", synchronizationMode : "None" }); // code under test - even a totally different model gets the very same value list model assert.strictEqual(oModel.getMetaModel() .getOrCreateValueListModel("../ValueListService/$metadata"), oValueListModel); }); //********************************************************************************************* QUnit.test("getOrCreateValueListModel: relative data service URL", function (assert) { var sRelativePath = "../../../DataService/", sAbsolutePath = new URI(sRelativePath).absoluteTo(document.baseURI).pathname().toString(), oModel = new ODataModel({ serviceUrl : sRelativePath, synchronizationMode : "None" }), oValueListModel; // code under test oValueListModel = oModel.getMetaModel() .getOrCreateValueListModel("../ValueListService/$metadata"); assert.strictEqual(oValueListModel.sServiceUrl, new URI("../ValueListService/").absoluteTo(sAbsolutePath).toString()); }); //********************************************************************************************* QUnit.test("fetchValueListType: unknown property", function (assert) { var oContext = {}, sPath = "/Products('HT-1000')/Foo"; this.oMetaModelMock.expects("getMetaContext").withExactArgs(sPath).returns(oContext); this.oMetaModelMock.expects("fetchObject") .withExactArgs(undefined, sinon.match.same(oContext)) .returns(Promise.resolve()); // code under test return this.oMetaModel.fetchValueListType(sPath).then(function () { assert.ok(false); }, function (oError) { assert.ok(oError.message, "No metadata for " + sPath); }); }); //********************************************************************************************* [{ mAnnotations : { "@some.other.Annotation" : true }, sValueListType : ValueListType.None }, { mAnnotations : { "@com.sap.vocabularies.Common.v1.ValueListReferences" : [], "@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : true }, 
sValueListType : ValueListType.Fixed }, { mAnnotations : { "@com.sap.vocabularies.Common.v1.ValueListReferences" : [] }, sValueListType : ValueListType.Standard }, { mAnnotations : { "@com.sap.vocabularies.Common.v1.ValueListReferences#foo" : [], "@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : false }, sValueListType : ValueListType.Standard }, { mAnnotations : { "@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : {}, "@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : false }, sValueListType : ValueListType.Standard }].forEach(function (oFixture) { QUnit.test("fetchValueListType: " + oFixture.sValueListType, function (assert) { var oContext = {}, sPropertyPath = "/ProductList('HT-1000')/Status"; this.oMetaModelMock.expects("getMetaContext") .withExactArgs(sPropertyPath).returns(oContext); this.oMetaModelMock.expects("fetchObject") .withExactArgs(undefined, sinon.match.same(oContext)) .returns(SyncPromise.resolve({})); this.oMetaModelMock.expects("getObject") .withExactArgs("@", sinon.match.same(oContext)) .returns(oFixture.mAnnotations); // code under test this.oMetaModel.fetchValueListType(sPropertyPath).then(function (sValueListType) { assert.strictEqual(sValueListType, oFixture.sValueListType); }); }); }); //********************************************************************************************* QUnit.test("getValueListType, requestValueListType", function (assert) { return checkGetAndRequest(this, assert, "fetchValueListType", ["sPath"], true); }); //********************************************************************************************* QUnit.test("fetchValueListMappings: success", function (assert) { var oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModelMock = this.mock(oModel.getMetaModel()), oDefaultMapping = { "CollectionPath" : "VH_Category1Set", "Parameters" : [{"p1" : "foo"}] }, oFooMapping = { "CollectionPath" : "VH_Category2Set", "Parameters" : [{"p2" : "bar"}] }, oProperty = {}, oValueListMetadata = { "$Annotations" : { "zui5_epm_sample.Product/Category" : { "@com.sap.vocabularies.Common.v1.ValueListMapping" : oDefaultMapping, "@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oFooMapping }, "some.other.Target" : {} } }, oValueListModel = { getMetaModel : function () { return { fetchEntityContainer : function () { return Promise.resolve(oValueListMetadata); } }; } }; oMetaModelMock.expects("getObject") .withExactArgs("/zui5_epm_sample.Product/Category") .returns(oProperty); // code under test return oModel.getMetaModel() .fetchValueListMappings(oValueListModel, "zui5_epm_sample", oProperty) .then(function (oValueListMappings) { assert.deepEqual(oValueListMappings, { "" : oDefaultMapping, "foo" : oFooMapping }); }); }); //********************************************************************************************* [{ annotations : { "zui5_epm_sample.Product/CurrencyCode/type.cast" : true }, error : "Unexpected annotation target 'zui5_epm_sample.Product/CurrencyCode/type.cast' " + "with namespace of data service in /Foo/ValueListService" }, { annotations : { "zui5_epm_sample.Product/Category" : { "@some.other.Term" : true } }, error : "Unexpected annotation 'some.other.Term' for target " + "'zui5_epm_sample.Product/Category' with namespace of data service " + "in /Foo/ValueListService" }, { annotations : {}, error : "No annotation 'com.sap.vocabularies.Common.v1.ValueListMapping' " + "in /Foo/ValueListService" }].forEach(function (oFixture) { QUnit.test("fetchValueListMappings: " + 
oFixture.error, function (assert) { var oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModel = oModel.getMetaModel(), oMetaModelMock = this.mock(oMetaModel), oProperty = {}, oValueListMetadata = { "$Annotations" : oFixture.annotations }, oValueListModel = { getMetaModel : function () { return { fetchEntityContainer : function () { return Promise.resolve(oValueListMetadata); } }; }, sServiceUrl : "/Foo/ValueListService" }, sTarget = Object.keys(oFixture.annotations)[0]; oMetaModelMock.expects("getObject").atLeast(0) .withExactArgs("/" + sTarget) .returns(sTarget === "zui5_epm_sample.Product/Category" ? oProperty : undefined); // code under test return oMetaModel .fetchValueListMappings(oValueListModel, "zui5_epm_sample", oProperty) .then(function () { assert.ok(false); }, function (oError) { assert.strictEqual(oError.message, oFixture.error); }); }); }); //********************************************************************************************* QUnit.test("fetchValueListMappings: value list model is data model", function (assert) { var oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModelMock = this.mock(oModel.getMetaModel()), oMapping = { "CollectionPath" : "VH_CountrySet", "Parameters" : [{"p1" : "foo"}] }, oProperty = { "$kind" : "Property" }, oMetadata = { "$EntityContainer" : "value_list.Container", "value_list.VH_BusinessPartner" : { "$kind" : "Entity", "Country" : oProperty }, "$Annotations" : { // value list on value list "value_list.VH_BusinessPartner/Country" : { "@com.sap.vocabularies.Common.v1.Label" : "Country", "@com.sap.vocabularies.Common.v1.ValueListMapping" : oMapping }, "value_list.VH_BusinessPartner/Foo" : {/* some other field w/ value list*/} } }; oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(oMetadata)); // code under test return oModel.getMetaModel() .fetchValueListMappings(oModel, "value_list", oProperty) .then(function (oValueListMappings) { assert.deepEqual(oValueListMappings, { "" : oMapping }); }); }); //********************************************************************************************* [{ sPropertyPath : "/EMPLOYEES/unknown", sExpectedError : "No metadata" }, { sPropertyPath : "/EMPLOYEES/AGE", sExpectedError : "No annotation 'com.sap.vocabularies.Common.v1.ValueListReferences'" }].forEach(function (oFixture) { QUnit.test("requestValueListInfo: " + oFixture.sExpectedError, function (assert) { var oModel = new ODataModel({ serviceUrl : "/~/", synchronizationMode : "None" }); this.mock(oModel.getMetaModel()).expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(mScope)); // code under test return oModel.getMetaModel().requestValueListInfo(oFixture.sPropertyPath) .then(function () { assert.ok(false); }, function (oError) { assert.strictEqual(oError.message, oFixture.sExpectedError + " for " + oFixture.sPropertyPath); }); }); }); //********************************************************************************************* [false, true].forEach(function (bDuplicate) { QUnit.test("requestValueListInfo: duplicate=" + bDuplicate, function (assert) { var sMappingUrl1 = "../ValueListService1/$metadata", sMappingUrl2 = "../ValueListService2/$metadata", sMappingUrlBar = "../ValueListServiceBar/$metadata", oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModelMock = this.mock(oModel.getMetaModel()), oProperty = { "$kind" : "Property" }, sPropertyPath = 
"/ProductList('HT-1000')/Category", oMetadata = { "$EntityContainer" : "zui5_epm_sample.Container", "zui5_epm_sample.Product" : { "$kind" : "Entity", "Category" : oProperty }, "$Annotations" : { "zui5_epm_sample.Product/Category" : { "@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl1, sMappingUrl2], "@com.sap.vocabularies.Common.v1.ValueListReferences#bar" : [sMappingUrlBar], "@com.sap.vocabularies.Common.v1.ValueListReferences#[email protected]" : true, "@some.other.Annotation" : true } }, "zui5_epm_sample.Container" : { "ProductList" : { "$kind" : "EntitySet", "$Type" : "zui5_epm_sample.Product" } } }, oValueListMappings1 = { "" : {CollectionPath : ""} }, oValueListMappings2 = { "foo" : {CollectionPath : "foo"} }, oValueListMappingsBar = {}, oValueListModel1 = {sServiceUrl : sMappingUrl1}, oValueListModel2 = {sServiceUrl : sMappingUrl2}, oValueListModelBar = {sServiceUrl : sMappingUrlBar}; oValueListMappingsBar[bDuplicate ? "" : "bar"] = {CollectionPath : "bar"}; oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(oMetadata)); oMetaModelMock.expects("getOrCreateValueListModel") .withExactArgs(sMappingUrl1) .returns(oValueListModel1); oMetaModelMock.expects("fetchValueListMappings") .withExactArgs(sinon.match.same(oValueListModel1), "zui5_epm_sample", sinon.match.same(oProperty)) .returns(Promise.resolve(oValueListMappings1)); oMetaModelMock.expects("getOrCreateValueListModel") .withExactArgs(sMappingUrl2) .returns(oValueListModel2); oMetaModelMock.expects("fetchValueListMappings") .withExactArgs(sinon.match.same(oValueListModel2), "zui5_epm_sample", sinon.match.same(oProperty)) .returns(Promise.resolve(oValueListMappings2)); oMetaModelMock.expects("getOrCreateValueListModel") .withExactArgs(sMappingUrlBar) .returns(oValueListModelBar); oMetaModelMock.expects("fetchValueListMappings") .withExactArgs(sinon.match.same(oValueListModelBar), "zui5_epm_sample", sinon.match.same(oProperty)) .returns(SyncPromise.resolve(oValueListMappingsBar)); // code under test return oModel.getMetaModel() .requestValueListInfo(sPropertyPath) .then(function (oResult) { assert.ok(!bDuplicate); assert.deepEqual(oResult, { "" : { $model : oValueListModel1, CollectionPath : "" }, "foo" : { $model : oValueListModel2, CollectionPath : "foo" }, "bar" : { $model : oValueListModelBar, CollectionPath : "bar" } }); }, function (oError) { assert.ok(bDuplicate); assert.strictEqual(oError.message, "Annotations 'com.sap.vocabularies.Common.v1.ValueListMapping' with " + "identical qualifier '' for property " + sPropertyPath + " in " + sMappingUrlBar + " and " + sMappingUrl1); }); }); }); //********************************************************************************************* QUnit.test("requestValueListInfo: same model w/o reference", function (assert) { var oProperty = { "$kind" : "Property" }, oValueListMappingFoo = {CollectionPath : "foo"}, oMetadata = { "$EntityContainer" : "value_list.Container", "value_list.Container" : { "$kind" : "EntityContainer", "VH_BusinessPartnerSet" : { "$kind" : "EntitySet", "$Type" : "value_list.VH_BusinessPartner" } }, "value_list.VH_BusinessPartner" : { "$kind" : "Entity", "Country" : oProperty }, "$Annotations" : { "value_list.VH_BusinessPartner/Country" : { "@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oValueListMappingFoo, "@com.sap.vocabularies.Common.v1.ValueListMapping#bar" : {CollectionPath : "bar"} } } }, oModel = new ODataModel({ serviceUrl : "/Foo/ValueListService/", synchronizationMode : "None" }), 
oMetaModelMock = this.mock(oModel.getMetaModel()), sPropertyPath = "/VH_BusinessPartnerSet('0100000000')/Country"; oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(oMetadata)); // code under test return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function (oResult) { assert.strictEqual(oResult.foo.$model, oModel); assert.strictEqual(oResult.bar.$model, oModel); assert.notOk("$model" in oValueListMappingFoo); delete oResult.foo.$model; delete oResult.bar.$model; assert.deepEqual(oResult, { "foo" : {CollectionPath : "foo"}, "bar" : {CollectionPath : "bar"} }); }); }); //********************************************************************************************* [false, true].forEach(function (bDuplicate) { var sTitle = "requestValueListInfo: fixed values: duplicate=" + bDuplicate; QUnit.test(sTitle, function (assert) { var oValueListMapping = {CollectionPath : "foo"}, oAnnotations = { "@com.sap.vocabularies.Common.v1.ValueListWithFixedValues" : true, "@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : oValueListMapping }, oMetadata = { "$EntityContainer" : "value_list.Container", "value_list.Container" : { "$kind" : "EntityContainer", "VH_BusinessPartnerSet" : { "$kind" : "EntitySet", "$Type" : "value_list.VH_BusinessPartner" } }, "value_list.VH_BusinessPartner" : { "$kind" : "Entity", "Country" : {} }, "$Annotations" : { "value_list.VH_BusinessPartner/Country" : oAnnotations } }, oModel = new ODataModel({ serviceUrl : "/Foo/ValueListService/", synchronizationMode : "None" }), sPropertyPath = "/VH_BusinessPartnerSet('42')/Country"; if (bDuplicate) { oAnnotations["@com.sap.vocabularies.Common.v1.ValueListMapping#bar"] = {}; } this.mock(oModel.getMetaModel()).expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(oMetadata)); // code under test return oModel.getMetaModel().requestValueListInfo(sPropertyPath) .then(function (oResult) { assert.notOk(bDuplicate); assert.strictEqual(oResult[""].$model, oModel); delete oResult[""].$model; assert.deepEqual(oResult, { "" : {CollectionPath : "foo"} }); }, function (oError) { assert.ok(bDuplicate); assert.strictEqual(oError.message, "Annotation " + "'com.sap.vocabularies.Common.v1.ValueListWithFixedValues' but multiple " + "'com.sap.vocabularies.Common.v1.ValueListMapping' for property " + sPropertyPath); }); }); }); // ********************************************************************************************* QUnit.test("requestValueListInfo: property in cross-service reference", function (assert) { var sMappingUrl = "../ValueListService/$metadata", oModel = new ODataModel({ serviceUrl : "/Foo/DataService/", synchronizationMode : "None" }), oMetaModelMock = this.mock(oModel.getMetaModel()), oProperty = { "$kind" : "Property" }, oMetadata = { "$Version" : "4.0", "$Reference" : { "/Foo/EpmSample/$metadata" : { "$Include" : ["zui5_epm_sample."] } }, "$EntityContainer" : "base.Container", "base.Container" : { "BusinessPartnerList" : { "$kind" : "EntitySet", "$Type" : "base.BusinessPartner" } }, "base.BusinessPartner" : { "$kind" : "EntityType", "BP_2_PRODUCT" : { "$kind" : "NavigationProperty", "$Type" : "zui5_epm_sample.Product" } } }, oMetadataProduct = { "$Version" : "4.0", "zui5_epm_sample.Product" : { "$kind" : "Entity", "Category" : oProperty }, "zui5_epm_sample." 
: { "$kind" : "Schema", "$Annotations" : { "zui5_epm_sample.Product/Category" : { "@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl] } } } }, sPropertyPath = "/BusinessPartnerList('0100000000')/BP_2_PRODUCT('HT-1000')/Category", oRequestorMock = this.mock(oModel.oMetaModel.oRequestor), oValueListMappings = { "" : {CollectionPath : ""} }, oValueListModel = {sServiceUrl : sMappingUrl}; oRequestorMock.expects("read").withExactArgs("/Foo/DataService/$metadata", false, undefined) .returns(Promise.resolve(oMetadata)); oRequestorMock.expects("read").withExactArgs("/Foo/EpmSample/$metadata") .returns(Promise.resolve(oMetadataProduct)); oMetaModelMock.expects("getOrCreateValueListModel") .withExactArgs(sMappingUrl) .returns(oValueListModel); oMetaModelMock.expects("fetchValueListMappings") .withExactArgs(sinon.match.same(oValueListModel), "zui5_epm_sample", sinon.match.same(oProperty)) .returns(Promise.resolve(oValueListMappings)); // code under test return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function (oResult) { assert.deepEqual(oResult, { "" : { $model : oValueListModel, CollectionPath : "" } }); }); }); // ********************************************************************************************* QUnit.test("requestValueListInfo: same qualifier in reference and local", function (assert) { var sMappingUrl = "../ValueListService/$metadata", oProperty = { "$kind" : "Property" }, oMetadata = { "$EntityContainer" : "zui5_epm_sample.Container", "zui5_epm_sample.Container" : { "$kind" : "EntityContainer", "ProductList" : { "$kind" : "EntitySet", "$Type" : "zui5_epm_sample.Product" } }, "zui5_epm_sample.Product" : { "$kind" : "Entity", "Category" : oProperty }, "$Annotations" : { "zui5_epm_sample.Product/Category" : { "@com.sap.vocabularies.Common.v1.ValueListReferences" : [sMappingUrl], "@com.sap.vocabularies.Common.v1.ValueListMapping#foo" : {} } } }, oModel = new ODataModel({ serviceUrl : "/Foo/ValueListService/", synchronizationMode : "None" }), oMetaModelMock = this.mock(oModel.getMetaModel()), sPropertyPath = "/ProductList('HT-1000')/Category", oValueListModel = {}; oMetaModelMock.expects("fetchEntityContainer").atLeast(1) .returns(SyncPromise.resolve(oMetadata)); oMetaModelMock.expects("getOrCreateValueListModel") .withExactArgs(sMappingUrl) .returns(oValueListModel); oMetaModelMock.expects("fetchValueListMappings") .withExactArgs(sinon.match.same(oValueListModel), "zui5_epm_sample", sinon.match.same(oProperty)) .returns(Promise.resolve({"foo" : {}})); // code under test return oModel.getMetaModel().requestValueListInfo(sPropertyPath).then(function () { assert.ok(false); }, function (oError) { assert.strictEqual(oError.message, "Annotations 'com.sap.vocabularies.Common.v1.ValueListMapping' with identical " + "qualifier 'foo' for property " + sPropertyPath + " in " + oModel.sServiceUrl + "$metadata and " + sMappingUrl); }); }); // ********************************************************************************************* QUnit.test("fetchModule: synchronously", function (assert) { var vModule = {}; this.mock(sap.ui).expects("require") .withExactArgs("sap/ui/model/odata/type/Int") .returns(vModule); // requested module already loaded // code under test assert.strictEqual(this.oMetaModel.fetchModule("sap.ui.model.odata.type.Int").getResult(), vModule); }); // ********************************************************************************************* QUnit.test("fetchModule, asynchronous", function (assert) { var vModule = {}, sModuleName = 
"sap/ui/model/odata/type/Int64", oSapUiMock = this.mock(sap.ui); oSapUiMock.expects("require") .withExactArgs(sModuleName) .returns(undefined); // requested module not yet loaded oSapUiMock.expects("require") .withExactArgs([sModuleName], sinon.match.func) .callsArgWithAsync(1, vModule); // code under test return this.oMetaModel.fetchModule("sap.ui.model.odata.type.Int64") .then(function (oResult) { assert.strictEqual(oResult, vModule); }); }); //********************************************************************************************* if (TestUtils.isRealOData()) { //***************************************************************************************** QUnit.test("getValueListType, requestValueListInfo: realOData", function (assert) { var sPath = new URI(TestUtils.proxy(sSampleServiceUrl)) .absoluteTo(window.location.pathname).toString(), oModel = new ODataModel({ serviceUrl : sPath, synchronizationMode : "None" }), oMetaModel = oModel.getMetaModel(), sPropertyPath = "/ProductList('HT-1000')/Category"; return oMetaModel.requestObject("/ProductList/").then(function () { assert.strictEqual(oMetaModel.getValueListType( "/com.sap.gateway.default.zui5_epm_sample.v0002.Contact/Sex"), ValueListType.Fixed); assert.strictEqual(oMetaModel.getValueListType(sPropertyPath), ValueListType.Standard); return oMetaModel.requestValueListInfo(sPropertyPath).then(function (oResult) { var oValueListInfo = oResult[""]; assert.strictEqual(oValueListInfo.CollectionPath, "H_EPM_PD_CATS_SH_Set"); }); }); }); //***************************************************************************************** QUnit.test("requestValueListInfo: same model w/o reference, realOData", function (assert) { var oModel = new ODataModel({ serviceUrl : TestUtils.proxy(sSampleServiceUrl), synchronizationMode : "None" }), oMetaModel = oModel.getMetaModel(), sPropertyPath = "/ProductList/0/CurrencyCode", oValueListMetaModel; return oMetaModel.requestObject("/ProductList/").then(function () { // value list in the data service assert.strictEqual(oMetaModel.getValueListType(sPropertyPath), ValueListType.Standard); return oMetaModel.requestValueListInfo(sPropertyPath); }).then(function (oValueListInfo) { var sPropertyPath2 = "/H_TCURC_SH_Set/1/WAERS"; // value list in the value list service oValueListMetaModel = oValueListInfo[""].$model.getMetaModel(); assert.strictEqual(oValueListMetaModel.getValueListType(sPropertyPath2), ValueListType.Standard); assert.strictEqual(oValueListInfo[""].CollectionPath, "H_TCURC_SH_Set"); return oValueListMetaModel.requestValueListInfo(sPropertyPath2); }).then(function (oValueListInfo) { assert.strictEqual(oValueListInfo[""].$model.getMetaModel(), oValueListMetaModel); assert.strictEqual(oValueListInfo[""].CollectionPath, "TCURC_CT_Set"); }); }); } }); //TODO getContext vs. createBindingContext; map of "singletons" vs. memory leak
cschuff/openui5
src/sap.ui.core/test/sap/ui/core/qunit/odata/v4/ODataMetaModel.qunit.js
JavaScript
apache-2.0
161,426
# coding=utf-8 # Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for StatementVisitor.""" from __future__ import unicode_literals import re import subprocess import textwrap import unittest from grumpy_tools.compiler import block from grumpy_tools.compiler import imputil from grumpy_tools.compiler import shard_test from grumpy_tools.compiler import stmt from grumpy_tools.compiler import util from grumpy_tools.vendor import pythonparser from grumpy_tools.vendor.pythonparser import ast class StatementVisitorTest(unittest.TestCase): def testAssertNoMsg(self): self.assertEqual((0, 'AssertionError()\n'), _GrumpRun(textwrap.dedent("""\ try: assert False except AssertionError as e: print repr(e)"""))) def testAssertMsg(self): want = (0, "AssertionError('foo',)\n") self.assertEqual(want, _GrumpRun(textwrap.dedent("""\ try: assert False, 'foo' except AssertionError as e: print repr(e)"""))) def testBareAssert(self): # Assertion errors at the top level of a block should raise: # https://github.com/google/grumpy/issues/18 want = (0, 'ok\n') self.assertEqual(want, _GrumpRun(textwrap.dedent("""\ def foo(): assert False try: foo() except AssertionError: print 'ok' else: print 'bad'"""))) def testAssignAttribute(self): self.assertEqual((0, '123\n'), _GrumpRun(textwrap.dedent("""\ e = Exception() e.foo = 123 print e.foo"""))) def testAssignName(self): self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\ foo = 'bar' print foo"""))) def testAssignMultiple(self): self.assertEqual((0, 'baz baz\n'), _GrumpRun(textwrap.dedent("""\ foo = bar = 'baz' print foo, bar"""))) def testAssignSubscript(self): self.assertEqual((0, "{'bar': None}\n"), _GrumpRun(textwrap.dedent("""\ foo = {} foo['bar'] = None print foo"""))) def testAssignTuple(self): self.assertEqual((0, 'a b\n'), _GrumpRun(textwrap.dedent("""\ baz = ('a', 'b') foo, bar = baz print foo, bar"""))) def testAugAssign(self): self.assertEqual((0, '42\n'), _GrumpRun(textwrap.dedent("""\ foo = 41 foo += 1 print foo"""))) def testAugAssignBitAnd(self): self.assertEqual((0, '3\n'), _GrumpRun(textwrap.dedent("""\ foo = 7 foo &= 3 print foo"""))) def testAugAssignPow(self): self.assertEqual((0, '64\n'), _GrumpRun(textwrap.dedent("""\ foo = 8 foo **= 2 print foo"""))) def testClassDef(self): self.assertEqual((0, "<type 'type'>\n"), _GrumpRun(textwrap.dedent("""\ class Foo(object): pass print type(Foo)"""))) def testClassDefWithVar(self): self.assertEqual((0, 'abc\n'), _GrumpRun(textwrap.dedent("""\ class Foo(object): bar = 'abc' print Foo.bar"""))) def testDeleteAttribute(self): self.assertEqual((0, 'False\n'), _GrumpRun(textwrap.dedent("""\ class Foo(object): bar = 42 del Foo.bar print hasattr(Foo, 'bar')"""))) def testDeleteClassLocal(self): self.assertEqual((0, 'False\n'), _GrumpRun(textwrap.dedent("""\ class Foo(object): bar = 'baz' del bar print hasattr(Foo, 'bar')"""))) def testDeleteGlobal(self): self.assertEqual((0, 'False\n'), _GrumpRun(textwrap.dedent("""\ foo = 42 del foo print 'foo' in globals()"""))) def 
testDeleteLocal(self): self.assertEqual((0, 'ok\n'), _GrumpRun(textwrap.dedent("""\ def foo(): bar = 123 del bar try: print bar raise AssertionError except UnboundLocalError: print 'ok' foo()"""))) def testDeleteNonexistentLocal(self): self.assertRaisesRegexp( util.ParseError, 'cannot delete nonexistent local', _ParseAndVisit, 'def foo():\n del bar') def testDeleteSubscript(self): self.assertEqual((0, '{}\n'), _GrumpRun(textwrap.dedent("""\ foo = {'bar': 'baz'} del foo['bar'] print foo"""))) def testExprCall(self): self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\ def foo(): print 'bar' foo()"""))) def testExprNameGlobal(self): self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\ foo = 42 foo"""))) def testExprNameLocal(self): self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\ foo = 42 def bar(): foo bar()"""))) def testFor(self): self.assertEqual((0, '1\n2\n3\n'), _GrumpRun(textwrap.dedent("""\ for i in (1, 2, 3): print i"""))) def testForBreak(self): self.assertEqual((0, '1\n'), _GrumpRun(textwrap.dedent("""\ for i in (1, 2, 3): print i break"""))) def testForContinue(self): self.assertEqual((0, '1\n2\n3\n'), _GrumpRun(textwrap.dedent("""\ for i in (1, 2, 3): print i continue raise AssertionError"""))) def testForElse(self): self.assertEqual((0, 'foo\nbar\n'), _GrumpRun(textwrap.dedent("""\ for i in (1,): print 'foo' else: print 'bar'"""))) def testForElseBreakNotNested(self): self.assertRaisesRegexp( util.ParseError, "'continue' not in loop", _ParseAndVisit, 'for i in (1,):\n pass\nelse:\n continue') def testForElseContinueNotNested(self): self.assertRaisesRegexp( util.ParseError, "'continue' not in loop", _ParseAndVisit, 'for i in (1,):\n pass\nelse:\n continue') def testFunctionDecorator(self): self.assertEqual((0, '<b>foo</b>\n'), _GrumpRun(textwrap.dedent("""\ def bold(fn): return lambda: '<b>' + fn() + '</b>' @bold def foo(): return 'foo' print foo()"""))) def testFunctionDecoratorWithArg(self): self.assertEqual((0, '<b id=red>foo</b>\n'), _GrumpRun(textwrap.dedent("""\ def tag(name): def bold(fn): return lambda: '<b id=' + name + '>' + fn() + '</b>' return bold @tag('red') def foo(): return 'foo' print foo()"""))) def testFunctionDef(self): self.assertEqual((0, 'bar baz\n'), _GrumpRun(textwrap.dedent("""\ def foo(a, b): print a, b foo('bar', 'baz')"""))) def testFunctionDefGenerator(self): self.assertEqual((0, "['foo', 'bar']\n"), _GrumpRun(textwrap.dedent("""\ def gen(): yield 'foo' yield 'bar' print list(gen())"""))) def testFunctionDefGeneratorReturnValue(self): self.assertRaisesRegexp( util.ParseError, 'returning a value in a generator function', _ParseAndVisit, 'def foo():\n yield 1\n return 2') def testFunctionDefLocal(self): self.assertEqual((0, 'baz\n'), _GrumpRun(textwrap.dedent("""\ def foo(): def bar(): print 'baz' bar() foo()"""))) def testIf(self): self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\ if 123: print 'foo' if '': print 'bar'"""))) def testIfElif(self): self.assertEqual((0, 'foo\nbar\n'), _GrumpRun(textwrap.dedent("""\ if True: print 'foo' elif False: print 'bar' if False: print 'foo' elif True: print 'bar'"""))) def testIfElse(self): self.assertEqual((0, 'foo\nbar\n'), _GrumpRun(textwrap.dedent("""\ if True: print 'foo' else: print 'bar' if False: print 'foo' else: print 'bar'"""))) def testImport(self): self.assertEqual((0, "<type 'dict'>\n"), _GrumpRun(textwrap.dedent("""\ import sys print type(sys.modules)"""))) def testImportFutureLateRaises(self): regexp = 'from __future__ imports must occur at the beginning of the 
file' self.assertRaisesRegexp(util.ImportError, regexp, _ParseAndVisit, 'foo = bar\nfrom __future__ import print_function') def testFutureUnicodeLiterals(self): want = "u'foo'\n" self.assertEqual((0, want), _GrumpRun(textwrap.dedent("""\ from __future__ import unicode_literals print repr('foo')"""))) def testImportMember(self): self.assertEqual((0, "<type 'dict'>\n"), _GrumpRun(textwrap.dedent("""\ from sys import modules print type(modules)"""))) def testImportConflictingPackage(self): self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\ import time from "__go__/time" import Now"""))) def testImportNative(self): self.assertEqual((0, '1 1000000000\n'), _GrumpRun(textwrap.dedent("""\ from "__go__/time" import Nanosecond, Second print Nanosecond, Second"""))) def testImportGrumpy(self): self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\ from "__go__/grumpy" import Assert Assert(__frame__(), True, 'bad')"""))) def testImportNativeType(self): self.assertEqual((0, "<type 'Duration'>\n"), _GrumpRun(textwrap.dedent("""\ from "__go__/time" import Duration print Duration"""))) def testImportWildcardMemberRaises(self): regexp = 'wildcard member import is not implemented' self.assertRaisesRegexp(util.ImportError, regexp, _ParseAndVisit, 'from foo import *') self.assertRaisesRegexp(util.ImportError, regexp, _ParseAndVisit, 'from "__go__/foo" import *') def testPrintStatement(self): self.assertEqual((0, 'abc 123\nfoo bar\n'), _GrumpRun(textwrap.dedent("""\ print 'abc', print '123' print 'foo', 'bar'"""))) def testPrintFunction(self): want = "abc\n123\nabc 123\nabcx123\nabc 123 " self.assertEqual((0, want), _GrumpRun(textwrap.dedent("""\ "module docstring is ok to proceed __future__" from __future__ import print_function print('abc') print(123) print('abc', 123) print('abc', 123, sep='x') print('abc', 123, end=' ')"""))) def testRaiseExitStatus(self): self.assertEqual(1, _GrumpRun('raise Exception')[0]) def testRaiseInstance(self): self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\ try: raise RuntimeError('foo') print 'bad' except RuntimeError as e: print e"""))) def testRaiseTypeAndArg(self): self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\ try: raise KeyError('foo') print 'bad' except KeyError as e: print e"""))) def testRaiseAgain(self): self.assertEqual((0, 'foo\n'), _GrumpRun(textwrap.dedent("""\ try: try: raise AssertionError('foo') except AssertionError: raise except Exception as e: print e"""))) def testRaiseTraceback(self): self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\ import sys try: try: raise Exception except: e, _, tb = sys.exc_info() raise e, None, tb except: e2, _, tb2 = sys.exc_info() assert e is e2 assert tb is tb2"""))) def testReturn(self): self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\ def foo(): return 'bar' print foo()"""))) def testTryBareExcept(self): self.assertEqual((0, ''), _GrumpRun(textwrap.dedent("""\ try: raise AssertionError except: pass"""))) def testTryElse(self): self.assertEqual((0, 'foo baz\n'), _GrumpRun(textwrap.dedent("""\ try: print 'foo', except: print 'bar' else: print 'baz'"""))) def testTryMultipleExcept(self): self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\ try: raise AssertionError except RuntimeError: print 'foo' except AssertionError: print 'bar' except: print 'baz'"""))) def testTryFinally(self): result = _GrumpRun(textwrap.dedent("""\ try: print 'foo', finally: print 'bar' try: print 'foo', raise Exception finally: print 'bar'""")) self.assertEqual(1, result[0]) self.assertIn('foo 
bar\nfoo bar\n', result[1]) self.assertIn('Exception\n', result[1]) def testWhile(self): self.assertEqual((0, '2\n1\n'), _GrumpRun(textwrap.dedent("""\ i = 2 while i: print i i -= 1"""))) def testWhileElse(self): self.assertEqual((0, 'bar\n'), _GrumpRun(textwrap.dedent("""\ while False: print 'foo' else: print 'bar'"""))) def testWith(self): self.assertEqual((0, 'enter\n1\nexit\nenter\n2\nexit\n3\n'), _GrumpRun(textwrap.dedent("""\ class ContextManager(object): def __enter__(self): print "enter" def __exit__(self, exc_type, value, traceback): print "exit" a = ContextManager() with a: print 1 try: with a: print 2 raise RuntimeError except RuntimeError: print 3 """))) def testWithAs(self): self.assertEqual((0, '1 2 3\n'), _GrumpRun(textwrap.dedent("""\ class ContextManager(object): def __enter__(self): return (1, (2, 3)) def __exit__(self, *args): pass with ContextManager() as [x, (y, z)]: print x, y, z """))) def testWriteExceptDispatcherBareExcept(self): visitor = stmt.StatementVisitor(_MakeModuleBlock()) handlers = [ast.ExceptHandler(type=ast.Name(id='foo')), ast.ExceptHandler(type=None)] self.assertEqual(visitor._write_except_dispatcher( # pylint: disable=protected-access 'exc', 'tb', handlers), [1, 2]) expected = re.compile(r'ResolveGlobal\(.*foo.*\bIsInstance\(.*' r'goto Label1.*goto Label2', re.DOTALL) self.assertRegexpMatches(visitor.writer.getvalue(), expected) def testWriteExceptDispatcherBareExceptionNotLast(self): visitor = stmt.StatementVisitor(_MakeModuleBlock()) handlers = [ast.ExceptHandler(type=None), ast.ExceptHandler(type=ast.Name(id='foo'))] self.assertRaisesRegexp(util.ParseError, r"default 'except:' must be last", visitor._write_except_dispatcher, # pylint: disable=protected-access 'exc', 'tb', handlers) def testWriteExceptDispatcherMultipleExcept(self): visitor = stmt.StatementVisitor(_MakeModuleBlock()) handlers = [ast.ExceptHandler(type=ast.Name(id='foo')), ast.ExceptHandler(type=ast.Name(id='bar'))] self.assertEqual(visitor._write_except_dispatcher( # pylint: disable=protected-access 'exc', 'tb', handlers), [1, 2]) expected = re.compile( r'ResolveGlobal\(.*foo.*\bif .*\bIsInstance\(.*\{.*goto Label1.*' r'ResolveGlobal\(.*bar.*\bif .*\bIsInstance\(.*\{.*goto Label2.*' r'\bRaise\(exc\.ToObject\(\), nil, tb\.ToObject\(\)\)', re.DOTALL) self.assertRegexpMatches(visitor.writer.getvalue(), expected) def _MakeModuleBlock(): return block.ModuleBlock(None, '__main__', '<test>', '', imputil.FutureFeatures()) def _ParseAndVisit(source): mod = pythonparser.parse(source) _, future_features = imputil.parse_future_features(mod) importer = imputil.Importer(None, 'foo', 'foo.py', False) b = block.ModuleBlock(importer, '__main__', '<test>', source, future_features) visitor = stmt.StatementVisitor(b) visitor.visit(mod) return visitor def _GrumpRun(cmd): p = subprocess.Popen(['grumpy', 'run'], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) out, _ = p.communicate(cmd) return p.returncode, out if __name__ == '__main__': shard_test.main()
corona10/grumpy
grumpy-tools-src/grumpy_tools/compiler/stmt_test.py
Python
apache-2.0
16,929
/* * Copyright 2017-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.lettuce.core.cluster.api.async; import java.util.List; import java.util.Set; import io.lettuce.core.GeoAddArgs; import io.lettuce.core.GeoArgs; import io.lettuce.core.GeoCoordinates; import io.lettuce.core.GeoRadiusStoreArgs; import io.lettuce.core.GeoSearch; import io.lettuce.core.GeoValue; import io.lettuce.core.GeoWithin; import io.lettuce.core.Value; /** * Asynchronous executed commands on a node selection for the Geo-API. * * @author Mark Paluch * @since 4.0 * @generated by io.lettuce.apigenerator.CreateAsyncNodeSelectionClusterApi */ public interface NodeSelectionGeoAsyncCommands<K, V> { /** * Single geo add. * * @param key the key of the geo set. * @param longitude the longitude coordinate according to WGS84. * @param latitude the latitude coordinate according to WGS84. * @param member the member to add. * @return Long integer-reply the number of elements that were added to the set. */ AsyncExecutions<Long> geoadd(K key, double longitude, double latitude, V member); /** * Single geo add. * * @param key the key of the geo set. * @param longitude the longitude coordinate according to WGS84. * @param latitude the latitude coordinate according to WGS84. * @param member the member to add. * @param args additional arguments. * @return Long integer-reply the number of elements that were added to the set. * @since 6.1 */ AsyncExecutions<Long> geoadd(K key, double longitude, double latitude, V member, GeoAddArgs args); /** * Multi geo add. * * @param key the key of the geo set. * @param lngLatMember triplets of double longitude, double latitude and V member. * @return Long integer-reply the number of elements that were added to the set. */ AsyncExecutions<Long> geoadd(K key, Object... lngLatMember); /** * Multi geo add. * * @param key the key of the geo set. * @param values {@link io.lettuce.core.GeoValue} values to add. * @return Long integer-reply the number of elements that were added to the set. * @since 6.1 */ AsyncExecutions<Long> geoadd(K key, GeoValue<V>... values); /** * Multi geo add. * * @param key the key of the geo set. * @param args additional arguments. * @param lngLatMember triplets of double longitude, double latitude and V member. * @return Long integer-reply the number of elements that were added to the set. * @since 6.1 */ AsyncExecutions<Long> geoadd(K key, GeoAddArgs args, Object... lngLatMember); /** * Multi geo add. * * @param key the key of the geo set. * @param args additional arguments. * @param values {@link io.lettuce.core.GeoValue} values to add. * @return Long integer-reply the number of elements that were added to the set. * @since 6.1 */ AsyncExecutions<Long> geoadd(K key, GeoAddArgs args, GeoValue<V>... values); /** * Retrieve distance between points {@code from} and {@code to}. If one or more elements are missing {@code null} is * returned. Default in meters by, otherwise according to {@code unit} * * @param key the key of the geo set. 
* @param from from member. * @param to to member. * @param unit distance unit. * @return distance between points {@code from} and {@code to}. If one or more elements are missing {@code null} is * returned. */ AsyncExecutions<Double> geodist(K key, V from, V to, GeoArgs.Unit unit); /** * Retrieve Geohash strings representing the position of one or more elements in a sorted set value representing a * geospatial index. * * @param key the key of the geo set. * @param members the members. * @return bulk reply Geohash strings in the order of {@code members}. Returns {@code null} if a member is not found. */ AsyncExecutions<List<Value<String>>> geohash(K key, V... members); /** * Get geo coordinates for the {@code members}. * * @param key the key of the geo set. * @param members the members. * @return a list of {@link GeoCoordinates}s representing the x,y position of each element specified in the arguments. For * missing elements {@code null} is returned. */ AsyncExecutions<List<GeoCoordinates>> geopos(K key, V... members); /** * Retrieve members selected by distance with the center of {@code longitude} and {@code latitude}. * * @param key the key of the geo set. * @param longitude the longitude coordinate according to WGS84. * @param latitude the latitude coordinate according to WGS84. * @param distance radius distance. * @param unit distance unit. * @return bulk reply. */ AsyncExecutions<Set<V>> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit); /** * Retrieve members selected by distance with the center of {@code longitude} and {@code latitude}. * * @param key the key of the geo set. * @param longitude the longitude coordinate according to WGS84. * @param latitude the latitude coordinate according to WGS84. * @param distance radius distance. * @param unit distance unit. * @param geoArgs args to control the result. * @return nested multi-bulk reply. The {@link GeoWithin} contains only fields which were requested by {@link GeoArgs}. */ AsyncExecutions<List<GeoWithin<V>>> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit, GeoArgs geoArgs); /** * Perform a {@link #georadius(Object, double, double, double, GeoArgs.Unit, GeoArgs)} query and store the results in a * sorted set. * * @param key the key of the geo set. * @param longitude the longitude coordinate according to WGS84. * @param latitude the latitude coordinate according to WGS84. * @param distance radius distance. * @param unit distance unit. * @param geoRadiusStoreArgs args to store either the resulting elements with their distance or the resulting elements with * their locations a sorted set. * @return Long integer-reply the number of elements in the result. */ AsyncExecutions<Long> georadius(K key, double longitude, double latitude, double distance, GeoArgs.Unit unit, GeoRadiusStoreArgs<K> geoRadiusStoreArgs); /** * Retrieve members selected by distance with the center of {@code member}. The member itself is always contained in the * results. * * @param key the key of the geo set. * @param member reference member. * @param distance radius distance. * @param unit distance unit. * @return set of members. */ AsyncExecutions<Set<V>> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit); /** * Retrieve members selected by distance with the center of {@code member}. The member itself is always contained in the * results. * * @param key the key of the geo set. * @param member reference member. * @param distance radius distance. 
* @param unit distance unit. * @param geoArgs args to control the result. * @return nested multi-bulk reply. The {@link GeoWithin} contains only fields which were requested by {@link GeoArgs}. */ AsyncExecutions<List<GeoWithin<V>>> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit, GeoArgs geoArgs); /** * Perform a {@link #georadiusbymember(Object, Object, double, GeoArgs.Unit, GeoArgs)} query and store the results in a * sorted set. * * @param key the key of the geo set. * @param member reference member. * @param distance radius distance. * @param unit distance unit. * @param geoRadiusStoreArgs args to store either the resulting elements with their distance or the resulting elements with * their locations a sorted set. * @return Long integer-reply the number of elements in the result. */ AsyncExecutions<Long> georadiusbymember(K key, V member, double distance, GeoArgs.Unit unit, GeoRadiusStoreArgs<K> geoRadiusStoreArgs); /** * Retrieve members selected by distance with the center of {@code reference} the search {@code predicate}. * Use {@link GeoSearch} to create reference and predicate objects. * * @param key the key of the geo set. * @param reference the reference member or longitude/latitude coordinates. * @param predicate the bounding box or radius to search in. * @return bulk reply. * @since 6.1 */ AsyncExecutions<Set<V>> geosearch(K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate); /** * Retrieve members selected by distance with the center of {@code reference} the search {@code predicate}. * Use {@link GeoSearch} to create reference and predicate objects. * * @param key the key of the geo set. * @param reference the reference member or longitude/latitude coordinates. * @param predicate the bounding box or radius to search in. * @param geoArgs args to control the result. * @return nested multi-bulk reply. The {@link GeoWithin} contains only fields which were requested by {@link GeoArgs}. * @since 6.1 */ AsyncExecutions<List<GeoWithin<V>>> geosearch(K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate, GeoArgs geoArgs); /** * Perform a {@link #geosearch(Object, GeoSearch.GeoRef, GeoSearch.GeoPredicate, GeoArgs)} query and store the results in a * sorted set. * * @param destination the destination where to store results. * @param key the key of the geo set. * @param reference the reference member or longitude/latitude coordinates. * @param predicate the bounding box or radius to search in. * @param geoArgs args to control the result. * @param storeDist stores the items in a sorted set populated with their distance from the center of the circle or box, as a floating-point number, in the same unit specified for that shape. * @return Long integer-reply the number of elements in the result. * @since 6.1 */ AsyncExecutions<Long> geosearchstore(K destination, K key, GeoSearch.GeoRef<K> reference, GeoSearch.GeoPredicate predicate, GeoArgs geoArgs, boolean storeDist); }
lettuce-io/lettuce-core
src/main/java/io/lettuce/core/cluster/api/async/NodeSelectionGeoAsyncCommands.java
Java
apache-2.0
11,078
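A brief usage sketch for the NodeSelectionGeoAsyncCommands interface above. This is not taken from the lettuce sources: the way the node selection is obtained and how the per-node results are awaited are assumptions, and the class/method names below (GeoSelectionSketch, recordSights) are hypothetical; only the geoadd/geopos/geodist signatures and the AsyncExecutions return type come from the interface itself.

import io.lettuce.core.GeoArgs;
import io.lettuce.core.GeoCoordinates;
import io.lettuce.core.cluster.api.async.AsyncExecutions;
import io.lettuce.core.cluster.api.async.NodeSelectionGeoAsyncCommands;

import java.util.List;

/**
 * Illustrative only: exercises the geoadd/geopos/geodist signatures declared in
 * NodeSelectionGeoAsyncCommands. How the selection is created from a cluster
 * connection is assumed to be handled by the caller and is not shown here.
 */
final class GeoSelectionSketch {

    static void recordSights(NodeSelectionGeoAsyncCommands<String, String> geo) {
        // Add two members to the "Sicily" geo set on every node in the selection.
        AsyncExecutions<Long> added =
                geo.geoadd("Sicily", 13.361389, 38.115556, "Palermo");
        geo.geoadd("Sicily", 15.087269, 37.502669, "Catania");

        // Ask each node for the stored coordinates and the pairwise distance in km.
        AsyncExecutions<List<GeoCoordinates>> positions =
                geo.geopos("Sicily", "Palermo", "Catania");
        AsyncExecutions<Double> distance =
                geo.geodist("Sicily", "Palermo", "Catania", GeoArgs.Unit.km);

        // Awaiting or aggregating the per-node futures held by AsyncExecutions is
        // deliberately left out: that behaviour is not defined by this interface.
    }

    private GeoSelectionSketch() {
    }
}

The sketch takes the selection as a parameter rather than building one, so it stays within what the displayed interface guarantees; everything about connection setup would depend on the surrounding cluster API.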
<html> <head> <meta http-equiv="Content-Type" content="text/html;charset=utf-8"> <link rel="stylesheet" type="text/css" href="licenses.css"> </head> <body> <h2>fh-service-mongodb-cloud</h2> <table> <tr> <th>Package Group</th> <th>Package Artifact</th> <th>Package Version</th> <th>Remote Licenses</th> <th>Local Licenses</th> </tr> <tr> <td>N/A</td> <td>accepts</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>accepts</td> <td>1.3.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ACCEPTS_MIT.TXT>ACCEPTS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>adm-zip</td> <td>0.4.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>ajv</td> <td>4.11.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=AJV_MIT.TXT>AJV_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ajv</td> <td>5.5.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=AJV_MIT.TXT>AJV_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>amqp</td> <td>0.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>ansi-regex</td> <td>2.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ANSI-REGEX_MIT.TXT>ANSI-REGEX_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ansi-styles</td> <td>2.2.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ANSI-STYLES_MIT.TXT>ANSI-STYLES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>append-field</td> <td>0.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=APPEND-FIELD_MIT.TXT>APPEND-FIELD_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>archiver</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ARCHIVER_MIT.TXT>ARCHIVER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>archiver-utils</td> <td>1.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ARCHIVER-UTILS_MIT.TXT>ARCHIVER-UTILS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>array-flatten</td> <td>1.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ARRAY-FLATTEN_MIT.TXT>ARRAY-FLATTEN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>asn1</td> <td>0.2.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ASN1_MIT.TXT>ASN1_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>assert-plus</td> <td>0.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>assert-plus</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>async</td> <td>0.2.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ASYNC_MIT.TXT>ASYNC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>async</td> <td>1.5.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ASYNC_MIT.TXT>ASYNC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>async</td> <td>2.1.5</td> 
<td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ASYNC_MIT.TXT>ASYNC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>async</td> <td>0.2.9</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ASYNC_MIT.TXT>ASYNC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>async-listener</td> <td>0.6.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-2-Clause</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>asynckit</td> <td>0.4.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ASYNCKIT_MIT.TXT>ASYNCKIT_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>aws-sign2</td> <td>0.6.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=AWS-SIGN2_APACHE-2.0.TXT>AWS-SIGN2_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>aws-sign2</td> <td>0.7.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=AWS-SIGN2_APACHE-2.0.TXT>AWS-SIGN2_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>aws4</td> <td>1.6.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=AWS4_MIT.TXT>AWS4_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>backoff</td> <td>2.5.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BACKOFF_MIT.TXT>BACKOFF_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>balanced-match</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BALANCED-MATCH_MIT.TXT>BALANCED-MATCH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>balanced-match</td> <td>0.4.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BALANCED-MATCH_MIT.TXT>BALANCED-MATCH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bcrypt-pbkdf</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>bl</td> <td>1.2.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BL_MIT.TXT>BL_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bluebird</td> <td>3.5.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BLUEBIRD_MIT.TXT>BLUEBIRD_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>body-parser</td> <td>1.18.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BODY-PARSER_MIT.TXT>BODY-PARSER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>body-parser</td> <td>1.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>boom</td> <td>2.10.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=BOOM_BSD-3-CLAUSE.TXT>BOOM_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>boom</td> <td>4.3.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=BOOM_BSD-3-CLAUSE.TXT>BOOM_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>boom</td> <td>5.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=BOOM_BSD-3-CLAUSE.TXT>BOOM_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>brace-expansion</td> <td>1.1.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the 
dependency</a></td> </tr> <tr> <td>N/A</td> <td>brace-expansion</td> <td>1.1.8</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>bson</td> <td>0.4.22</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=BSON_APACHE-2.0.TXT>BSON_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bson</td> <td>0.4.23</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=BSON_APACHE-2.0.TXT>BSON_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bson</td> <td>1.0.6</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=BSON_APACHE-2.0.TXT>BSON_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>buffer-crc32</td> <td>0.2.1</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>buffer-crc32</td> <td>0.2.13</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BUFFER-CRC32_MIT.TXT>BUFFER-CRC32_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bunyan</td> <td>1.8.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BUNYAN_MIT.TXT>BUNYAN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bunyan</td> <td>1.8.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BUNYAN_MIT.TXT>BUNYAN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>busboy</td> <td>0.2.14</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BUSBOY_MIT.TXT>BUSBOY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>bytes</td> <td>1.0.0</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>bytes</td> <td>3.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=BYTES_MIT.TXT>BYTES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>caseless</td> <td>0.11.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=CASELESS_APACHE-2.0.TXT>CASELESS_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>caseless</td> <td>0.12.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=CASELESS_APACHE-2.0.TXT>CASELESS_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>chalk</td> <td>1.1.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CHALK_MIT.TXT>CHALK_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>co</td> <td>4.6.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CO_MIT.TXT>CO_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>combined-stream</td> <td>1.0.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=COMBINED-STREAM_MIT.TXT>COMBINED-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>combined-stream</td> <td>1.0.6</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=COMBINED-STREAM_MIT.TXT>COMBINED-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>commander</td> <td>2.15.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=COMMANDER_MIT.TXT>COMMANDER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>compress-commons</td> <td>1.2.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=COMPRESS-COMMONS_MIT.TXT>COMPRESS-COMMONS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>concat-map</td> <td>0.0.1</td> 
<td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CONCAT-MAP_MIT.TXT>CONCAT-MAP_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>concat-stream</td> <td>1.6.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CONCAT-STREAM_MIT.TXT>CONCAT-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>connection-parse</td> <td>0.0.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>content-disposition</td> <td>0.5.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CONTENT-DISPOSITION_MIT.TXT>CONTENT-DISPOSITION_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>content-type</td> <td>1.0.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CONTENT-TYPE_MIT.TXT>CONTENT-TYPE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>continuation-local-storage</td> <td>3.1.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-2-Clause</td> <td><a href=CONTINUATION-LOCAL-STORAGE_BSD-2-CLAUSE.TXT>CONTINUATION-LOCAL-STORAGE_BSD-2-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cookie</td> <td>0.3.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=COOKIE_MIT.TXT>COOKIE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cookie</td> <td>0.1.0</td> <td>UNKNOWN</td> <td><a href=COOKIE_MIT*.TXT>COOKIE_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cookie-signature</td> <td>1.0.3</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>cookie-signature</td> <td>1.0.6</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>core-util-is</td> <td>1.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CORE-UTIL-IS_MIT.TXT>CORE-UTIL-IS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cors</td> <td>2.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CORS_MIT.TXT>CORS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cors</td> <td>2.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CORS_MIT.TXT>CORS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>crc</td> <td>3.5.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CRC_MIT.TXT>CRC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>crc32-stream</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=CRC32-STREAM_MIT.TXT>CRC32-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cryptiles</td> <td>3.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=CRYPTILES_BSD-3-CLAUSE.TXT>CRYPTILES_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>cryptiles</td> <td>2.0.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=CRYPTILES_BSD-3-CLAUSE.TXT>CRYPTILES_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>csvtojson</td> <td>0.3.6</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>dashdash</td> <td>1.14.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DASHDASH_MIT.TXT>DASHDASH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>debug</td> <td>0.8.1</td> 
<td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>debug</td> <td>2.6.9</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DEBUG_MIT.TXT>DEBUG_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>deep-extend</td> <td>0.2.11</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DEEP-EXTEND_MIT.TXT>DEEP-EXTEND_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>delayed-stream</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DELAYED-STREAM_MIT.TXT>DELAYED-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>depd</td> <td>1.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DEPD_MIT.TXT>DEPD_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>destroy</td> <td>1.0.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DESTROY_MIT.TXT>DESTROY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>dicer</td> <td>0.2.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DICER_MIT.TXT>DICER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>double-ended-queue</td> <td>2.1.0-0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=DOUBLE-ENDED-QUEUE_MIT.TXT>DOUBLE-ENDED-QUEUE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>dtrace-provider</td> <td>0.6.0</td> <td>UNKNOWN</td> <td><a href=DTRACE-PROVIDER_BSD*.TXT>DTRACE-PROVIDER_BSD*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>dtrace-provider</td> <td>0.7.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-2-Clause</td> <td><a href=DTRACE-PROVIDER_BSD-2-CLAUSE.TXT>DTRACE-PROVIDER_BSD-2-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ecc-jsbn</td> <td>0.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ECC-JSBN_MIT.TXT>ECC-JSBN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ee-first</td> <td>1.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EE-FIRST_MIT.TXT>EE-FIRST_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>emitter-listener</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-2-Clause</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>encodeurl</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ENCODEURL_MIT.TXT>ENCODEURL_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>end-of-stream</td> <td>1.4.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=END-OF-STREAM_MIT.TXT>END-OF-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>env-var</td> <td>2.4.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>es6-promise</td> <td>3.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ES6-PROMISE_MIT.TXT>ES6-PROMISE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>escape-html</td> <td>1.0.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ESCAPE-HTML_MIT.TXT>ESCAPE-HTML_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>escape-html</td> <td>1.0.1</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>escape-string-regexp</td> <td>1.0.5</td> 
<td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ESCAPE-STRING-REGEXP_MIT.TXT>ESCAPE-STRING-REGEXP_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>etag</td> <td>1.8.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ETAG_MIT.TXT>ETAG_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>express</td> <td>4.16.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EXPRESS_MIT.TXT>EXPRESS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>express</td> <td>4.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EXPRESS_MIT.TXT>EXPRESS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>extend</td> <td>3.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EXTEND_MIT.TXT>EXTEND_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>extend</td> <td>3.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EXTEND_MIT.TXT>EXTEND_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>extsprintf</td> <td>1.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EXTSPRINTF_MIT.TXT>EXTSPRINTF_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>extsprintf</td> <td>1.0.2</td> <td>UNKNOWN</td> <td><a href=EXTSPRINTF_MIT*.TXT>EXTSPRINTF_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>extsprintf</td> <td>1.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=EXTSPRINTF_MIT.TXT>EXTSPRINTF_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fast-deep-equal</td> <td>1.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FAST-DEEP-EQUAL_MIT.TXT>FAST-DEEP-EQUAL_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fast-json-stable-stringify</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FAST-JSON-STABLE-STRINGIFY_MIT.TXT>FAST-JSON-STABLE-STRINGIFY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-amqp-js</td> <td>0.7.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-AMQP-JS_APACHE-2.0.TXT>FH-AMQP-JS_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-component-metrics</td> <td>2.7.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-COMPONENT-METRICS_APACHE-2.0.TXT>FH-COMPONENT-METRICS_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-db</td> <td>3.3.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-DB_APACHE-2.0.TXT>FH-DB_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-logger</td> <td>0.5.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-LOGGER_APACHE-2.0.TXT>FH-LOGGER_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-mbaas-api</td> <td>8.2.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-MBAAS-API_APACHE-2.0.TXT>FH-MBAAS-API_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-mbaas-client</td> <td>0.16.5</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-MBAAS-CLIENT_APACHE-2.0.TXT>FH-MBAAS-CLIENT_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-mbaas-client</td> <td>1.1.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-MBAAS-CLIENT_APACHE-2.0.TXT>FH-MBAAS-CLIENT_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-mbaas-express</td> <td>5.10.0</td> 
<td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-MBAAS-EXPRESS_APACHE-2.0.TXT>FH-MBAAS-EXPRESS_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-mongodb-queue</td> <td>3.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FH-MONGODB-QUEUE_MIT.TXT>FH-MONGODB-QUEUE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-reportingclient</td> <td>0.5.7</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-REPORTINGCLIENT_APACHE-2.0.TXT>FH-REPORTINGCLIENT_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-security</td> <td>0.2.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-SECURITY_APACHE-2.0.TXT>FH-SECURITY_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-service-mongodb-cloud</td> <td>0.2.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-SERVICE-MONGODB-CLOUD_APACHE-2.0.TXT>FH-SERVICE-MONGODB-CLOUD_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-statsc</td> <td>0.3.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-STATSC_APACHE-2.0.TXT>FH-STATSC_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fh-sync</td> <td>1.0.14</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FH-SYNC_APACHE-2.0.TXT>FH-SYNC_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>finalhandler</td> <td>1.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FINALHANDLER_MIT.TXT>FINALHANDLER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>forever-agent</td> <td>0.6.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=FOREVER-AGENT_APACHE-2.0.TXT>FOREVER-AGENT_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>form-data</td> <td>2.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FORM-DATA_MIT.TXT>FORM-DATA_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>form-data</td> <td>2.1.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FORM-DATA_MIT.TXT>FORM-DATA_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>form-data</td> <td>2.3.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FORM-DATA_MIT.TXT>FORM-DATA_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>forwarded</td> <td>0.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FORWARDED_MIT.TXT>FORWARDED_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fresh</td> <td>0.2.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>fresh</td> <td>0.2.0</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>fresh</td> <td>0.5.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=FRESH_MIT.TXT>FRESH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>fs.realpath</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=FS.REALPATH_ISC.TXT>FS.REALPATH_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>generate-function</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>generate-object-property</td> <td>1.2.0</td> 
<td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=GENERATE-OBJECT-PROPERTY_MIT.TXT>GENERATE-OBJECT-PROPERTY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>getpass</td> <td>0.1.6</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=GETPASS_MIT.TXT>GETPASS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>getpass</td> <td>0.1.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=GETPASS_MIT.TXT>GETPASS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>glob</td> <td>6.0.4</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=GLOB_ISC.TXT>GLOB_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>glob</td> <td>7.1.2</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=GLOB_ISC.TXT>GLOB_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>graceful-fs</td> <td>4.1.11</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=GRACEFUL-FS_ISC.TXT>GRACEFUL-FS_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>har-schema</td> <td>1.0.5</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=HAR-SCHEMA_ISC.TXT>HAR-SCHEMA_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>har-schema</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=HAR-SCHEMA_ISC.TXT>HAR-SCHEMA_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>har-validator</td> <td>2.0.6</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=HAR-VALIDATOR_ISC.TXT>HAR-VALIDATOR_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>har-validator</td> <td>4.2.1</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=HAR-VALIDATOR_ISC.TXT>HAR-VALIDATOR_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>har-validator</td> <td>5.0.3</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=HAR-VALIDATOR_ISC.TXT>HAR-VALIDATOR_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>has-ansi</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=HAS-ANSI_MIT.TXT>HAS-ANSI_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>hashring</td> <td>3.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=HASHRING_MIT.TXT>HASHRING_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>hawk</td> <td>3.1.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=HAWK_BSD-3-CLAUSE.TXT>HAWK_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>hawk</td> <td>6.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=HAWK_BSD-3-CLAUSE.TXT>HAWK_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>hoek</td> <td>2.16.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=HOEK_BSD-3-CLAUSE.TXT>HOEK_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>hoek</td> <td>4.2.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=HOEK_BSD-3-CLAUSE.TXT>HOEK_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>http-errors</td> <td>1.6.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=HTTP-ERRORS_MIT.TXT>HTTP-ERRORS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>http-signature</td> <td>1.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=HTTP-SIGNATURE_MIT.TXT>HTTP-SIGNATURE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>http-signature</td> 
<td>1.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=HTTP-SIGNATURE_MIT.TXT>HTTP-SIGNATURE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>iconv-lite</td> <td>0.4.19</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ICONV-LITE_MIT.TXT>ICONV-LITE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>inflight</td> <td>1.0.5</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=INFLIGHT_ISC.TXT>INFLIGHT_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>inflight</td> <td>1.0.6</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=INFLIGHT_ISC.TXT>INFLIGHT_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>inherits</td> <td>2.0.1</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=INHERITS_ISC.TXT>INHERITS_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>inherits</td> <td>2.0.3</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=INHERITS_ISC.TXT>INHERITS_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ini</td> <td>1.1.0</td> <td>UNKNOWN</td> <td><a href=INI_MIT*.TXT>INI_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ipaddr.js</td> <td>1.5.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=IPADDR.JS_MIT.TXT>IPADDR.JS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>is-my-ip-valid</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>is-my-json-valid</td> <td>2.17.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=IS-MY-JSON-VALID_MIT.TXT>IS-MY-JSON-VALID_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>is-property</td> <td>1.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=IS-PROPERTY_MIT.TXT>IS-PROPERTY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>is-typedarray</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=IS-TYPEDARRAY_MIT.TXT>IS-TYPEDARRAY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>isarray</td> <td>0.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>isarray</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>isstream</td> <td>0.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ISSTREAM_MIT.TXT>ISSTREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>jackpot</td> <td>0.0.6</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>jcsv</td> <td>0.0.3</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>jodid25519</td> <td>1.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JODID25519_MIT.TXT>JODID25519_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>jsbn</td> <td>0.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JSBN_MIT.TXT>JSBN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>json-schema</td> <td>0.2.3</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> 
<td>json-schema-traverse</td> <td>0.3.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JSON-SCHEMA-TRAVERSE_MIT.TXT>JSON-SCHEMA-TRAVERSE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>json-stable-stringify</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JSON-STABLE-STRINGIFY_MIT.TXT>JSON-STABLE-STRINGIFY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>json-stringify-safe</td> <td>5.0.1</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=JSON-STRINGIFY-SAFE_ISC.TXT>JSON-STRINGIFY-SAFE_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>jsonify</td> <td>0.0.0</td> <td>UNKNOWN</td> <td><a href=JSONIFY_PUBLIC%20DOMAIN.TXT>JSONIFY_PUBLIC DOMAIN.TXT</a></td> </tr> <tr> <td>N/A</td> <td>jsonpointer</td> <td>4.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JSONPOINTER_MIT.TXT>JSONPOINTER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>jsprim</td> <td>1.4.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JSPRIM_MIT.TXT>JSPRIM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>jsprim</td> <td>1.4.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=JSPRIM_MIT.TXT>JSPRIM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>lazystream</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>lodash</td> <td>3.10.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=LODASH_MIT.TXT>LODASH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>lodash</td> <td>1.3.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>lodash</td> <td>4.17.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=LODASH_MIT.TXT>LODASH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>lodash</td> <td>4.17.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=LODASH_MIT.TXT>LODASH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>lodash</td> <td>2.4.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=LODASH_MIT.TXT>LODASH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>lodash</td> <td>3.9.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=LODASH_MIT.TXT>LODASH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>lodash-contrib</td> <td>393.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=LODASH-CONTRIB_MIT.TXT>LODASH-CONTRIB_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>media-typer</td> <td>0.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MEDIA-TYPER_MIT.TXT>MEDIA-TYPER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>memcached</td> <td>2.2.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MEMCACHED_MIT.TXT>MEMCACHED_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>merge-descriptors</td> <td>0.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>merge-descriptors</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a 
href=MERGE-DESCRIPTORS_MIT.TXT>MERGE-DESCRIPTORS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>methods</td> <td>0.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>methods</td> <td>1.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=METHODS_MIT.TXT>METHODS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime</td> <td>1.2.11</td> <td>UNKNOWN</td> <td><a href=MIME_MIT*.TXT>MIME_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime</td> <td>1.4.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME_MIT.TXT>MIME_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-db</td> <td>1.33.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-DB_MIT.TXT>MIME-DB_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-db</td> <td>1.26.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-DB_MIT.TXT>MIME-DB_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-db</td> <td>1.30.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-DB_MIT.TXT>MIME-DB_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-types</td> <td>2.1.14</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-TYPES_MIT.TXT>MIME-TYPES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-types</td> <td>2.1.17</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-TYPES_MIT.TXT>MIME-TYPES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-types</td> <td>2.1.18</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-TYPES_MIT.TXT>MIME-TYPES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mime-types</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MIME-TYPES_MIT.TXT>MIME-TYPES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>minimatch</td> <td>3.0.2</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=MINIMATCH_ISC.TXT>MINIMATCH_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>minimatch</td> <td>3.0.4</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=MINIMATCH_ISC.TXT>MINIMATCH_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>minimist</td> <td>0.0.8</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MINIMIST_MIT.TXT>MINIMIST_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mkdirp</td> <td>0.5.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MKDIRP_MIT.TXT>MKDIRP_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>moment</td> <td>2.13.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MOMENT_MIT.TXT>MOMENT_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>moment</td> <td>2.18.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MOMENT_MIT.TXT>MOMENT_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mongodb</td> <td>3.0.5</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=MONGODB_APACHE-2.0.TXT>MONGODB_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mongodb</td> <td>2.1.18</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=MONGODB_APACHE-2.0.TXT>MONGODB_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mongodb-core</td> <td>1.3.18</td> 
<td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=MONGODB-CORE_APACHE-2.0.TXT>MONGODB-CORE_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mongodb-core</td> <td>3.0.5</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=MONGODB-CORE_APACHE-2.0.TXT>MONGODB-CORE_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mongodb-lock</td> <td>0.4.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MONGODB-LOCK_MIT.TXT>MONGODB-LOCK_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mongodb-uri</td> <td>0.9.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MONGODB-URI_MIT.TXT>MONGODB-URI_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ms</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MS_MIT.TXT>MS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>multer</td> <td>1.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MULTER_MIT.TXT>MULTER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>mv</td> <td>2.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=MV_MIT.TXT>MV_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>nan</td> <td>2.3.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NAN_MIT.TXT>NAN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>nan</td> <td>2.7.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NAN_MIT.TXT>NAN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>ncp</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NCP_MIT.TXT>NCP_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>negotiator</td> <td>0.6.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NEGOTIATOR_MIT.TXT>NEGOTIATOR_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>negotiator</td> <td>0.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NEGOTIATOR_MIT.TXT>NEGOTIATOR_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>node-rsa</td> <td>0.3.2</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>node-uuid</td> <td>1.4.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NODE-UUID_MIT.TXT>NODE-UUID_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>normalize-path</td> <td>2.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=NORMALIZE-PATH_MIT.TXT>NORMALIZE-PATH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>oauth-sign</td> <td>0.8.2</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=OAUTH-SIGN_APACHE-2.0.TXT>OAUTH-SIGN_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>object-assign</td> <td>3.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=OBJECT-ASSIGN_MIT.TXT>OBJECT-ASSIGN_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>on-finished</td> <td>2.3.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ON-FINISHED_MIT.TXT>ON-FINISHED_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>once</td> <td>1.3.3</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=ONCE_ISC.TXT>ONCE_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>once</td> <td>1.4.0</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=ONCE_ISC.TXT>ONCE_ISC.TXT</a></td> 
</tr> <tr> <td>N/A</td> <td>optimist</td> <td>0.3.7</td> <td>UNKNOWN</td> <td><a href=OPTIMIST_MIT*.TXT>OPTIMIST_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>optval</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=OPTVAL_MIT.TXT>OPTVAL_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>parse-duration</td> <td>0.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>parseurl</td> <td>1.3.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PARSEURL_MIT.TXT>PARSEURL_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>parseurl</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>path-is-absolute</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PATH-IS-ABSOLUTE_MIT.TXT>PATH-IS-ABSOLUTE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>path-is-absolute</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PATH-IS-ABSOLUTE_MIT.TXT>PATH-IS-ABSOLUTE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>path-to-regexp</td> <td>0.1.2</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>path-to-regexp</td> <td>0.1.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PATH-TO-REGEXP_MIT.TXT>PATH-TO-REGEXP_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>performance-now</td> <td>2.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PERFORMANCE-NOW_MIT.TXT>PERFORMANCE-NOW_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>performance-now</td> <td>0.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PERFORMANCE-NOW_MIT.TXT>PERFORMANCE-NOW_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>pinkie</td> <td>2.0.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PINKIE_MIT.TXT>PINKIE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>pinkie-promise</td> <td>2.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PINKIE-PROMISE_MIT.TXT>PINKIE-PROMISE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>precond</td> <td>0.2.3</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>process-nextick-args</td> <td>1.0.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PROCESS-NEXTICK-ARGS_MIT.TXT>PROCESS-NEXTICK-ARGS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>proxy-addr</td> <td>2.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=PROXY-ADDR_MIT.TXT>PROXY-ADDR_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>punycode</td> <td>1.4.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>qs</td> <td>0.6.6</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>qs</td> <td>6.3.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=QS_BSD-3-CLAUSE.TXT>QS_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>qs</td> <td>6.4.0</td> 
<td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=QS_BSD-3-CLAUSE.TXT>QS_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>qs</td> <td>6.5.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=QS_BSD-3-CLAUSE.TXT>QS_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>range-parser</td> <td>0.0.4</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>range-parser</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>range-parser</td> <td>1.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=RANGE-PARSER_MIT.TXT>RANGE-PARSER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>raw-body</td> <td>2.3.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=RAW-BODY_MIT.TXT>RAW-BODY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>raw-body</td> <td>1.1.7</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>rc</td> <td>0.1.1</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>readable-stream</td> <td>2.3.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=READABLE-STREAM_MIT.TXT>READABLE-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>readable-stream</td> <td>1.0.31</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=READABLE-STREAM_MIT.TXT>READABLE-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>readable-stream</td> <td>1.1.14</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=READABLE-STREAM_MIT.TXT>READABLE-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>redis</td> <td>2.6.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=REDIS_MIT.TXT>REDIS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>redis</td> <td>2.8.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=REDIS_MIT.TXT>REDIS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>redis-commands</td> <td>1.3.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=REDIS-COMMANDS_MIT.TXT>REDIS-COMMANDS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>redis-parser</td> <td>2.6.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=REDIS-PARSER_MIT.TXT>REDIS-PARSER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>remove-trailing-separator</td> <td>1.1.0</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=REMOVE-TRAILING-SEPARATOR_ISC.TXT>REMOVE-TRAILING-SEPARATOR_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>request</td> <td>2.83.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=REQUEST_APACHE-2.0.TXT>REQUEST_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>request</td> <td>2.79.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=REQUEST_APACHE-2.0.TXT>REQUEST_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>request</td> <td>2.81.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=REQUEST_APACHE-2.0.TXT>REQUEST_APACHE-2.0.TXT</a></td> </tr> 
<tr> <td>N/A</td> <td>require_optional</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=REQUIRE_OPTIONAL_APACHE-2.0.TXT>REQUIRE_OPTIONAL_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>resolve-from</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=RESOLVE-FROM_MIT.TXT>RESOLVE-FROM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>retry</td> <td>0.6.0</td> <td>UNKNOWN</td> <td><a href=RETRY_MIT*.TXT>RETRY_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>rimraf</td> <td>2.4.5</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=RIMRAF_ISC.TXT>RIMRAF_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>safe-buffer</td> <td>5.1.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SAFE-BUFFER_MIT.TXT>SAFE-BUFFER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>safe-buffer</td> <td>5.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SAFE-BUFFER_MIT.TXT>SAFE-BUFFER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>safe-json-stringify</td> <td>1.0.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>safe-json-stringify</td> <td>1.0.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>semver</td> <td>5.5.0</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=SEMVER_ISC.TXT>SEMVER_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>semver</td> <td>5.4.1</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=SEMVER_ISC.TXT>SEMVER_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>send</td> <td>0.1.4</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>send</td> <td>0.16.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SEND_MIT.TXT>SEND_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>send</td> <td>0.2.0</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>serve-static</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SERVE-STATIC_MIT.TXT>SERVE-STATIC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>serve-static</td> <td>1.13.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SERVE-STATIC_MIT.TXT>SERVE-STATIC_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>setprototypeof</td> <td>1.0.3</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=SETPROTOTYPEOF_ISC.TXT>SETPROTOTYPEOF_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>setprototypeof</td> <td>1.1.0</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=SETPROTOTYPEOF_ISC.TXT>SETPROTOTYPEOF_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>shimmer</td> <td>1.0.0</td> <td>UNKNOWN</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>simple-lru-cache</td> <td>0.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SIMPLE-LRU-CACHE_MIT.TXT>SIMPLE-LRU-CACHE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>sntp</td> <td>1.0.9</td> <td>UNKNOWN</td> <td><a href=SNTP_BSD.TXT>SNTP_BSD.TXT</a></td> </tr> <tr> 
<td>N/A</td> <td>sntp</td> <td>2.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=SNTP_BSD-3-CLAUSE.TXT>SNTP_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>sshpk</td> <td>1.11.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SSHPK_MIT.TXT>SSHPK_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>sshpk</td> <td>1.13.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SSHPK_MIT.TXT>SSHPK_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>sshpk</td> <td>1.14.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SSHPK_MIT.TXT>SSHPK_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>statuses</td> <td>1.3.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=STATUSES_MIT.TXT>STATUSES_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>stream-buffers</td> <td>3.0.0</td> <td>http:&#x2F;&#x2F;unlicense.org&#x2F;</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>streamsearch</td> <td>0.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=STREAMSEARCH_MIT.TXT>STREAMSEARCH_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>string_decoder</td> <td>0.10.31</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=STRING_DECODER_MIT.TXT>STRING_DECODER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>string_decoder</td> <td>1.0.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=STRING_DECODER_MIT.TXT>STRING_DECODER_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>stringstream</td> <td>0.0.5</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=STRINGSTREAM_MIT.TXT>STRINGSTREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>strip-ansi</td> <td>3.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=STRIP-ANSI_MIT.TXT>STRIP-ANSI_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>supports-color</td> <td>2.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=SUPPORTS-COLOR_MIT.TXT>SUPPORTS-COLOR_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>tar-stream</td> <td>1.5.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=TAR-STREAM_MIT.TXT>TAR-STREAM_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>tough-cookie</td> <td>2.3.4</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=TOUGH-COOKIE_BSD-3-CLAUSE.TXT>TOUGH-COOKIE_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>tough-cookie</td> <td>2.3.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;BSD-3-Clause</td> <td><a href=TOUGH-COOKIE_BSD-3-CLAUSE.TXT>TOUGH-COOKIE_BSD-3-CLAUSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>tunnel-agent</td> <td>0.6.0</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=TUNNEL-AGENT_APACHE-2.0.TXT>TUNNEL-AGENT_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>tunnel-agent</td> <td>0.4.3</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=TUNNEL-AGENT_APACHE-2.0.TXT>TUNNEL-AGENT_APACHE-2.0.TXT</a></td> </tr> <tr> <td>N/A</td> <td>tweetnacl</td> <td>0.14.5</td> <td>http:&#x2F;&#x2F;unlicense.org&#x2F;</td> <td><a href=TWEETNACL_UNLICENSE.TXT>TWEETNACL_UNLICENSE.TXT</a></td> </tr> <tr> <td>N/A</td> <td>type-is</td> <td>1.0.0</td> 
<td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>type-is</td> <td>1.1.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>type-is</td> <td>1.2.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>type-is</td> <td>1.6.15</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=TYPE-IS_MIT.TXT>TYPE-IS_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>typedarray</td> <td>0.0.6</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=TYPEDARRAY_MIT.TXT>TYPEDARRAY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>underscore</td> <td>1.5.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UNDERSCORE_MIT.TXT>UNDERSCORE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>underscore</td> <td>1.8.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UNDERSCORE_MIT.TXT>UNDERSCORE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>underscore</td> <td>1.7.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UNDERSCORE_MIT.TXT>UNDERSCORE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>unifiedpush-node-sender</td> <td>0.12.1</td> <td>http:&#x2F;&#x2F;www.apache.org&#x2F;licenses&#x2F;LICENSE-2.0</td> <td><a href=#>No local license could be found for the dependency</a></td> </tr> <tr> <td>N/A</td> <td>unpipe</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UNPIPE_MIT.TXT>UNPIPE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>util-deprecate</td> <td>1.0.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UTIL-DEPRECATE_MIT.TXT>UTIL-DEPRECATE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>utils-merge</td> <td>1.0.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UTILS-MERGE_MIT.TXT>UTILS-MERGE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>utils-merge</td> <td>1.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UTILS-MERGE_MIT.TXT>UTILS-MERGE_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>uuid</td> <td>3.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UUID_MIT.TXT>UUID_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>uuid</td> <td>3.2.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=UUID_MIT.TXT>UUID_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>vary</td> <td>1.1.2</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=VARY_MIT.TXT>VARY_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>verror</td> <td>1.10.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=VERROR_MIT.TXT>VERROR_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>verror</td> <td>1.3.6</td> <td>UNKNOWN</td> <td><a href=VERROR_MIT*.TXT>VERROR_MIT*.TXT</a></td> </tr> <tr> <td>N/A</td> <td>verror</td> <td>1.6.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=VERROR_MIT.TXT>VERROR_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>wordwrap</td> <td>0.0.3</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a 
href=WORDWRAP_MIT.TXT>WORDWRAP_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>wrappy</td> <td>1.0.2</td> <td>http:&#x2F;&#x2F;www.isc.org&#x2F;software&#x2F;license</td> <td><a href=WRAPPY_ISC.TXT>WRAPPY_ISC.TXT</a></td> </tr> <tr> <td>N/A</td> <td>xtend</td> <td>4.0.1</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=XTEND_MIT.TXT>XTEND_MIT.TXT</a></td> </tr> <tr> <td>N/A</td> <td>zip-stream</td> <td>1.2.0</td> <td>http:&#x2F;&#x2F;www.opensource.org&#x2F;licenses&#x2F;MIT</td> <td><a href=ZIP-STREAM_MIT.TXT>ZIP-STREAM_MIT.TXT</a></td> </tr> </table> </body> </html>
feedhenry-templates/fh-connector-mongodb-cloud
licenses/licenses.html
HTML
apache-2.0
60,702
export const removeWeight = (element) => { try { element.removeAttribute('data-weight'); } catch (e) { // We are now in IE11 territory: removeAttribute failed, so clear the attribute value instead if (!!element) { element.setAttribute('data-weight', null); } } };
OpenConext/OpenConext-engineblock
theme/base/javascripts/wayf/search/removeWeight.js
JavaScript
apache-2.0
230
using MO.Core.Forms.Common; using System; using System.ComponentModel; using System.Drawing; using System.Windows.Forms; namespace MO.Core.Forms.Controls { //============================================================ // <T>Color picker control.</T> //============================================================ public partial class QColorPicker : QControl { // Maximum height protected int _heighMax = 20; // Color swatch width protected int _colorWidth = 18; // Selected color protected Color _selectColor = Color.Black; //============================================================ // <T>Constructs the color picker control.</T> //============================================================ public QColorPicker() { InitializeComponent(); } //============================================================ // <T>Sets the control size.</T> //============================================================ protected void InnerSetSize(int width, int height) { // Set the size (the height is fixed at _heighMax) Width = width; Height = height = _heighMax; // Compute the border insets int contentLeft = _borderOuter.Left.Width + _borderInner.Left.Width; int contentRight = _borderOuter.Bottom.Width + _borderInner.Bottom.Width; int contentTop = _borderOuter.Top.Width + _borderInner.Top.Width; int contentBottom = _borderOuter.Bottom.Width + _borderInner.Bottom.Width; // Lay out the container panel pnlContanier.SetBounds( contentLeft, contentTop, width - contentLeft - contentRight, height - contentTop - contentBottom); // Lay out the value text box txtValue.SetBounds( contentLeft, contentTop, width - _colorWidth - contentLeft - contentRight - 2, height - contentTop - contentBottom); // Lay out the color swatch panel pnlColor.SetBounds( width - _colorWidth - contentLeft - contentRight, contentTop - 2, _colorWidth, height - contentTop - contentBottom); Invalidate(); } //============================================================ // <T>Size.</T> //============================================================ [Browsable(true)] public new Size Size { get { return base.Size; } set { InnerSetSize(value.Width, value.Height); } } //============================================================ // <T>Refreshes the displayed color.</T> //============================================================ public void RefreshColor(){ pnlColor.BackColor = _selectColor; txtValue.Text = RColor.FormatHex(_selectColor.ToArgb()); } //============================================================ // <T>Gets or sets the selected color.</T> //============================================================ [Browsable(true)] [DefaultValue(typeof(Color), "Color.Black")] public Color SelectColor { get { return _selectColor; } set { _selectColor = value; RefreshColor(); } } //============================================================ // <T>Gets or sets the selected color as hex text.</T> //============================================================ [Browsable(true)] [DefaultValue("FF000000")] public string SelectColorText { get { return RColor.FormatHex(_selectColor.ToArgb()); } set { _selectColor = RColor.ParseHexColor(value); RefreshColor(); } } //============================================================ // <T>Gets or sets the selected color as an ARGB value.</T> //============================================================ [Browsable(true)] [DefaultValue(-16777216)] public int SelectColorValue { get { return _selectColor.ToArgb(); } set { _selectColor = Color.FromArgb(value); RefreshColor(); } } //============================================================ // <T>Re-applies the layout when the control is resized.</T> //============================================================ private void QColorPicker_Resize(object sender, EventArgs e) { InnerSetSize(Width, Height); } //============================================================ // <T>Mouse click handling: opens the color picker dialog.</T> //============================================================ private void pnlColor_Click(object sender, EventArgs e) { dlgColor.Color = _selectColor; DialogResult resultCd = dlgColor.ShowDialog(); if (resultCd == DialogResult.OK) { _selectColor = dlgColor.Color; RefreshColor(); } } //============================================================ // <T>Text change handling: parses the hex value entered in the text box.</T> //============================================================ private void txtValue_Leave(object sender, EventArgs e) { _selectColor = RColor.ParseHexColor(txtValue.Text); RefreshColor(); } } }
favedit/MoCross
Tools/1 - Common/MoCore/Forms/Controls/QColorPicker.cs
C#
apache-2.0
5,282
/** * Copyright (C) 2015 The Gravitee team (http://gravitee.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gravitee.gateway.services.sync.cache; import com.hazelcast.core.HazelcastInstance; import org.springframework.beans.factory.annotation.Autowired; import java.util.Map; /** * @author David BRASSELY (david.brassely at graviteesource.com) * @author GraviteeSource Team */ public final class CacheManager { @Autowired private HazelcastInstance hzInstance; public <K, V> Map<K, V> getCache(String name) { return hzInstance.getMap(name); } }
gravitee-io/gateway
gravitee-gateway-services/gravitee-gateway-services-sync/src/main/java/io/gravitee/gateway/services/sync/cache/CacheManager.java
Java
apache-2.0
1,110
/* * Copyright 2017 Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <folly/detail/CacheLocality.h> #include <folly/portability/GTest.h> #include <sched.h> #include <memory> #include <thread> #include <type_traits> #include <unordered_map> #include <glog/logging.h> using namespace folly::detail; /// This is the relevant nodes from a production box's sysfs tree. If you /// think this map is ugly you should see the version of this test that /// used a real directory tree. To reduce the chance of testing error /// I haven't tried to remove the common prefix static std::unordered_map<std::string, std::string> fakeSysfsTree = { {"/sys/devices/system/cpu/cpu0/cache/index0/shared_cpu_list", "0,17"}, {"/sys/devices/system/cpu/cpu0/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu0/cache/index1/shared_cpu_list", "0,17"}, {"/sys/devices/system/cpu/cpu0/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu0/cache/index2/shared_cpu_list", "0,17"}, {"/sys/devices/system/cpu/cpu0/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu0/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu0/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu1/cache/index0/shared_cpu_list", "1,18"}, {"/sys/devices/system/cpu/cpu1/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu1/cache/index1/shared_cpu_list", "1,18"}, {"/sys/devices/system/cpu/cpu1/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu1/cache/index2/shared_cpu_list", "1,18"}, {"/sys/devices/system/cpu/cpu1/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu1/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu1/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu2/cache/index0/shared_cpu_list", "2,19"}, {"/sys/devices/system/cpu/cpu2/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu2/cache/index1/shared_cpu_list", "2,19"}, {"/sys/devices/system/cpu/cpu2/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu2/cache/index2/shared_cpu_list", "2,19"}, {"/sys/devices/system/cpu/cpu2/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu2/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu2/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu3/cache/index0/shared_cpu_list", "3,20"}, {"/sys/devices/system/cpu/cpu3/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu3/cache/index1/shared_cpu_list", "3,20"}, {"/sys/devices/system/cpu/cpu3/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu3/cache/index2/shared_cpu_list", "3,20"}, {"/sys/devices/system/cpu/cpu3/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu3/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu3/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu4/cache/index0/shared_cpu_list", "4,21"}, {"/sys/devices/system/cpu/cpu4/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu4/cache/index1/shared_cpu_list", "4,21"}, 
{"/sys/devices/system/cpu/cpu4/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu4/cache/index2/shared_cpu_list", "4,21"}, {"/sys/devices/system/cpu/cpu4/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu4/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu4/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu5/cache/index0/shared_cpu_list", "5-6"}, {"/sys/devices/system/cpu/cpu5/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu5/cache/index1/shared_cpu_list", "5-6"}, {"/sys/devices/system/cpu/cpu5/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu5/cache/index2/shared_cpu_list", "5-6"}, {"/sys/devices/system/cpu/cpu5/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu5/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu5/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu6/cache/index0/shared_cpu_list", "5-6"}, {"/sys/devices/system/cpu/cpu6/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu6/cache/index1/shared_cpu_list", "5-6"}, {"/sys/devices/system/cpu/cpu6/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu6/cache/index2/shared_cpu_list", "5-6"}, {"/sys/devices/system/cpu/cpu6/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu6/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu6/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu7/cache/index0/shared_cpu_list", "7,22"}, {"/sys/devices/system/cpu/cpu7/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu7/cache/index1/shared_cpu_list", "7,22"}, {"/sys/devices/system/cpu/cpu7/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu7/cache/index2/shared_cpu_list", "7,22"}, {"/sys/devices/system/cpu/cpu7/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu7/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu7/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu8/cache/index0/shared_cpu_list", "8,23"}, {"/sys/devices/system/cpu/cpu8/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu8/cache/index1/shared_cpu_list", "8,23"}, {"/sys/devices/system/cpu/cpu8/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu8/cache/index2/shared_cpu_list", "8,23"}, {"/sys/devices/system/cpu/cpu8/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu8/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu8/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu9/cache/index0/shared_cpu_list", "9,24"}, {"/sys/devices/system/cpu/cpu9/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu9/cache/index1/shared_cpu_list", "9,24"}, {"/sys/devices/system/cpu/cpu9/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu9/cache/index2/shared_cpu_list", "9,24"}, {"/sys/devices/system/cpu/cpu9/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu9/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu9/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu10/cache/index0/shared_cpu_list", "10,25"}, {"/sys/devices/system/cpu/cpu10/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu10/cache/index1/shared_cpu_list", "10,25"}, {"/sys/devices/system/cpu/cpu10/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu10/cache/index2/shared_cpu_list", "10,25"}, {"/sys/devices/system/cpu/cpu10/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu10/cache/index3/shared_cpu_list", "9-16,24-31"}, 
{"/sys/devices/system/cpu/cpu10/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu11/cache/index0/shared_cpu_list", "11,26"}, {"/sys/devices/system/cpu/cpu11/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu11/cache/index1/shared_cpu_list", "11,26"}, {"/sys/devices/system/cpu/cpu11/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu11/cache/index2/shared_cpu_list", "11,26"}, {"/sys/devices/system/cpu/cpu11/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu11/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu11/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu12/cache/index0/shared_cpu_list", "12,27"}, {"/sys/devices/system/cpu/cpu12/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu12/cache/index1/shared_cpu_list", "12,27"}, {"/sys/devices/system/cpu/cpu12/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu12/cache/index2/shared_cpu_list", "12,27"}, {"/sys/devices/system/cpu/cpu12/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu12/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu12/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu13/cache/index0/shared_cpu_list", "13,28"}, {"/sys/devices/system/cpu/cpu13/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu13/cache/index1/shared_cpu_list", "13,28"}, {"/sys/devices/system/cpu/cpu13/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu13/cache/index2/shared_cpu_list", "13,28"}, {"/sys/devices/system/cpu/cpu13/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu13/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu13/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu14/cache/index0/shared_cpu_list", "14,29"}, {"/sys/devices/system/cpu/cpu14/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu14/cache/index1/shared_cpu_list", "14,29"}, {"/sys/devices/system/cpu/cpu14/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu14/cache/index2/shared_cpu_list", "14,29"}, {"/sys/devices/system/cpu/cpu14/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu14/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu14/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu15/cache/index0/shared_cpu_list", "15,30"}, {"/sys/devices/system/cpu/cpu15/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu15/cache/index1/shared_cpu_list", "15,30"}, {"/sys/devices/system/cpu/cpu15/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu15/cache/index2/shared_cpu_list", "15,30"}, {"/sys/devices/system/cpu/cpu15/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu15/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu15/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu16/cache/index0/shared_cpu_list", "16,31"}, {"/sys/devices/system/cpu/cpu16/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu16/cache/index1/shared_cpu_list", "16,31"}, {"/sys/devices/system/cpu/cpu16/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu16/cache/index2/shared_cpu_list", "16,31"}, {"/sys/devices/system/cpu/cpu16/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu16/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu16/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu17/cache/index0/shared_cpu_list", "0,17"}, {"/sys/devices/system/cpu/cpu17/cache/index0/type", "Data"}, 
{"/sys/devices/system/cpu/cpu17/cache/index1/shared_cpu_list", "0,17"}, {"/sys/devices/system/cpu/cpu17/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu17/cache/index2/shared_cpu_list", "0,17"}, {"/sys/devices/system/cpu/cpu17/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu17/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu17/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu18/cache/index0/shared_cpu_list", "1,18"}, {"/sys/devices/system/cpu/cpu18/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu18/cache/index1/shared_cpu_list", "1,18"}, {"/sys/devices/system/cpu/cpu18/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu18/cache/index2/shared_cpu_list", "1,18"}, {"/sys/devices/system/cpu/cpu18/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu18/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu18/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu19/cache/index0/shared_cpu_list", "2,19"}, {"/sys/devices/system/cpu/cpu19/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu19/cache/index1/shared_cpu_list", "2,19"}, {"/sys/devices/system/cpu/cpu19/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu19/cache/index2/shared_cpu_list", "2,19"}, {"/sys/devices/system/cpu/cpu19/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu19/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu19/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu20/cache/index0/shared_cpu_list", "3,20"}, {"/sys/devices/system/cpu/cpu20/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu20/cache/index1/shared_cpu_list", "3,20"}, {"/sys/devices/system/cpu/cpu20/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu20/cache/index2/shared_cpu_list", "3,20"}, {"/sys/devices/system/cpu/cpu20/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu20/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu20/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu21/cache/index0/shared_cpu_list", "4,21"}, {"/sys/devices/system/cpu/cpu21/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu21/cache/index1/shared_cpu_list", "4,21"}, {"/sys/devices/system/cpu/cpu21/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu21/cache/index2/shared_cpu_list", "4,21"}, {"/sys/devices/system/cpu/cpu21/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu21/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu21/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu22/cache/index0/shared_cpu_list", "7,22"}, {"/sys/devices/system/cpu/cpu22/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu22/cache/index1/shared_cpu_list", "7,22"}, {"/sys/devices/system/cpu/cpu22/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu22/cache/index2/shared_cpu_list", "7,22"}, {"/sys/devices/system/cpu/cpu22/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu22/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu22/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu23/cache/index0/shared_cpu_list", "8,23"}, {"/sys/devices/system/cpu/cpu23/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu23/cache/index1/shared_cpu_list", "8,23"}, {"/sys/devices/system/cpu/cpu23/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu23/cache/index2/shared_cpu_list", "8,23"}, 
{"/sys/devices/system/cpu/cpu23/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu23/cache/index3/shared_cpu_list", "0-8,17-23"}, {"/sys/devices/system/cpu/cpu23/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu24/cache/index0/shared_cpu_list", "9,24"}, {"/sys/devices/system/cpu/cpu24/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu24/cache/index1/shared_cpu_list", "9,24"}, {"/sys/devices/system/cpu/cpu24/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu24/cache/index2/shared_cpu_list", "9,24"}, {"/sys/devices/system/cpu/cpu24/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu24/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu24/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu25/cache/index0/shared_cpu_list", "10,25"}, {"/sys/devices/system/cpu/cpu25/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu25/cache/index1/shared_cpu_list", "10,25"}, {"/sys/devices/system/cpu/cpu25/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu25/cache/index2/shared_cpu_list", "10,25"}, {"/sys/devices/system/cpu/cpu25/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu25/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu25/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu26/cache/index0/shared_cpu_list", "11,26"}, {"/sys/devices/system/cpu/cpu26/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu26/cache/index1/shared_cpu_list", "11,26"}, {"/sys/devices/system/cpu/cpu26/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu26/cache/index2/shared_cpu_list", "11,26"}, {"/sys/devices/system/cpu/cpu26/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu26/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu26/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu27/cache/index0/shared_cpu_list", "12,27"}, {"/sys/devices/system/cpu/cpu27/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu27/cache/index1/shared_cpu_list", "12,27"}, {"/sys/devices/system/cpu/cpu27/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu27/cache/index2/shared_cpu_list", "12,27"}, {"/sys/devices/system/cpu/cpu27/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu27/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu27/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu28/cache/index0/shared_cpu_list", "13,28"}, {"/sys/devices/system/cpu/cpu28/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu28/cache/index1/shared_cpu_list", "13,28"}, {"/sys/devices/system/cpu/cpu28/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu28/cache/index2/shared_cpu_list", "13,28"}, {"/sys/devices/system/cpu/cpu28/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu28/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu28/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu29/cache/index0/shared_cpu_list", "14,29"}, {"/sys/devices/system/cpu/cpu29/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu29/cache/index1/shared_cpu_list", "14,29"}, {"/sys/devices/system/cpu/cpu29/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu29/cache/index2/shared_cpu_list", "14,29"}, {"/sys/devices/system/cpu/cpu29/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu29/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu29/cache/index3/type", "Unified"}, 
{"/sys/devices/system/cpu/cpu30/cache/index0/shared_cpu_list", "15,30"}, {"/sys/devices/system/cpu/cpu30/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu30/cache/index1/shared_cpu_list", "15,30"}, {"/sys/devices/system/cpu/cpu30/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu30/cache/index2/shared_cpu_list", "15,30"}, {"/sys/devices/system/cpu/cpu30/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu30/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu30/cache/index3/type", "Unified"}, {"/sys/devices/system/cpu/cpu31/cache/index0/shared_cpu_list", "16,31"}, {"/sys/devices/system/cpu/cpu31/cache/index0/type", "Data"}, {"/sys/devices/system/cpu/cpu31/cache/index1/shared_cpu_list", "16,31"}, {"/sys/devices/system/cpu/cpu31/cache/index1/type", "Instruction"}, {"/sys/devices/system/cpu/cpu31/cache/index2/shared_cpu_list", "16,31"}, {"/sys/devices/system/cpu/cpu31/cache/index2/type", "Unified"}, {"/sys/devices/system/cpu/cpu31/cache/index3/shared_cpu_list", "9-16,24-31"}, {"/sys/devices/system/cpu/cpu31/cache/index3/type", "Unified"}}; /// This is the expected CacheLocality structure for fakeSysfsTree static const CacheLocality nonUniformExampleLocality = {32, {16, 16, 2}, {0, 2, 4, 6, 8, 10, 11, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 1, 3, 5, 7, 9, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31}}; TEST(CacheLocality, FakeSysfs) { auto parsed = CacheLocality::readFromSysfsTree([](std::string name) { auto iter = fakeSysfsTree.find(name); return iter == fakeSysfsTree.end() ? std::string() : iter->second; }); auto& expected = nonUniformExampleLocality; EXPECT_EQ(expected.numCpus, parsed.numCpus); EXPECT_EQ(expected.numCachesByLevel, parsed.numCachesByLevel); EXPECT_EQ(expected.localityIndexByCpu, parsed.localityIndexByCpu); } #if FOLLY_HAVE_LINUX_VDSO TEST(Getcpu, VdsoGetcpu) { unsigned cpu; Getcpu::resolveVdsoFunc()(&cpu, nullptr, nullptr); EXPECT_TRUE(cpu < CPU_SETSIZE); } #endif #ifdef FOLLY_TLS TEST(ThreadId, SimpleTls) { unsigned cpu = 0; auto rv = folly::detail::FallbackGetcpu<SequentialThreadId<std::atomic>>::getcpu( &cpu, nullptr, nullptr); EXPECT_EQ(rv, 0); EXPECT_TRUE(cpu > 0); unsigned again; folly::detail::FallbackGetcpu<SequentialThreadId<std::atomic>>::getcpu( &again, nullptr, nullptr); EXPECT_EQ(cpu, again); } #endif TEST(ThreadId, SimplePthread) { unsigned cpu = 0; auto rv = folly::detail::FallbackGetcpu<HashingThreadId>::getcpu( &cpu, nullptr, nullptr); EXPECT_EQ(rv, 0); EXPECT_TRUE(cpu > 0); unsigned again; folly::detail::FallbackGetcpu<HashingThreadId>::getcpu( &again, nullptr, nullptr); EXPECT_EQ(cpu, again); } #ifdef FOLLY_TLS static FOLLY_TLS unsigned testingCpu = 0; static int testingGetcpu(unsigned* cpu, unsigned* node, void* /* unused */) { if (cpu != nullptr) { *cpu = testingCpu; } if (node != nullptr) { *node = testingCpu; } return 0; } #endif TEST(AccessSpreader, Simple) { for (size_t s = 1; s < 200; ++s) { EXPECT_LT(AccessSpreader<>::current(s), s); } } #ifdef FOLLY_TLS #define DECLARE_SPREADER_TAG(tag, locality, func) \ namespace { \ template <typename dummy> \ struct tag {}; \ } \ namespace folly { \ namespace detail { \ template <> \ const CacheLocality& CacheLocality::system<tag>() { \ static auto* inst = new CacheLocality(locality); \ return *inst; \ } \ template <> \ Getcpu::Func AccessSpreader<tag>::pickGetcpuFunc() { \ return func; \ } \ } \ } DECLARE_SPREADER_TAG(ManualTag, CacheLocality::uniform(16), testingGetcpu) TEST(AccessSpreader, Wrapping) { // this test won't pass unless locality.numCpus divides 
kMaxCpus auto numCpus = CacheLocality::system<ManualTag>().numCpus; EXPECT_EQ(0, 128 % numCpus); for (size_t s = 1; s < 200; ++s) { for (size_t c = 0; c < 400; ++c) { testingCpu = c; auto observed = AccessSpreader<ManualTag>::current(s); testingCpu = c % numCpus; auto expected = AccessSpreader<ManualTag>::current(s); EXPECT_EQ(expected, observed) << "numCpus=" << numCpus << ", s=" << s << ", c=" << c; } } } #endif
charsyam/folly
folly/test/CacheLocalityTest.cpp
C++
apache-2.0
25,381
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.codecommit.model; import javax.annotation.Generated; /** * <p> * The number of approvals required for the approval rule exceeds the maximum number allowed. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class MaximumNumberOfApprovalsExceededException extends com.amazonaws.services.codecommit.model.AWSCodeCommitException { private static final long serialVersionUID = 1L; /** * Constructs a new MaximumNumberOfApprovalsExceededException with the specified error message. * * @param message * Describes the error encountered. */ public MaximumNumberOfApprovalsExceededException(String message) { super(message); } }
aws/aws-sdk-java
aws-java-sdk-codecommit/src/main/java/com/amazonaws/services/codecommit/model/MaximumNumberOfApprovalsExceededException.java
Java
apache-2.0
1,318
/** * Copyright (C) 2014-2015 Really Inc. <http://really.io> */ package io.really.model import akka.contrib.pattern.ShardRegion import _root_.io.really.{ RoutableToCollectionActor, ReallyConfig } class CollectionSharding(config: ReallyConfig) { implicit val implicitConfig = config val maxShards = config.Sharding.maxShards /** * ID Extractor for Akka Sharding extension * ID is the BucketId */ val idExtractor: ShardRegion.IdExtractor = { case req: RoutableToCollectionActor => Helpers.getBucketIDFromR(req.r) -> req } /** * Shard Resolver for Akka Sharding extension */ val shardResolver: ShardRegion.ShardResolver = { case req: RoutableToCollectionActor => (Helpers.getBucketIDFromR(req.r).hashCode % maxShards).toString } }
reallylabs/really
modules/really-core/src/main/scala/io/really/model/CollectionSharding.scala
Scala
apache-2.0
773
#!/bin/bash echo "bootstrap" service ssh restart cd /MapRouletteAPI # Delete any RUNNING_PID file on restart rm RUNNING_PID || true ./setupServer.sh > setupServer.log 2>&1 # Keep the container alive while true; do sleep 1000; done
mgcuthbert/maproulette2-docker
api/bootstrap.sh
Shell
apache-2.0
208
#define NETCORE using System; using System.Linq; using System.Reflection; namespace Foundatio.Force.DeepCloner.Helpers { internal static class ReflectionHelper { public static bool IsEnum(this Type t) { #if NETCORE return t.GetTypeInfo().IsEnum; #else return t.IsEnum; #endif } public static bool IsValueType(this Type t) { #if NETCORE return t.GetTypeInfo().IsValueType; #else return t.IsValueType; #endif } public static bool IsClass(this Type t) { #if NETCORE return t.GetTypeInfo().IsClass; #else return t.IsClass; #endif } public static Type BaseType(this Type t) { #if NETCORE return t.GetTypeInfo().BaseType; #else return t.BaseType; #endif } public static FieldInfo[] GetAllFields(this Type t) { #if NETCORE return t.GetTypeInfo().DeclaredFields.Where(x => !x.IsStatic).ToArray(); #else return t.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public); #endif } public static PropertyInfo[] GetPublicProperties(this Type t) { #if NETCORE return t.GetTypeInfo().DeclaredProperties.ToArray(); #else return t.GetProperties(BindingFlags.Instance | BindingFlags.Public); #endif } public static FieldInfo[] GetDeclaredFields(this Type t) { #if NETCORE return t.GetTypeInfo().DeclaredFields.Where(x => !x.IsStatic).ToArray(); #else return t.GetFields(BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.DeclaredOnly); #endif } public static ConstructorInfo[] GetPrivateConstructors(this Type t) { #if NETCORE return t.GetTypeInfo().DeclaredConstructors.ToArray(); #else return t.GetConstructors(BindingFlags.NonPublic | BindingFlags.Instance); #endif } public static ConstructorInfo[] GetPublicConstructors(this Type t) { #if NETCORE return t.GetTypeInfo().DeclaredConstructors.ToArray(); #else return t.GetConstructors(BindingFlags.Public | BindingFlags.Instance); #endif } public static MethodInfo GetPrivateMethod(this Type t, string methodName) { #if NETCORE return t.GetTypeInfo().GetDeclaredMethod(methodName); #else return t.GetMethod(methodName, BindingFlags.NonPublic | BindingFlags.Instance); #endif } public static MethodInfo GetMethod(this Type t, string methodName) { #if NETCORE return t.GetTypeInfo().GetDeclaredMethod(methodName); #else return t.GetMethod(methodName); #endif } public static MethodInfo GetPrivateStaticMethod(this Type t, string methodName) { #if NETCORE return t.GetTypeInfo().GetDeclaredMethod(methodName); #else return t.GetMethod(methodName, BindingFlags.NonPublic | BindingFlags.Static); #endif } public static FieldInfo GetPrivateField(this Type t, string fieldName) { #if NETCORE return t.GetTypeInfo().GetDeclaredField(fieldName); #else return t.GetField(fieldName, BindingFlags.NonPublic | BindingFlags.Instance); #endif } public static FieldInfo GetPrivateStaticField(this Type t, string fieldName) { #if NETCORE return t.GetTypeInfo().GetDeclaredField(fieldName); #else return t.GetField(fieldName, BindingFlags.NonPublic | BindingFlags.Static); #endif } #if NETCORE public static bool IsSubclassOfTypeByName(this Type t, string typeName) { while (t != null) { if (t.Name == typeName) return true; t = t.BaseType(); } return false; } #endif #if NETCORE public static bool IsAssignableFrom(this Type from, Type to) { return from.GetTypeInfo().IsAssignableFrom(to.GetTypeInfo()); } public static bool IsInstanceOfType(this Type from, object to) { return from.IsAssignableFrom(to.GetType()); } #endif public static Type[] GenericArguments(this Type t) { #if NETCORE return t.GetTypeInfo().GenericTypeArguments; #else return t.GetGenericArguments(); #endif 
} } }
FoundatioFx/Foundatio
src/Foundatio/DeepCloner/Helpers/ReflectionHelper.cs
C#
apache-2.0
3,817
"""Translation helper functions.""" import locale import os import re import sys import gettext as gettext_module from cStringIO import StringIO from django.utils.importlib import import_module from django.utils.safestring import mark_safe, SafeData from django.utils.thread_support import currentThread # Translations are cached in a dictionary for every language+app tuple. # The active translations are stored by threadid to make them thread local. _translations = {} _active = {} # The default translation is based on the settings file. _default = None # This is a cache for normalized accept-header languages to prevent multiple # file lookups when checking the same locale on repeated requests. _accepted = {} # Format of Accept-Language header values. From RFC 2616, section 14.4 and 3.9. accept_language_re = re.compile(r''' ([A-Za-z]{1,8}(?:-[A-Za-z]{1,8})*|\*) # "en", "en-au", "x-y-z", "*" (?:;q=(0(?:\.\d{,3})?|1(?:.0{,3})?))? # Optional "q=1.00", "q=0.8" (?:\s*,\s*|$) # Multiple accepts per header. ''', re.VERBOSE) def to_locale(language, to_lower=False): """ Turns a language name (en-us) into a locale name (en_US). If 'to_lower' is True, the last component is lower-cased (en_us). """ p = language.find('-') if p >= 0: if to_lower: return language[:p].lower()+'_'+language[p+1:].lower() else: return language[:p].lower()+'_'+language[p+1:].upper() else: return language.lower() def to_language(locale): """Turns a locale name (en_US) into a language name (en-us).""" p = locale.find('_') if p >= 0: return locale[:p].lower()+'-'+locale[p+1:].lower() else: return locale.lower() class DjangoTranslation(gettext_module.GNUTranslations): """ This class sets up the GNUTranslations context with regard to output charset. Django uses a defined DEFAULT_CHARSET as the output charset on Python 2.4. With Python 2.3, use DjangoTranslation23. """ def __init__(self, *args, **kw): from django.conf import settings gettext_module.GNUTranslations.__init__(self, *args, **kw) # Starting with Python 2.4, there's a function to define # the output charset. Before 2.4, the output charset is # identical with the translation file charset. try: self.set_output_charset('utf-8') except AttributeError: pass self.django_output_charset = 'utf-8' self.__language = '??' def merge(self, other): self._catalog.update(other._catalog) def set_language(self, language): self.__language = language def language(self): return self.__language def __repr__(self): return "<DjangoTranslation lang:%s>" % self.__language class DjangoTranslation23(DjangoTranslation): """ Compatibility class that is only used with Python 2.3. Python 2.3 doesn't support set_output_charset on translation objects and needs this wrapper class to make sure input charsets from translation files are correctly translated to output charsets. With a full switch to Python 2.4, this can be removed from the source. """ def gettext(self, msgid): res = self.ugettext(msgid) return res.encode(self.django_output_charset) def ngettext(self, msgid1, msgid2, n): res = self.ungettext(msgid1, msgid2, n) return res.encode(self.django_output_charset) def translation(language): """ Returns a translation object. This translation object will be constructed out of multiple GNUTranslations objects by merging their catalogs. It will construct a object for the requested language and add a fallback to the default language, if it's different from the requested language. 
""" global _translations t = _translations.get(language, None) if t is not None: return t from django.conf import settings # set up the right translation class klass = DjangoTranslation if sys.version_info < (2, 4): klass = DjangoTranslation23 globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') if settings.SETTINGS_MODULE is not None: parts = settings.SETTINGS_MODULE.split('.') project = import_module(parts[0]) projectpath = os.path.join(os.path.dirname(project.__file__), 'locale') else: projectpath = None def _fetch(lang, fallback=None): global _translations loc = to_locale(lang) res = _translations.get(lang, None) if res is not None: return res def _translation(path): try: t = gettext_module.translation('django', path, [loc], klass) t.set_language(lang) return t except IOError, e: return None res = _translation(globalpath) # We want to ensure that, for example, "en-gb" and "en-us" don't share # the same translation object (thus, merging en-us with a local update # doesn't affect en-gb), even though they will both use the core "en" # translation. So we have to subvert Python's internal gettext caching. base_lang = lambda x: x.split('-', 1)[0] if base_lang(lang) in [base_lang(trans) for trans in _translations]: res._info = res._info.copy() res._catalog = res._catalog.copy() def _merge(path): t = _translation(path) if t is not None: if res is None: return t else: res.merge(t) return res for localepath in settings.LOCALE_PATHS: if os.path.isdir(localepath): res = _merge(localepath) if projectpath and os.path.isdir(projectpath): res = _merge(projectpath) for appname in settings.INSTALLED_APPS: app = import_module(appname) apppath = os.path.join(os.path.dirname(app.__file__), 'locale') if os.path.isdir(apppath): res = _merge(apppath) if res is None: if fallback is not None: res = fallback else: return gettext_module.NullTranslations() _translations[lang] = res return res default_translation = _fetch(settings.LANGUAGE_CODE) current_translation = _fetch(language, fallback=default_translation) return current_translation def activate(language): """ Fetches the translation object for a given tuple of application name and language and installs it as the current translation object for the current thread. """ _active[currentThread()] = translation(language) def deactivate(): """ Deinstalls the currently active translation object so that further _ calls will resolve against the default translation object, again. """ global _active if currentThread() in _active: del _active[currentThread()] def deactivate_all(): """ Makes the active translation object a NullTranslations() instance. This is useful when we want delayed translations to appear as the original string for some reason. """ _active[currentThread()] = gettext_module.NullTranslations() def get_language(): """Returns the currently selected language.""" t = _active.get(currentThread(), None) if t is not None: try: return to_language(t.language()) except AttributeError: pass # If we don't have a real translation object, assume it's the default language. from django.conf import settings return settings.LANGUAGE_CODE def get_language_bidi(): """ Returns selected language's BiDi layout. False = left-to-right layout True = right-to-left layout """ from django.conf import settings base_lang = get_language().split('-')[0] return base_lang in settings.LANGUAGES_BIDI def catalog(): """ Returns the current active catalog for further processing. 
This can be used if you need to modify the catalog or want to access the whole message catalog instead of just translating one string. """ global _default, _active t = _active.get(currentThread(), None) if t is not None: return t if _default is None: from django.conf import settings _default = translation(settings.LANGUAGE_CODE) return _default def do_translate(message, translation_function): """ Translates 'message' using the given 'translation_function' name -- which will be either gettext or ugettext. It uses the current thread to find the translation object to use. If no current translation is activated, the message will be run through the default translation object. """ global _default, _active t = _active.get(currentThread(), None) if t is not None: result = getattr(t, translation_function)(message) else: if _default is None: from django.conf import settings _default = translation(settings.LANGUAGE_CODE) result = getattr(_default, translation_function)(message) if isinstance(message, SafeData): return mark_safe(result) return result def gettext(message): return do_translate(message, 'gettext') def ugettext(message): return do_translate(message, 'ugettext') def gettext_noop(message): """ Marks strings for translation but doesn't translate them now. This can be used to store strings in global variables that should stay in the base language (because they might be used externally) and will be translated later. """ return message def do_ntranslate(singular, plural, number, translation_function): global _default, _active t = _active.get(currentThread(), None) if t is not None: return getattr(t, translation_function)(singular, plural, number) if _default is None: from django.conf import settings _default = translation(settings.LANGUAGE_CODE) return getattr(_default, translation_function)(singular, plural, number) def ngettext(singular, plural, number): """ Returns a UTF-8 bytestring of the translation of either the singular or plural, based on the number. """ return do_ntranslate(singular, plural, number, 'ngettext') def ungettext(singular, plural, number): """ Returns a unicode strings of the translation of either the singular or plural, based on the number. """ return do_ntranslate(singular, plural, number, 'ungettext') def check_for_language(lang_code): """ Checks whether there is a global language file for the given language code. This is used to decide whether a user-provided language is available. This is only used for language codes from either the cookies or session. """ from django.conf import settings globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') if gettext_module.find('django', globalpath, [to_locale(lang_code)]) is not None: return True else: return False def get_language_from_request(request): """ Analyzes the request to find what language the user wants the system to show. Only languages listed in settings.LANGUAGES are taken into account. If the user requests a sublanguage where we have a main language, we send out the main language. 
""" global _accepted from django.conf import settings globalpath = os.path.join(os.path.dirname(sys.modules[settings.__module__].__file__), 'locale') supported = dict(settings.LANGUAGES) if hasattr(request, 'session'): lang_code = request.session.get('django_language', None) if lang_code in supported and lang_code is not None and check_for_language(lang_code): return lang_code lang_code = request.COOKIES.get(settings.LANGUAGE_COOKIE_NAME) if lang_code and lang_code in supported and check_for_language(lang_code): return lang_code accept = request.META.get('HTTP_ACCEPT_LANGUAGE', '') for accept_lang, unused in parse_accept_lang_header(accept): if accept_lang == '*': break # We have a very restricted form for our language files (no encoding # specifier, since they all must be UTF-8 and only one possible # language each time. So we avoid the overhead of gettext.find() and # work out the MO file manually. # 'normalized' is the root name of the locale in POSIX format (which is # the format used for the directories holding the MO files). normalized = locale.locale_alias.get(to_locale(accept_lang, True)) if not normalized: continue # Remove the default encoding from locale_alias. normalized = normalized.split('.')[0] if normalized in _accepted: # We've seen this locale before and have an MO file for it, so no # need to check again. return _accepted[normalized] for lang, dirname in ((accept_lang, normalized), (accept_lang.split('-')[0], normalized.split('_')[0])): if lang.lower() not in supported: continue langfile = os.path.join(globalpath, dirname, 'LC_MESSAGES', 'django.mo') if os.path.exists(langfile): _accepted[normalized] = lang return lang return settings.LANGUAGE_CODE def get_date_formats(): """ Checks whether translation files provide a translation for some technical message ID to store date and time formats. If it doesn't contain one, the formats provided in the settings will be used. """ from django.conf import settings date_format = ugettext('DATE_FORMAT') datetime_format = ugettext('DATETIME_FORMAT') time_format = ugettext('TIME_FORMAT') if date_format == 'DATE_FORMAT': date_format = settings.DATE_FORMAT if datetime_format == 'DATETIME_FORMAT': datetime_format = settings.DATETIME_FORMAT if time_format == 'TIME_FORMAT': time_format = settings.TIME_FORMAT return date_format, datetime_format, time_format def get_partial_date_formats(): """ Checks whether translation files provide a translation for some technical message ID to store partial date formats. If it doesn't contain one, the formats provided in the settings will be used. """ from django.conf import settings year_month_format = ugettext('YEAR_MONTH_FORMAT') month_day_format = ugettext('MONTH_DAY_FORMAT') if year_month_format == 'YEAR_MONTH_FORMAT': year_month_format = settings.YEAR_MONTH_FORMAT if month_day_format == 'MONTH_DAY_FORMAT': month_day_format = settings.MONTH_DAY_FORMAT return year_month_format, month_day_format dot_re = re.compile(r'\S') def blankout(src, char): """ Changes every non-whitespace character to the given char. Used in the templatize function. """ return dot_re.sub(char, src) inline_re = re.compile(r"""^\s*trans\s+((?:".*?")|(?:'.*?'))\s*""") block_re = re.compile(r"""^\s*blocktrans(?:\s+|$)""") endblock_re = re.compile(r"""^\s*endblocktrans$""") plural_re = re.compile(r"""^\s*plural$""") constant_re = re.compile(r"""_\(((?:".*?")|(?:'.*?'))\)""") def templatize(src): """ Turns a Django template into something that is understood by xgettext. 
It does so by translating the Django translation tags into standard gettext function invocations. """ from django.template import Lexer, TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK out = StringIO() intrans = False inplural = False singular = [] plural = [] for t in Lexer(src, None).tokenize(): if intrans: if t.token_type == TOKEN_BLOCK: endbmatch = endblock_re.match(t.contents) pluralmatch = plural_re.match(t.contents) if endbmatch: if inplural: out.write(' ngettext(%r,%r,count) ' % (''.join(singular), ''.join(plural))) for part in singular: out.write(blankout(part, 'S')) for part in plural: out.write(blankout(part, 'P')) else: out.write(' gettext(%r) ' % ''.join(singular)) for part in singular: out.write(blankout(part, 'S')) intrans = False inplural = False singular = [] plural = [] elif pluralmatch: inplural = True else: raise SyntaxError("Translation blocks must not include other block tags: %s" % t.contents) elif t.token_type == TOKEN_VAR: if inplural: plural.append('%%(%s)s' % t.contents) else: singular.append('%%(%s)s' % t.contents) elif t.token_type == TOKEN_TEXT: if inplural: plural.append(t.contents) else: singular.append(t.contents) else: if t.token_type == TOKEN_BLOCK: imatch = inline_re.match(t.contents) bmatch = block_re.match(t.contents) cmatches = constant_re.findall(t.contents) if imatch: g = imatch.group(1) if g[0] == '"': g = g.strip('"') elif g[0] == "'": g = g.strip("'") out.write(' gettext(%r) ' % g) elif bmatch: for fmatch in constant_re.findall(t.contents): out.write(' _(%s) ' % fmatch) intrans = True inplural = False singular = [] plural = [] elif cmatches: for cmatch in cmatches: out.write(' _(%s) ' % cmatch) else: out.write(blankout(t.contents, 'B')) elif t.token_type == TOKEN_VAR: parts = t.contents.split('|') cmatch = constant_re.match(parts[0]) if cmatch: out.write(' _(%s) ' % cmatch.group(1)) for p in parts[1:]: if p.find(':_(') >= 0: out.write(' %s ' % p.split(':',1)[1]) else: out.write(blankout(p, 'F')) else: out.write(blankout(t.contents, 'X')) return out.getvalue() def parse_accept_lang_header(lang_string): """ Parses the lang_string, which is the body of an HTTP Accept-Language header, and returns a list of (lang, q-value), ordered by 'q' values. Any format errors in lang_string results in an empty list being returned. """ result = [] pieces = accept_language_re.split(lang_string) if pieces[-1]: return [] for i in range(0, len(pieces) - 1, 3): first, lang, priority = pieces[i : i + 3] if first: return [] priority = priority and float(priority) or 1.0 result.append((lang, priority)) result.sort(lambda x, y: -cmp(x[1], y[1])) return result
greggian/TapdIn
django/utils/translation/trans_real.py
Python
apache-2.0
20,192
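The trans_real module in the row above exposes a small thread-local API: activate() installs a translation object for the current thread, the gettext/ugettext helpers resolve against it, and deactivate() drops back to settings.LANGUAGE_CODE. A minimal usage sketch follows (Python 2 syntax to match the module; the "de" code and the translated output are illustrative assumptions, not guaranteed results).

```python
# Sketch only: assumes a configured Django settings module and an installed
# German ("de") catalog; importing trans_real directly mirrors the file above.
from django.utils.translation import trans_real

def render_greeting(language_code):
    trans_real.activate(language_code)   # install a catalog for this thread
    try:
        # ugettext() looks the string up in the catalog activated above
        return u"%s: %s" % (trans_real.get_language(), trans_real.ugettext(u"Welcome"))
    finally:
        trans_real.deactivate()          # later calls fall back to settings.LANGUAGE_CODE

print render_greeting("de")              # e.g. u"de: Willkommen" if a catalog exists
```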
#pragma once

#include "DBC/DBC__File.h"

DBC_DEF_BEGIN(DBC_CreatureDisplayInfo)

__DBC_REF_ID(DBC_CreatureModelData, CreatureModelDataID, 2);
__DBC_TVALUE(uint32, SoundID, 3);
__DBC_REF_ID(DBC_CreatureDisplayInfoExtra, HumanoidData, 4);
__DBC_TVALUE(float_t, Scale, 5);
__DBC_TVALUE(uint32, Opacity, 6);       // 0 - 255
__DBC_STRING(Texture1, 7);              // Name of texture for 1st geoset with type 2 (see this). Texture must be in the same dir as M2 file of creature is.
__DBC_STRING(Texture2, 8);              // Name of texture for 2nd geoset with type 2 (see this). Texture must be in the same dir as M2 file of creature is.
__DBC_STRING(Texture3, 9);              // Name of texture for 3rd geoset with type 2 (see this). Texture must be in the same dir as M2 file of creature is.
__DBC_STRING(PortaitTexture, 10);
__DBC_TVALUE(uint32, UnitBloodLevelID, 11);
__DBC_TVALUE(uint32, UnitBloodID, 12);
__DBC_TVALUE(uint32, NPCSoundsID, 13);
__DBC_TVALUE(uint32, ParticlesID, 14);
__DBC_TVALUE(uint32, CreatureGeosetData, 15);
__DBC_TVALUE(uint32, ObjectEffectPackageID, 16);

DBC_DEF_END
bouzi71/OpenWow
owGame/DBC/Tables/DBC_CreatureDisplayInfo.h
C
apache-2.0
1,677
#
# ncurses, keep it simple for the moment
#
option(USE_BUNDLED_NCURSES "Enable building of the bundled ncurses" ${USE_BUNDLED_DEPS})

if(CURSES_INCLUDE_DIR)
	# we already have ncurses
elseif(NOT USE_BUNDLED_NCURSES)
	set(CURSES_NEED_NCURSES TRUE)
	find_package(Curses REQUIRED)
	message(STATUS "Found ncurses: include: ${CURSES_INCLUDE_DIR}, lib: ${CURSES_LIBRARIES}")
else()
	set(CURSES_BUNDLE_DIR "${PROJECT_BINARY_DIR}/ncurses-prefix/src/ncurses")
	set(CURSES_INCLUDE_DIR "${CURSES_BUNDLE_DIR}/include/")
	set(CURSES_LIBRARIES "${CURSES_BUNDLE_DIR}/lib/libncurses.a")
	if(NOT TARGET ncurses)
		message(STATUS "Using bundled ncurses in '${CURSES_BUNDLE_DIR}'")
		ExternalProject_Add(ncurses
			PREFIX "${PROJECT_BINARY_DIR}/ncurses-prefix"
			URL "http://download.draios.com/dependencies/ncurses-6.0-20150725.tgz"
			URL_MD5 "32b8913312e738d707ae68da439ca1f4"
			CONFIGURE_COMMAND ./configure --without-cxx --without-cxx-binding --without-ada --without-manpages --without-progs --without-tests --with-terminfo-dirs=/etc/terminfo:/lib/terminfo:/usr/share/terminfo
			BUILD_COMMAND ${CMD_MAKE}
			BUILD_IN_SOURCE 1
			BUILD_BYPRODUCTS ${CURSES_LIBRARIES}
			INSTALL_COMMAND "")
	endif()
endif()

include_directories("${CURSES_INCLUDE_DIR}")
draios/sysdig
cmake/modules/ncurses.cmake
CMake
apache-2.0
1,244
{-# LANGUAGE ExplicitForAll, Rank2Types #-}

-- | An implementation of Reagents (http://www.mpi-sws.org/~turon/reagents.pdf)
-- NOTE: currently this is just a very tiny core of the Reagent design.  Needs
-- lots of work.

module Data.Concurrent.Internal.Reagent where

import Data.IORef
import Data.Atomics
import Prelude hiding (succ, fail)

type Reagent a = forall b. (a -> IO b) -> IO b -> IO b

-- | Execute a Reagent.
{-# INLINE react #-}
react :: Reagent a -> IO a
react r = try
  where
    try = r finish try
    finish x = return x

-- | Like atomicModifyIORef, but uses CAS and permits the update action to force
-- a retry by returning Nothing
{-# INLINE atomicUpdate #-}
atomicUpdate :: IORef a -> (a -> Maybe (a, b)) -> Reagent b
atomicUpdate r f succ fail = do
  curTicket <- readForCAS r
  let cur = peekTicket curTicket
  case f cur of
    Just (new, out) -> do
      (done, _) <- casIORef r curTicket new
      if done then succ out else fail
    Nothing -> fail

atomicUpdate_ :: IORef a -> (a -> a) -> Reagent ()
atomicUpdate_ r f = atomicUpdate r (\x -> Just (f x, ()))

postCommit :: Reagent a -> (a -> IO b) -> Reagent b
postCommit r f succ fail = r (\x -> f x >>= succ) fail

choice :: Reagent a -> Reagent a -> Reagent a
choice _ _ = error "TODO"
rrnewton/concurrent-skiplist
src/Data/Concurrent/Internal/Reagent.hs
Haskell
apache-2.0
1,287
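The Reagent core above is essentially an optimistic read/compute/compare-and-swap loop: atomicUpdate retries through the fail continuation until casIORef succeeds, and a Nothing result from the update function also forces a retry. As a language-agnostic illustration of that retry protocol, here is a small Python sketch; CasCell, atomic_update and the lock-based "CAS" are inventions for this example, not part of the library above.

```python
import threading

class CasCell(object):
    """Toy mutable cell with compare-and-swap; the lock merely emulates an atomic CAS."""
    def __init__(self, value):
        self._value = value
        self._lock = threading.Lock()

    def read(self):
        return self._value

    def cas(self, expected, new):
        # Succeeds only if nobody changed the cell since `expected` was read.
        with self._lock:
            if self._value == expected:
                self._value = new
                return True
            return False

def atomic_update(cell, update):
    """Retry loop in the spirit of atomicUpdate: returning None forces a retry."""
    while True:
        current = cell.read()
        result = update(current)
        if result is None:
            continue            # the update function vetoed this snapshot; re-read and retry
        new_value, out = result
        if cell.cas(current, new_value):
            return out          # CAS succeeded: commit and hand back the result

counter = CasCell(0)
print(atomic_update(counter, lambda n: (n + 1, n)))  # prints 0; the cell now holds 1
```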
using System; using NDatabase.Api; using NUnit.Framework; using Test.NDatabase.Odb.Test.VO.Human; namespace Test.NDatabase.Odb.Test.Query.Criteria { public class TestPolyMorphic : ODBTest { private const string DbName = "TestPolyMorphic.ndb"; [Test] public void Test1() { DeleteBase(DbName); using (var odb = Open(DbName)) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe")); odb.Store(new Woman("Karine")); } IObjectSet<object> os; using (var odb = Open(DbName)) { var q = odb.Query<object>(); os = q.Execute<object>(); Println(os); } AssertEquals(4, os.Count); DeleteBase(DbName); } [Test] public void Test2() { DeleteBase(DbName); using (var odb = Open(DbName)) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe")); odb.Store(new Woman("Karine")); } IObjectSet<Human> os; using (var odb = Open(DbName)) { var q = odb.Query<Human>(); os = q.Execute<Human>(); Println(os); } AssertEquals(2, os.Count); DeleteBase(DbName); } [Test] public void Test3() { DeleteBase(DbName); using (var odb = Open(DbName)) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe")); odb.Store(new Woman("Karine")); } IValues os; using (var odb = Open(DbName)) { var q = odb.ValuesQuery<object>().Field("specie"); os = q.Execute(); Println(os); } AssertEquals(4, os.Count); DeleteBase(DbName); } [Test] public void Test4() { DeleteBase(DbName); using (var odb = Open(DbName)) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe")); odb.Store(new Woman("Karine")); } IValues os; using (var odb = Open(DbName)) { var q = odb.ValuesQuery<Human>().Field("specie"); os = q.Execute(); Println(os); } AssertEquals(2, os.Count); DeleteBase(DbName); } [Test] public void Test5() { DeleteBase(DbName); using (var odb = Open(DbName)) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe")); odb.Store(new Woman("Karine")); } IValues os; using (var odb = Open(DbName)) { var q = odb.ValuesQuery<Man>().Field("specie"); os = q.Execute(); Println(os); } AssertEquals(1, os.Count); DeleteBase(DbName); } [Test] public void Test6() { DeleteBase(DbName); using (var odb = Open(DbName)) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe")); odb.Store(new Woman("Karine")); } Decimal nb; using (var odb = Open(DbName)) { var q = odb.Query<object>(); nb = q.Count(); Println(nb); } AssertEquals(new Decimal(4), nb); DeleteBase(DbName); } [Test] public void Test7() { const int size = 3000; var baseName = GetBaseName(); using (var odb = Open(baseName)) { for (var i = 0; i < size; i++) { odb.Store(new Animal("dog", "M", "my dog")); odb.Store(new Animal("cat", "F", "my cat")); odb.Store(new Man("Joe" + i)); odb.Store(new Woman("Karine" + i)); } } Decimal nb; using (var odb = Open(baseName)) { var q = odb.Query<object>(); nb = q.Count(); Println(nb); } AssertEquals(new Decimal(4 * size), nb); DeleteBase(baseName); } [Test] public void Test8() { const int size = 3000; var baseName = GetBaseName(); using (var odb = Open(baseName)) { for (var i = 0; i < size; i++) { odb.Store(new Animal("dog" + i, "M", "my dog" + i)); odb.Store(new Animal("cat" + i, "F", "my cat" + i)); odb.Store(new Man("Joe" + i)); odb.Store(new Woman("Karine" + i)); } } Decimal nb; using (var odb = Open(baseName)) { var q = 
odb.Query<object>(); q.Descend("specie").Constrain("man").Equal(); nb = q.Count(); Println(nb); } AssertEquals(new Decimal(1 * size), nb); DeleteBase(baseName); } } }
WaltChen/NDatabase
tests/NDatabase.Old.UnitTests/NDatabase/Odb/Test/Query/Criteria/TestPolyMorphic.cs
C#
apache-2.0
6,267
package org.myrobotlab.framework; import static org.myrobotlab.framework.StatusLevel.DEBUG; import static org.myrobotlab.framework.StatusLevel.ERROR; import static org.myrobotlab.framework.StatusLevel.INFO; import static org.myrobotlab.framework.StatusLevel.SUCCESS; import static org.myrobotlab.framework.StatusLevel.WARN; import java.io.IOException; import java.io.PrintWriter; import java.io.Serializable; import java.io.StringWriter; import java.util.Objects; import org.myrobotlab.codec.CodecUtils; import org.myrobotlab.logging.Level; import org.myrobotlab.logging.LoggerFactory; import org.myrobotlab.logging.LoggingFactory; import org.slf4j.Logger; /** * Goal is to have a very simple Pojo with only a few (native Java helper * methods) WARNING !!! - this class used to extend Exception or Throwable - but * the gson serializer would stack overflow with self reference issue * * TODO - allow radix tree searches for "keys" ??? * */ public class Status implements Serializable {// extends Exception { private static final long serialVersionUID = 1L; public final static Logger log = LoggerFactory.getLogger(Status.class); public String name; // service name ??? /** * FIXME - should probably be an enum now that serialization mostly works now * with enums [debug|info|warn|error|success] - yes the last part is different * than "logging" but could still be a status... * */ public String level; /** * The key is the non changing part and good identifier of what went on... For * Exceptions I would recommend the Exception.class.getSimpleName() for the * key, whilst the "detail" is for "changing" detail. This becomes important * when Stati are aggregated - and humans are interested in "high" counts of * specific Status while the details are not important unless diagnosing one. * * Violating Servo limits is a good example - "key" can be "Outside servo * limits". The key can contain spaces and punctuation - the important part is * that it is STATIC. * * "details" contain dynamic specifics - for example: "key":"Outside servo * limits", "detail":"servo01 moveTo(75) limit is greater than 100" */ public String key; /** * Dynamic of verbose explanation of the status. e.g. "detail":"servo01 * moveTo(75) limit is greater than 100" or complete stack trace from an * exception */ public String detail; /** * optional source of status */ public Object source; // --- static creation of typed Status objects ---- public static Status debug(String format, Object... args) { Status status = new Status(String.format(format, args)); status.level = DEBUG; return status; } public static Status error(Exception e) { Status s = new Status(e); s.level = ERROR; return s; } public static Status error(String msg) { Status s = new Status(msg); s.level = ERROR; return s; } public static Status error(String format, Object... args) { Status status = new Status(String.format(format, args)); status.level = ERROR; return status; } public static Status warn(String msg) { Status s = new Status(msg); s.level = ERROR; return s; } public static Status warn(String format, Object... args) { Status status = new Status(String.format(format, args)); status.level = WARN; return status; } public static Status info(String msg) { Status s = new Status(msg); s.level = INFO; return s; } public static Status info(String format, Object... 
args) { String formattedInfo = String.format(format, args); Status status = new Status(formattedInfo); status.level = INFO; return status; } public final static String stackToString(final Throwable e) { StringWriter sw; try { sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); } catch (Exception e2) { return "bad stackToString"; } return "------\r\n" + sw.toString() + "------\r\n"; } public Status(Exception e) { this.level = ERROR; StringWriter sw; try { sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); e.printStackTrace(pw); detail = sw.toString(); } catch (Exception e2) { } this.key = String.format("%s - %s", e.getClass().getSimpleName(), e.getMessage()); } public Status(Status s) { if (s == null) { return; } this.name = s.name; this.level = s.level; this.key = s.key; this.detail = s.detail; } /** * for minimal amount of information error is assumed, and info is detail of * an ERROR * * @param detail * d */ public Status(String detail) { this.level = ERROR; this.detail = detail; } public Status(String name, String level, String key, String detail) { this.name = name; this.level = level; this.key = key; this.detail = detail; } public boolean isDebug() { return DEBUG.equals(level); } public boolean isError() { return ERROR.equals(level); } public boolean isInfo() { return INFO.equals(level); } public boolean isWarn() { return WARN.equals(level); } @Override public String toString() { StringBuffer sb = new StringBuffer(); if (name != null) { sb.append(name); sb.append(" "); } if (level != null) { sb.append(level); sb.append(" "); } if (key != null) { sb.append(key); sb.append(" "); } if (detail != null) { sb.append(detail); } return sb.toString(); } static public final Status newInstance(String name, String level, String key, String detail) { Status s = new Status(name, level, key, detail); return s; } @Override public boolean equals(Object o) { if (o == this) return true; if (!(o instanceof Status)) { return false; } Status status = (Status) o; return Objects.equals(name, status.name) && Objects.equals(level, status.level) && Objects.equals(key, status.key) && Objects.equals(detail, status.detail); } @Override public int hashCode() { return Objects.hash(name, level, key, detail); } public static void main(String[] args) throws IOException, InterruptedException { LoggingFactory.init(Level.INFO); Status test = new Status("i am pessimistic"); // Status subTest = new Status("i am sub pessimistic"); // test.add(subTest); String json = CodecUtils.toJson(test); Status z = CodecUtils.fromJson(json, Status.class); log.info(json); log.info(z.toString()); } public static Status success() { Status s = new Status(SUCCESS); s.level = SUCCESS; return s; } public boolean isSuccess() { return SUCCESS.equals(level); } public static Status success(String detail) { Status s = new Status(SUCCESS); s.level = SUCCESS; s.detail = detail; return s; } }
MyRobotLab/myrobotlab
src/main/java/org/myrobotlab/framework/Status.java
Java
apache-2.0
7,254
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Threading; using Microsoft.CodeAnalysis.CSharp.Extensions; using Microsoft.CodeAnalysis.CSharp.Syntax; using Microsoft.CodeAnalysis.Shared.Collections; using Microsoft.CodeAnalysis.Structure; using Microsoft.CodeAnalysis.Text; namespace Microsoft.CodeAnalysis.CSharp.Structure { internal class RegionDirectiveStructureProvider : AbstractSyntaxNodeStructureProvider<RegionDirectiveTriviaSyntax> { private static string GetBannerText(DirectiveTriviaSyntax simpleDirective) { var kw = simpleDirective.DirectiveNameToken; var prefixLength = kw.Span.End - simpleDirective.Span.Start; var text = simpleDirective.ToString().Substring(prefixLength).Trim(); if (text.Length == 0) { return simpleDirective.HashToken.ToString() + kw.ToString(); } else { return text; } } protected override void CollectBlockSpans( SyntaxToken previousToken, RegionDirectiveTriviaSyntax regionDirective, ref TemporaryArray<BlockSpan> spans, BlockStructureOptionProvider optionProvider, CancellationToken cancellationToken) { var match = regionDirective.GetMatchingDirective(cancellationToken); if (match != null) { // Always auto-collapse regions for Metadata As Source. These generated files only have one region at // the top of the file, which has content like the following: // // #region Assembly System.Runtime, Version=4.2.2.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a // // C:\Program Files\dotnet\packs\Microsoft.NETCore.App.Ref\3.1.0\ref\netcoreapp3.1\System.Runtime.dll // #endregion // // For other files, auto-collapse regions based on the user option. var autoCollapse = optionProvider.IsMetadataAsSource || optionProvider.GetOption( BlockStructureOptions.CollapseRegionsWhenCollapsingToDefinitions, LanguageNames.CSharp); spans.Add(new BlockSpan( isCollapsible: true, textSpan: TextSpan.FromBounds(regionDirective.SpanStart, match.Span.End), type: BlockTypes.PreprocessorRegion, bannerText: GetBannerText(regionDirective), autoCollapse: autoCollapse, isDefaultCollapsed: !optionProvider.IsMetadataAsSource)); } } } }
eriawan/roslyn
src/Features/CSharp/Portable/Structure/Providers/RegionDirectiveStructureProvider.cs
C#
apache-2.0
2,849
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Type resolution: the phase that finds all the types in the AST with // unresolved type variables and replaces "ty_var" types with their // substitutions. use self::ResolveReason::*; use astconv::AstConv; use check::FnCtxt; use middle::def; use middle::pat_util; use middle::ty::{mod, Ty, MethodCall, MethodCallee}; use middle::ty_fold::{TypeFolder,TypeFoldable}; use middle::infer::{force_all, resolve_all, resolve_region}; use middle::infer::resolve_type; use middle::infer; use write_substs_to_tcx; use write_ty_to_tcx; use util::ppaux::Repr; use std::cell::Cell; use syntax::ast; use syntax::codemap::{DUMMY_SP, Span}; use syntax::print::pprust::pat_to_string; use syntax::visit; use syntax::visit::Visitor; /////////////////////////////////////////////////////////////////////////// // Entry point functions pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &ast::Expr) { assert_eq!(fcx.writeback_errors.get(), false); let mut wbcx = WritebackCx::new(fcx); wbcx.visit_expr(e); wbcx.visit_upvar_borrow_map(); wbcx.visit_unboxed_closures(); wbcx.visit_object_cast_map(); } pub fn resolve_type_vars_in_fn(fcx: &FnCtxt, decl: &ast::FnDecl, blk: &ast::Block) { assert_eq!(fcx.writeback_errors.get(), false); let mut wbcx = WritebackCx::new(fcx); wbcx.visit_block(blk); for arg in decl.inputs.iter() { wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.id); wbcx.visit_pat(&*arg.pat); // Privacy needs the type for the whole pattern, not just each binding if !pat_util::pat_is_binding(&fcx.tcx().def_map, &*arg.pat) { wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.pat.id); } } wbcx.visit_upvar_borrow_map(); wbcx.visit_unboxed_closures(); wbcx.visit_object_cast_map(); } /////////////////////////////////////////////////////////////////////////// // The Writerback context. This visitor walks the AST, checking the // fn-specific tables to find references to types or regions. It // resolves those regions to remove inference variables and writes the // final result back into the master tables in the tcx. Here and // there, it applies a few ad-hoc checks that were not convenient to // do elsewhere. struct WritebackCx<'cx, 'tcx: 'cx> { fcx: &'cx FnCtxt<'cx, 'tcx>, } impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { fn new(fcx: &'cx FnCtxt<'cx, 'tcx>) -> WritebackCx<'cx, 'tcx> { WritebackCx { fcx: fcx } } fn tcx(&self) -> &'cx ty::ctxt<'tcx> { self.fcx.tcx() } } /////////////////////////////////////////////////////////////////////////// // Impl of Visitor for Resolver // // This is the master code which walks the AST. It delegates most of // the heavy lifting to the generic visit and resolve functions // below. In general, a function is made into a `visitor` if it must // traffic in node-ids or update tables in the type context etc. 
impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { fn visit_item(&mut self, _: &ast::Item) { // Ignore items } fn visit_stmt(&mut self, s: &ast::Stmt) { if self.fcx.writeback_errors.get() { return; } self.visit_node_id(ResolvingExpr(s.span), ty::stmt_node_id(s)); visit::walk_stmt(self, s); } fn visit_expr(&mut self, e: &ast::Expr) { if self.fcx.writeback_errors.get() { return; } self.visit_node_id(ResolvingExpr(e.span), e.id); self.visit_method_map_entry(ResolvingExpr(e.span), MethodCall::expr(e.id)); match e.node { ast::ExprClosure(_, _, ref decl, _) | ast::ExprProc(ref decl, _) => { for input in decl.inputs.iter() { let _ = self.visit_node_id(ResolvingExpr(e.span), input.id); } } _ => {} } visit::walk_expr(self, e); } fn visit_block(&mut self, b: &ast::Block) { if self.fcx.writeback_errors.get() { return; } self.visit_node_id(ResolvingExpr(b.span), b.id); visit::walk_block(self, b); } fn visit_pat(&mut self, p: &ast::Pat) { if self.fcx.writeback_errors.get() { return; } self.visit_node_id(ResolvingPattern(p.span), p.id); debug!("Type for pattern binding {} (id {}) resolved to {}", pat_to_string(p), p.id, ty::node_id_to_type(self.tcx(), p.id).repr(self.tcx())); visit::walk_pat(self, p); } fn visit_local(&mut self, l: &ast::Local) { if self.fcx.writeback_errors.get() { return; } let var_ty = self.fcx.local_ty(l.span, l.id); let var_ty = self.resolve(&var_ty, ResolvingLocal(l.span)); write_ty_to_tcx(self.tcx(), l.id, var_ty); visit::walk_local(self, l); } fn visit_ty(&mut self, t: &ast::Ty) { match t.node { ast::TyFixedLengthVec(ref ty, ref count_expr) => { self.visit_ty(&**ty); write_ty_to_tcx(self.tcx(), count_expr.id, ty::mk_uint()); } _ => visit::walk_ty(self, t) } } } impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { fn visit_upvar_borrow_map(&self) { if self.fcx.writeback_errors.get() { return; } for (upvar_id, upvar_borrow) in self.fcx.inh.upvar_borrow_map.borrow().iter() { let r = upvar_borrow.region; let r = self.resolve(&r, ResolvingUpvar(*upvar_id)); let new_upvar_borrow = ty::UpvarBorrow { kind: upvar_borrow.kind, region: r }; debug!("Upvar borrow for {} resolved to {}", upvar_id.repr(self.tcx()), new_upvar_borrow.repr(self.tcx())); self.fcx.tcx().upvar_borrow_map.borrow_mut().insert( *upvar_id, new_upvar_borrow); } } fn visit_unboxed_closures(&self) { if self.fcx.writeback_errors.get() { return } for (def_id, unboxed_closure) in self.fcx .inh .unboxed_closures .borrow() .iter() { let closure_ty = self.resolve(&unboxed_closure.closure_type, ResolvingUnboxedClosure(*def_id)); let unboxed_closure = ty::UnboxedClosure { closure_type: closure_ty, kind: unboxed_closure.kind, }; self.fcx .tcx() .unboxed_closures .borrow_mut() .insert(*def_id, unboxed_closure); } } fn visit_object_cast_map(&self) { if self.fcx.writeback_errors.get() { return } for (&node_id, trait_ref) in self.fcx .inh .object_cast_map .borrow() .iter() { let span = ty::expr_span(self.tcx(), node_id); let reason = ResolvingExpr(span); let closure_ty = self.resolve(trait_ref, reason); self.tcx() .object_cast_map .borrow_mut() .insert(node_id, closure_ty); } } fn visit_node_id(&self, reason: ResolveReason, id: ast::NodeId) { // Resolve any borrowings for the node with id `id` self.visit_adjustments(reason, id); // Resolve the type of the node with id `id` let n_ty = self.fcx.node_ty(id); let n_ty = self.resolve(&n_ty, reason); write_ty_to_tcx(self.tcx(), id, n_ty); debug!("Node {} has type {}", id, n_ty.repr(self.tcx())); // Resolve any substitutions self.fcx.opt_node_ty_substs(id, |item_substs| { 
write_substs_to_tcx(self.tcx(), id, self.resolve(item_substs, reason)); }); } fn visit_adjustments(&self, reason: ResolveReason, id: ast::NodeId) { match self.fcx.inh.adjustments.borrow_mut().remove(&id) { None => { debug!("No adjustments for node {}", id); } Some(adjustment) => { let adj_object = ty::adjust_is_object(&adjustment); let resolved_adjustment = match adjustment { ty::AdjustAddEnv(store) => { // FIXME(eddyb) #2190 Allow only statically resolved // bare functions to coerce to a closure to avoid // constructing (slower) indirect call wrappers. match self.tcx().def_map.borrow().get(&id) { Some(&def::DefFn(..)) | Some(&def::DefStaticMethod(..)) | Some(&def::DefVariant(..)) | Some(&def::DefStruct(_)) => { } _ => { span_err!(self.tcx().sess, reason.span(self.tcx()), E0100, "cannot coerce non-statically resolved bare fn to closure"); span_help!(self.tcx().sess, reason.span(self.tcx()), "consider embedding the function in a closure"); } } ty::AdjustAddEnv(self.resolve(&store, reason)) } ty::AdjustDerefRef(adj) => { for autoderef in range(0, adj.autoderefs) { let method_call = MethodCall::autoderef(id, autoderef); self.visit_method_map_entry(reason, method_call); } if adj_object { let method_call = MethodCall::autoobject(id); self.visit_method_map_entry(reason, method_call); } ty::AdjustDerefRef(ty::AutoDerefRef { autoderefs: adj.autoderefs, autoref: self.resolve(&adj.autoref, reason), }) } }; debug!("Adjustments for node {}: {}", id, resolved_adjustment); self.tcx().adjustments.borrow_mut().insert( id, resolved_adjustment); } } } fn visit_method_map_entry(&self, reason: ResolveReason, method_call: MethodCall) { // Resolve any method map entry match self.fcx.inh.method_map.borrow_mut().remove(&method_call) { Some(method) => { debug!("writeback::resolve_method_map_entry(call={}, entry={})", method_call, method.repr(self.tcx())); let new_method = MethodCallee { origin: self.resolve(&method.origin, reason), ty: self.resolve(&method.ty, reason), substs: self.resolve(&method.substs, reason), }; self.tcx().method_map.borrow_mut().insert( method_call, new_method); } None => {} } } fn resolve<T:ResolveIn<'tcx>>(&self, t: &T, reason: ResolveReason) -> T { t.resolve_in(&mut Resolver::new(self.fcx, reason)) } } /////////////////////////////////////////////////////////////////////////// // Resolution reason. enum ResolveReason { ResolvingExpr(Span), ResolvingLocal(Span), ResolvingPattern(Span), ResolvingUpvar(ty::UpvarId), ResolvingUnboxedClosure(ast::DefId), } impl Copy for ResolveReason {} impl ResolveReason { fn span(&self, tcx: &ty::ctxt) -> Span { match *self { ResolvingExpr(s) => s, ResolvingLocal(s) => s, ResolvingPattern(s) => s, ResolvingUpvar(upvar_id) => { ty::expr_span(tcx, upvar_id.closure_expr_id) } ResolvingUnboxedClosure(did) => { if did.krate == ast::LOCAL_CRATE { ty::expr_span(tcx, did.node) } else { DUMMY_SP } } } } } /////////////////////////////////////////////////////////////////////////// // Convenience methods for resolving different kinds of things. trait ResolveIn<'tcx> { fn resolve_in<'a>(&self, resolver: &mut Resolver<'a, 'tcx>) -> Self; } impl<'tcx, T: TypeFoldable<'tcx>> ResolveIn<'tcx> for T { fn resolve_in<'a>(&self, resolver: &mut Resolver<'a, 'tcx>) -> T { self.fold_with(resolver) } } /////////////////////////////////////////////////////////////////////////// // The Resolver. This is the type folding engine that detects // unresolved types and so forth. 
struct Resolver<'cx, 'tcx: 'cx> { tcx: &'cx ty::ctxt<'tcx>, infcx: &'cx infer::InferCtxt<'cx, 'tcx>, writeback_errors: &'cx Cell<bool>, reason: ResolveReason, } impl<'cx, 'tcx> Resolver<'cx, 'tcx> { fn new(fcx: &'cx FnCtxt<'cx, 'tcx>, reason: ResolveReason) -> Resolver<'cx, 'tcx> { Resolver::from_infcx(fcx.infcx(), &fcx.writeback_errors, reason) } fn from_infcx(infcx: &'cx infer::InferCtxt<'cx, 'tcx>, writeback_errors: &'cx Cell<bool>, reason: ResolveReason) -> Resolver<'cx, 'tcx> { Resolver { infcx: infcx, tcx: infcx.tcx, writeback_errors: writeback_errors, reason: reason } } fn report_error(&self, e: infer::fixup_err) { self.writeback_errors.set(true); if !self.tcx.sess.has_errors() { match self.reason { ResolvingExpr(span) => { span_err!(self.tcx.sess, span, E0101, "cannot determine a type for this expression: {}", infer::fixup_err_to_string(e)); } ResolvingLocal(span) => { span_err!(self.tcx.sess, span, E0102, "cannot determine a type for this local variable: {}", infer::fixup_err_to_string(e)); } ResolvingPattern(span) => { span_err!(self.tcx.sess, span, E0103, "cannot determine a type for this pattern binding: {}", infer::fixup_err_to_string(e)); } ResolvingUpvar(upvar_id) => { let span = self.reason.span(self.tcx); span_err!(self.tcx.sess, span, E0104, "cannot resolve lifetime for captured variable `{}`: {}", ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_string(), infer::fixup_err_to_string(e)); } ResolvingUnboxedClosure(_) => { let span = self.reason.span(self.tcx); self.tcx.sess.span_err(span, "cannot determine a type for this \ unboxed closure") } } } } } impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> { fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { self.tcx } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { if !ty::type_needs_infer(t) { return t; } match resolve_type(self.infcx, None, t, resolve_all | force_all) { Ok(t) => t, Err(e) => { self.report_error(e); ty::mk_err() } } } fn fold_region(&mut self, r: ty::Region) -> ty::Region { match resolve_region(self.infcx, r, resolve_all | force_all) { Ok(r) => r, Err(e) => { self.report_error(e); ty::ReStatic } } } } /////////////////////////////////////////////////////////////////////////// // During type check, we store promises with the result of trait // lookup rather than the actual results (because the results are not // necessarily available immediately). These routines unwind the // promises. It is expected that we will have already reported any // errors that may be encountered, so if the promises store an error, // a dummy result is returned.
emk/rust
src/librustc_typeck/check/writeback.rs
Rust
apache-2.0
17,553
# Brevichara H. Horn af Rantzien, 1956 GENUS

#### Status
ACCEPTED

#### According to
Interim Register of Marine and Nonmarine Genera

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Chlorophyta/Charophyceae/Charales/Characeae/Brevichara/README.md
Markdown
apache-2.0
200
# Schismus patens J.Presl SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Leptochloa/Leptochloa dubia/ Syn. Schismus patens/README.md
Markdown
apache-2.0
180
# Javorkaea hondurensis (Donn.Sm.) Borhidi & Komlódi SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Arachnothryx/Arachnothryx hondurensis/ Syn. Javorkaea hondurensis/README.md
Markdown
apache-2.0
208
# Kentrosphaera A. Borzì, 1883 GENUS

#### Status
ACCEPTED

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Chlorophyta/Chlorophyceae/Chlorococcales/Endosphaeraceae/Kentrosphaera/README.md
Markdown
apache-2.0
185
# Borreria lagurus S.Moore SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Spermacoce/Spermacoce lagurus/ Syn. Borreria lagurus/README.md
Markdown
apache-2.0
181
# Mimosa callidryas Barneby SPECIES

#### Status
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Mimosa/Mimosa callidryas/README.md
Markdown
apache-2.0
175
# Ligularia fangiana Hand.-Mazz. SPECIES

#### Status
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Ligularia fangiana/README.md
Markdown
apache-2.0
180
# Couroupita cutteri C.V.Morton & Skutch SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Ericales/Lecythidaceae/Couroupita/Couroupita nicaraguarensis/ Syn. Couroupita cutteri/README.md
Markdown
apache-2.0
195
# Viola robusta Hillebr. SPECIES

#### Status
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
F. Hawaiian Isl. 16. 1888

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Malpighiales/Violaceae/Viola/Viola chamissoniana/ Syn. Viola robusta/README.md
Markdown
apache-2.0
200
'use strict';

module.exports = function (grunt) {
    grunt.config('a11y', {
        live: {
            options: {
                urls: ['www.google.com']
            }
        }
    });
};
spolnik/javascript-workspace
grunt/TodoApp/tasks/a11y.js
JavaScript
apache-2.0
166
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.tez.dag.api;

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.dag.api.records.DAGProtos.TezEntityDescriptorProto;
import org.junit.Assert;
import org.junit.Test;

public class TestDagTypeConverters {

  @Test(timeout = 5000)
  public void testTezEntityDescriptorSerialization() throws IOException {
    UserPayload payload = UserPayload.create(ByteBuffer.wrap(new String("Foobar").getBytes()), 100);
    String historytext = "Bar123";
    EntityDescriptor entityDescriptor =
        InputDescriptor.create("inputClazz").setUserPayload(payload)
            .setHistoryText(historytext);
    TezEntityDescriptorProto proto = DagTypeConverters.convertToDAGPlan(entityDescriptor);
    Assert.assertEquals(payload.getVersion(), proto.getTezUserPayload().getVersion());
    Assert.assertArrayEquals(payload.deepCopyAsArray(), proto.getTezUserPayload().getUserPayload().toByteArray());
    Assert.assertTrue(proto.hasHistoryText());
    Assert.assertNotEquals(historytext, proto.getHistoryText());
    Assert.assertEquals(historytext, new String(
        TezCommonUtils.decompressByteStringToByteArray(proto.getHistoryText())));

    // Ensure that the history text is not deserialized
    InputDescriptor inputDescriptor =
        DagTypeConverters.convertInputDescriptorFromDAGPlan(proto);
    Assert.assertNull(inputDescriptor.getHistoryText());
  }
}
Altiscale/tez
tez-api/src/test/java/org/apache/tez/dag/api/TestDagTypeConverters.java
Java
apache-2.0
2,256
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.jetty; import java.util.Map; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.impl.JndiRegistry; import org.junit.Test; /** * @version */ public class HttpFilterCamelHeadersTest extends BaseJettyTest { @Test public void testFilterCamelHeaders() throws Exception { Exchange out = template.send("http://localhost:{{port}}/test/filter", new Processor() { public void process(Exchange exchange) throws Exception { exchange.getIn().setBody("Claus"); exchange.getIn().setHeader("bar", 123); } }); assertNotNull(out); assertEquals("Hi Claus", out.getOut().getBody(String.class)); // there should be no internal Camel headers // except for the response code Map<String, Object> headers = out.getOut().getHeaders(); for (String key : headers.keySet()) { if (!key.equalsIgnoreCase(Exchange.HTTP_RESPONSE_CODE)) { assertTrue("Should not contain any Camel internal headers", !key.toLowerCase().startsWith("camel")); } else { assertEquals(200, headers.get(Exchange.HTTP_RESPONSE_CODE)); } } } @Override protected JndiRegistry createRegistry() throws Exception { JndiRegistry jndi = super.createRegistry(); jndi.bind("foo", new MyFooBean()); return jndi; } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("jetty:http://localhost:{{port}}/test/filter").beanRef("foo"); } }; } public static class MyFooBean { public String hello(String name) { return "Hi " + name; } } }
everttigchelaar/camel-svn
components/camel-jetty/src/test/java/org/apache/camel/component/jetty/HttpFilterCamelHeadersTest.java
Java
apache-2.0
2,847
package org.lightadmin.core.view.preparer;

import org.apache.tiles.AttributeContext;
import org.apache.tiles.context.TilesRequestContext;
import org.lightadmin.core.config.domain.DomainTypeAdministrationConfiguration;

public class FormViewPreparer extends ConfigurationAwareViewPreparer {

    @Override
    protected void execute(final TilesRequestContext tilesContext, final AttributeContext attributeContext, final DomainTypeAdministrationConfiguration configuration) {
        super.execute(tilesContext, attributeContext, configuration);

        addAttribute(attributeContext, "fields", configuration.getFormViewFragment().getFields());
    }
}
pramoth/light-admin
lightadmin-core/src/main/java/org/lightadmin/core/view/preparer/FormViewPreparer.java
Java
apache-2.0
652
""" Support for EBox. Get data from 'My Usage Page' page: https://client.ebox.ca/myusage For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.ebox/ """ import logging from datetime import timedelta import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_USERNAME, CONF_PASSWORD, CONF_NAME, CONF_MONITORED_VARIABLES, ) from homeassistant.helpers.entity import Entity from homeassistant.util import Throttle from homeassistant.exceptions import PlatformNotReady _LOGGER = logging.getLogger(__name__) GIGABITS = "Gb" PRICE = "CAD" DAYS = "days" PERCENT = "%" DEFAULT_NAME = "EBox" REQUESTS_TIMEOUT = 15 SCAN_INTERVAL = timedelta(minutes=15) MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15) SENSOR_TYPES = { "usage": ["Usage", PERCENT, "mdi:percent"], "balance": ["Balance", PRICE, "mdi:square-inc-cash"], "limit": ["Data limit", GIGABITS, "mdi:download"], "days_left": ["Days left", DAYS, "mdi:calendar-today"], "before_offpeak_download": ["Download before offpeak", GIGABITS, "mdi:download"], "before_offpeak_upload": ["Upload before offpeak", GIGABITS, "mdi:upload"], "before_offpeak_total": ["Total before offpeak", GIGABITS, "mdi:download"], "offpeak_download": ["Offpeak download", GIGABITS, "mdi:download"], "offpeak_upload": ["Offpeak Upload", GIGABITS, "mdi:upload"], "offpeak_total": ["Offpeak Total", GIGABITS, "mdi:download"], "download": ["Download", GIGABITS, "mdi:download"], "upload": ["Upload", GIGABITS, "mdi:upload"], "total": ["Total", GIGABITS, "mdi:download"], } PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_MONITORED_VARIABLES): vol.All( cv.ensure_list, [vol.In(SENSOR_TYPES)] ), vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): """Set up the EBox sensor.""" username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) httpsession = hass.helpers.aiohttp_client.async_get_clientsession() ebox_data = EBoxData(username, password, httpsession) name = config.get(CONF_NAME) from pyebox.client import PyEboxError try: await ebox_data.async_update() except PyEboxError as exp: _LOGGER.error("Failed login: %s", exp) raise PlatformNotReady sensors = [] for variable in config[CONF_MONITORED_VARIABLES]: sensors.append(EBoxSensor(ebox_data, variable, name)) async_add_entities(sensors, True) class EBoxSensor(Entity): """Implementation of a EBox sensor.""" def __init__(self, ebox_data, sensor_type, name): """Initialize the sensor.""" self.client_name = name self.type = sensor_type self._name = SENSOR_TYPES[sensor_type][0] self._unit_of_measurement = SENSOR_TYPES[sensor_type][1] self._icon = SENSOR_TYPES[sensor_type][2] self.ebox_data = ebox_data self._state = None @property def name(self): """Return the name of the sensor.""" return f"{self.client_name} {self._name}" @property def state(self): """Return the state of the sensor.""" return self._state @property def unit_of_measurement(self): """Return the unit of measurement of this entity, if any.""" return self._unit_of_measurement @property def icon(self): """Icon to use in the frontend, if any.""" return self._icon async def async_update(self): """Get the latest data from EBox and update the state.""" await self.ebox_data.async_update() if self.type in self.ebox_data.data: 
self._state = round(self.ebox_data.data[self.type], 2) class EBoxData: """Get data from Ebox.""" def __init__(self, username, password, httpsession): """Initialize the data object.""" from pyebox import EboxClient self.client = EboxClient(username, password, REQUESTS_TIMEOUT, httpsession) self.data = {} @Throttle(MIN_TIME_BETWEEN_UPDATES) async def async_update(self): """Get the latest data from Ebox.""" from pyebox.client import PyEboxError try: await self.client.fetch_data() except PyEboxError as exp: _LOGGER.error("Error on receive last EBox data: %s", exp) return # Update data self.data = self.client.get_data()
Cinntax/home-assistant
homeassistant/components/ebox/sensor.py
Python
apache-2.0
4,756
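The sensor platform above is table-driven: each entry in CONF_MONITORED_VARIABLES is looked up in SENSOR_TYPES to get the display name, unit and icon for one EBoxSensor, and async_update() only publishes a state when the fetched data dict contains that key. Below is a stripped-down, standalone sketch of that mapping (no Home Assistant or pyebox imports; FakeEboxData and the sample numbers are placeholders).

```python
# Standalone sketch of how monitored variables map onto sensor entities.
# SENSOR_TYPES mirrors a few rows of the table above; FakeEboxData stands in
# for EBoxData / the pyebox client.
SENSOR_TYPES = {
    "usage": ("Usage", "%", "mdi:percent"),
    "download": ("Download", "Gb", "mdi:download"),
    "days_left": ("Days left", "days", "mdi:calendar-today"),
}

class FakeEboxData(object):
    """Placeholder for EBoxData: just hands back a pre-baked data dict."""
    def __init__(self, data):
        self.data = data

class SimpleSensor(object):
    def __init__(self, ebox_data, sensor_type, client_name="EBox"):
        label, unit, icon = SENSOR_TYPES[sensor_type]
        self.name = "%s %s" % (client_name, label)
        self.unit = unit
        self.icon = icon
        self._type = sensor_type
        self._data = ebox_data
        self.state = None

    def update(self):
        # Same guard as async_update(): only publish a state if the key exists.
        if self._type in self._data.data:
            self.state = round(self._data.data[self._type], 2)

data = FakeEboxData({"usage": 42.127, "download": 80.5})
sensors = [SimpleSensor(data, key) for key in ("usage", "download", "days_left")]
for s in sensors:
    s.update()
    print(s.name, s.state, s.unit)   # "days_left" stays None: not present in data
```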
# Eriosyce aspillagae subsp. aspillagae SUBSPECIES

#### Status
ACCEPTED

#### According to
NUB Generator [autonym]

#### Published in
null

#### Original name
null

### Remarks
null
mdoering/backbone
life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Cactaceae/Eriosyce/Eriosyce aspillagae/Eriosyce aspillagae aspillagae/README.md
Markdown
apache-2.0
182
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package org.unitime.timetable.solver.exam.ui; import java.io.PrintWriter; import java.io.Serializable; import java.util.Collection; import java.util.Collections; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import javax.servlet.jsp.JspWriter; import org.cpsolver.exam.model.Exam; import org.cpsolver.exam.model.ExamDistributionConstraint; import org.cpsolver.exam.model.ExamInstructor; import org.cpsolver.exam.model.ExamPlacement; import org.cpsolver.exam.model.ExamRoom; import org.cpsolver.exam.model.ExamRoomPlacement; import org.cpsolver.exam.model.ExamStudent; import org.cpsolver.ifs.extension.AssignedValue; import org.cpsolver.ifs.extension.ConflictStatistics; import org.cpsolver.ifs.model.Constraint; import org.dom4j.Element; import org.unitime.timetable.model.PreferenceLevel; import org.unitime.timetable.solver.ui.TimetableInfo; import org.unitime.timetable.webutil.timegrid.ExamGridTable; /** * @author Tomas Muller */ public class ExamConflictStatisticsInfo implements TimetableInfo, Serializable { private static final long serialVersionUID = 7L; public static int sVersion = 7; // to be able to do some changes in the future public static final int sConstraintTypeRoom = 1; public static final int sConstraintTypeInstructor = 2; public static final int sConstraintTypeGroup = 3; public static final int sConstraintTypeStudent = 4; private Hashtable iVariables = new Hashtable(); public Collection getCBS() { return iVariables.values(); } public CBSVariable getCBS(Long classId) { return (CBSVariable)iVariables.get(classId); } public void load(ConflictStatistics cbs) { load(cbs, null); } public ExamConflictStatisticsInfo getConflictStatisticsSubInfo(Vector variables) { ExamConflictStatisticsInfo ret = new ExamConflictStatisticsInfo(); for (Enumeration e=variables.elements();e.hasMoreElements();) { Exam exam = (Exam)e.nextElement(); CBSVariable var = (CBSVariable)iVariables.get(exam.getId()); if (var!=null) ret.iVariables.put(exam.getId(),var); } return ret; } public void merge(ExamConflictStatisticsInfo info) { if (info!=null) iVariables.putAll(info.iVariables); } public void load(ConflictStatistics cbs, Long examId) { iVariables.clear(); for (Iterator i1=cbs.getNoGoods().entrySet().iterator();i1.hasNext();) { Map.Entry entry = (Map.Entry)i1.next(); AssignedValue assignment = (AssignedValue)entry.getKey(); ExamPlacement placement = (ExamPlacement)assignment.getValue(); Exam exam = (Exam)placement.variable(); if (examId!=null && !examId.equals(exam.getId())) continue; CBSVariable var = (CBSVariable)iVariables.get(exam.getId()); if (var==null) { 
String pref = PreferenceLevel.sNeutral;//SolverGridModel.hardConflicts2pref(exam,null); var = new CBSVariable(exam.getId(),exam.getName(),pref); iVariables.put(exam.getId(),var); } Vector roomIds = new Vector(); Vector roomNames = new Vector(); Vector roomPrefs = new Vector(); for (Iterator i=new TreeSet(placement.getRoomPlacements()).iterator();i.hasNext();) { ExamRoomPlacement room = (ExamRoomPlacement)i.next(); roomIds.add(room.getId()); roomNames.add(room.getName()); roomPrefs.add(exam.getRoomPlacements().size()==placement.getRoomPlacements().size()?PreferenceLevel.sIntLevelRequired:room.getPenalty(placement.getPeriod())); } CBSValue val = new CBSValue(var, placement.getPeriod().getId(), placement.getPeriod().getDayStr()+" "+placement.getPeriod().getTimeStr(), (exam.getPeriodPlacements().size()==1?PreferenceLevel.sIntLevelRequired:placement.getPeriodPlacement().getPenalty()), roomIds, roomNames, roomPrefs); var.values().add(val); List noGoods = (List)entry.getValue(); Hashtable constr2assignments = new Hashtable(); for (Iterator e2=noGoods.iterator();e2.hasNext();) { AssignedValue noGood = (AssignedValue)e2.next(); if (noGood.getConstraint()==null) continue; Vector aaa = (Vector)constr2assignments.get(noGood.getConstraint()); if (aaa == null) { aaa = new Vector(); constr2assignments.put(noGood.getConstraint(), aaa); } aaa.addElement(noGood); } for (Iterator i2=constr2assignments.entrySet().iterator();i2.hasNext();) { Map.Entry entry2 = (Map.Entry)i2.next(); Constraint constraint = (Constraint)entry2.getKey(); Vector noGoodsThisConstraint = (Vector)entry2.getValue(); CBSConstraint con = null; if (constraint instanceof ExamRoom) { con = new CBSConstraint(val, sConstraintTypeRoom, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired); } else if (constraint instanceof ExamInstructor) { con = new CBSConstraint(val, sConstraintTypeInstructor, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired); } else if (constraint instanceof ExamStudent) { con = new CBSConstraint(val, sConstraintTypeStudent, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired); } else if (constraint instanceof ExamDistributionConstraint) { con = new CBSConstraint(val, sConstraintTypeGroup, constraint.getId(), ((ExamDistributionConstraint)constraint).getTypeString(), (constraint.isHard()?PreferenceLevel.sRequired:PreferenceLevel.int2prolog(((ExamDistributionConstraint)constraint).getWeight()))); } else { con = new CBSConstraint(val, -1, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired); } val.constraints().add(con); for (Enumeration e3=noGoodsThisConstraint.elements();e3.hasMoreElements();) { AssignedValue ass = (AssignedValue)e3.nextElement(); ExamPlacement p = (ExamPlacement)ass.getValue(); Exam x = (Exam)p.variable(); String pr = PreferenceLevel.sNeutral;//SolverGridModel.hardConflicts2pref(x,p); Vector aroomIds = new Vector(); Vector aroomNames = new Vector(); Vector aroomPrefs = new Vector(); for (Iterator i=new TreeSet(p.getRoomPlacements()).iterator();i.hasNext();) { ExamRoomPlacement room = (ExamRoomPlacement)i.next(); aroomIds.add(room.getId()); aroomNames.add(room.getName()); aroomPrefs.add(x.getRoomPlacements().size()==p.getRoomPlacements().size()?PreferenceLevel.sIntLevelRequired:room.getPenalty(p.getPeriod())); } CBSAssignment a = new CBSAssignment(con, x.getId(), x.getName(), pr, p.getPeriod().getId(), p.getPeriod().getDayStr()+" "+p.getPeriod().getTimeStr(), 
(x.getPeriodPlacements().size()==1?PreferenceLevel.sIntLevelRequired:p.getPeriodPlacement().getPenalty()), aroomIds, aroomNames, aroomPrefs); con.assignments().add(a); a.incCounter((int)ass.getCounter(0)); } } } } public void load(Element root) { int version = Integer.parseInt(root.attributeValue("version")); if (version==sVersion) { iVariables.clear(); for (Iterator i1=root.elementIterator("var");i1.hasNext();) { CBSVariable var = new CBSVariable((Element)i1.next()); iVariables.put(Long.valueOf(var.getId()),var); } } } public void save(Element root) { root.addAttribute("version", String.valueOf(sVersion)); for (Iterator i1=iVariables.values().iterator();i1.hasNext();) { ((CBSVariable)i1.next()).save(root.addElement("var")); } } public static interface Counter { public int getCounter(); public void incCounter(int value); } public static class CBSVariable implements Counter, Comparable, Serializable { private static final long serialVersionUID = 1L; int iCounter = 0; long iExamId; String iName; HashSet iValues = new HashSet(); CBSConstraint iConstraint = null; String iPref = null; CBSVariable(long examId, String name, String pref) { iExamId = examId; iName = name; iPref = pref; } CBSVariable(CBSConstraint constraint, long classId, String examId, String pref) { iConstraint = constraint; iExamId = classId; iName = examId; iPref = pref; } CBSVariable(Element element) { iExamId = Long.parseLong(element.attributeValue("exam")); iName = element.attributeValue("name"); iPref = element.attributeValue("pref"); for (Iterator i=element.elementIterator("val");i.hasNext();) iValues.add(new CBSValue(this,(Element)i.next())); } public long getId() { return iExamId; } public int getCounter() { return iCounter; } public String getName() { return iName; } public String getPref() { return iPref; } public void incCounter(int value) { iCounter+=value; if (iConstraint!=null) iConstraint.incCounter(value); } public Set values() { return iValues; } public int hashCode() { return (Long.valueOf(iExamId)).hashCode(); } public boolean equals(Object o) { if (o==null || !(o instanceof CBSVariable)) return false; return ((CBSVariable)o).getId()==getId(); } public int compareTo(Object o) { if (o==null || !(o instanceof CBSVariable)) return -1; int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSVariable)o).getCounter())); if (ret!=0) return ret; return toString().compareTo(o.toString()); } public String toString() { return iName; } public void save(Element element) { element.addAttribute("exam",String.valueOf(iExamId)); element.addAttribute("name", iName); if (iPref!=null) element.addAttribute("pref", iPref); for (Iterator i=iValues.iterator();i.hasNext();) ((CBSValue)i.next()).save(element.addElement("val")); } } public static class CBSValue implements Counter, Comparable, Serializable { private static final long serialVersionUID = 1L; int iCounter = 0; Long iPeriodId; String iPeriodName; int iPeriodPref; Vector iRoomIds; String iInstructorName = null; Vector iRoomNames; Vector iRoomPrefs; CBSVariable iVariable = null; HashSet iConstraints = new HashSet(); HashSet iAssignments = new HashSet(); int iLength; CBSValue(CBSVariable var, Long periodId, String periodName, int periodPref, Vector roomIds, Vector roomNames, Vector roomPrefs) { iVariable = var; iRoomIds = roomIds; iRoomNames = roomNames; iRoomPrefs = roomPrefs; iPeriodId = periodId; iPeriodName = periodName; iPeriodPref = periodPref; } CBSValue(CBSVariable var, Element element) { iVariable = var; iPeriodId = 
Long.valueOf(element.attributeValue("period")); iPeriodName = element.attributeValue("name"); iPeriodPref = Integer.parseInt(element.attributeValue("pref")); iRoomIds = new Vector(); iRoomNames = new Vector(); iRoomPrefs = new Vector(); for (Iterator i=element.elementIterator("room");i.hasNext();) { Element r = (Element)i.next(); iRoomIds.addElement(Integer.valueOf(r.attributeValue("id"))); iRoomNames.addElement(r.attributeValue("name")); iRoomPrefs.addElement(Integer.valueOf(r.attributeValue("pref"))); } for (Iterator i=element.elementIterator("cons");i.hasNext();) iConstraints.add(new CBSConstraint(this,(Element)i.next())); } public CBSVariable variable() { return iVariable; } public Long getPeriodId() { return iPeriodId; } public String getPeriodName() { return iPeriodName; } public int getPeriodPref() { return iPeriodPref; } public Vector getRoomNames() { return iRoomNames; } public Vector getRoomPrefs() { return iRoomPrefs; } public String toString() { return iPeriodName+" "+iRoomNames; } public int getCounter() { return iCounter; } public void incCounter(int value) { iCounter+=value; if (iVariable!=null) iVariable.incCounter(value); } public Vector getRoomIds() { return iRoomIds; } public Set constraints() { return iConstraints; } public Set assignments() { return iAssignments; } public int hashCode() { return combine(iPeriodId.hashCode(), (iRoomIds==null?0:iRoomIds.hashCode())); } public boolean equals(Object o) { if (o==null || !(o instanceof CBSValue)) return false; CBSValue v = (CBSValue)o; return v.getRoomIds().equals(getRoomIds()) && v.getPeriodId().equals(getPeriodId()); } public int compareTo(Object o) { if (o==null || !(o instanceof CBSValue)) return -1; int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSValue)o).getCounter())); if (ret!=0) return ret; return toString().compareTo(o.toString()); } public void save(Element element) { element.addAttribute("period",String.valueOf(iPeriodId)); element.addAttribute("pref",String.valueOf(iPeriodPref)); element.addAttribute("name", iPeriodName); for (int i=0;i<iRoomIds.size();i++) { Element r = element.addElement("room"); r.addAttribute("id",iRoomIds.elementAt(i).toString()); r.addAttribute("name",iRoomNames.elementAt(i).toString()); r.addAttribute("pref",iRoomPrefs.elementAt(i).toString()); } for (Iterator i=iConstraints.iterator();i.hasNext();) ((CBSConstraint)i.next()).save(element.addElement("cons")); } } public static class CBSConstraint implements Counter, Comparable, Serializable { private static final long serialVersionUID = 1L; CBSValue iValue; int iCounter = 0; long iId; String iName = null; int iType; HashSet iAssignments = new HashSet(); HashSet iVariables = new HashSet(); String iPref; CBSConstraint(int type, long id, String name, String pref) { iId = id; iType = type; iName = name; iPref = pref; } CBSConstraint(CBSValue value, int type, long id, String name, String pref) { iId = id; iType = type; iValue = value; iName = name; iPref = pref; } CBSConstraint(CBSValue value, Element element) { iValue = value; iId = Integer.parseInt(element.attributeValue("id")); iType = Integer.parseInt(element.attributeValue("type")); iName = element.attributeValue("name"); iPref = element.attributeValue("pref"); for (Iterator i=element.elementIterator("nogood");i.hasNext();) iAssignments.add(new CBSAssignment(this,(Element)i.next())); } public long getId() { return iId; } public int getType() { return iType; } public String getName() { return iName; } public CBSValue value() { return iValue; } public Set variables() { 
return iVariables; } public Set assignments() { return iAssignments; } public String getPref() { return iPref; } public int getCounter() { return iCounter; } public void incCounter(int value) { iCounter+=value; if (iValue!=null) iValue.incCounter(value); } public int hashCode() { return combine((int)iId,iType); } public boolean equals(Object o) { if (o==null || !(o instanceof CBSConstraint)) return false; CBSConstraint c = (CBSConstraint)o; return c.getId()==getId() && c.getType()==getType(); } public int compareTo(Object o) { if (o==null || !(o instanceof CBSConstraint)) return -1; int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSConstraint)o).getCounter())); if (ret!=0) return ret; return toString().compareTo(o.toString()); } public void save(Element element) { element.addAttribute("id",String.valueOf(iId)); element.addAttribute("type",String.valueOf(iType)); if (iName!=null) element.addAttribute("name", iName); if (iPref!=null) element.addAttribute("pref", iPref); for (Iterator i=iAssignments.iterator();i.hasNext();) ((CBSAssignment)i.next()).save(element.addElement("nogood")); } } public static class CBSAssignment implements Counter, Comparable, Serializable { private static final long serialVersionUID = 1L; CBSConstraint iConstraint; Long iExamId; String iExamName; String iExamPref; Long iPeriodId; String iPeriodName; int iPeriodPref; int iCounter = 0; Vector iRoomIds; Vector iRoomPrefs; Vector iRoomNames; CBSAssignment(CBSConstraint constraint, Long examId, String examName, String examPref, Long periodId, String periodName, int periodPref, Vector roomIds, Vector roomNames, Vector roomPrefs) { iExamId = examId; iExamName = examName; iExamPref = examPref; iPeriodId = periodId; iPeriodName = periodName; iPeriodPref = periodPref; iRoomIds = roomIds; iRoomNames = roomNames; iRoomPrefs = roomPrefs; iConstraint = constraint; } CBSAssignment(CBSConstraint constraint, Element element) { iConstraint = constraint; iExamId = Long.valueOf(element.attributeValue("exam")); iExamName = element.attributeValue("name"); iExamPref = element.attributeValue("pref"); iRoomIds = new Vector(); iRoomNames = new Vector(); iRoomPrefs = new Vector(); for (Iterator i=element.elementIterator("room");i.hasNext();) { Element r = (Element)i.next(); iRoomIds.addElement(Integer.valueOf(r.attributeValue("id"))); iRoomNames.addElement(r.attributeValue("name")); iRoomPrefs.addElement(Integer.valueOf(r.attributeValue("pref"))); } iPeriodId = Long.valueOf(element.attributeValue("period")); iPeriodName = element.attributeValue("periodName"); iPeriodPref = Integer.parseInt(element.attributeValue("periodPref")); incCounter(Integer.parseInt(element.attributeValue("cnt"))); } public Long getId() { return iExamId; } public CBSConstraint getConstraint() { return iConstraint; } public String getName() { return iExamName; } public String getPref() { return iExamPref; } public Long getPeriodId() { return iPeriodId; } public String getPeriodName() { return iPeriodName; } public int getPeriodPref() { return iPeriodPref; } public String toString() { return iExamName+" "+iPeriodName+" "+iRoomNames; } public Vector getRoomNames() { return iRoomNames; } public Vector getRoomIds() { return iRoomIds; } public Vector getRoomPrefs() { return iRoomPrefs; } public int hashCode() { return combine(iExamId.hashCode(),combine(iRoomIds.hashCode(),iPeriodId.hashCode())); } public int getCounter() { return iCounter; } public void incCounter(int value) { iCounter+=value; if (iConstraint!=null) iConstraint.incCounter(value); } public 
boolean equals(Object o) { if (o==null || !(o instanceof CBSAssignment)) return false; CBSAssignment a = (CBSAssignment)o; return a.getId().equals(getId()) && a.getRoomIds().equals(getRoomIds()) && a.getPeriodId().equals(getPeriodId()); } public int compareTo(Object o) { if (o==null || !(o instanceof CBSAssignment)) return -1; int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSAssignment)o).getCounter())); if (ret!=0) return ret; return toString().compareTo(o.toString()); } public void save(Element element) { element.addAttribute("exam",String.valueOf(iExamId)); element.addAttribute("name",iExamName); element.addAttribute("pref",iExamPref); for (int i=0;i<iRoomIds.size();i++) { Element r = element.addElement("room"); r.addAttribute("id",iRoomIds.elementAt(i).toString()); r.addAttribute("name",iRoomNames.elementAt(i).toString()); r.addAttribute("pref",iRoomPrefs.elementAt(i).toString()); } element.addAttribute("period", String.valueOf(iPeriodId)); element.addAttribute("periodName", iPeriodName); element.addAttribute("periodPref", String.valueOf(iPeriodPref)); element.addAttribute("cnt", String.valueOf(iCounter)); } } private static int combine(int a, int b) { int ret = 0; for (int i=0;i<15;i++) ret = ret | ((a & (1<<i))<<i) | ((b & (1<<i))<<(i+1)); return ret; } //--------- toHtml ------------------------------------------------- private static String IMG_BASE = "images/"; private static String IMG_EXPAND = IMG_BASE+"expand_node_btn.gif"; private static String IMG_COLLAPSE = IMG_BASE+"collapse_node_btn.gif"; private static String IMG_LEAF = IMG_BASE+"end_node_btn.gif"; public static int TYPE_VARIABLE_BASED = 0; public static int TYPE_CONSTRAINT_BASED = 1; private void menu_item(PrintWriter out, String id, String name, String description, String page, boolean isCollapsed) { out.println("<div style=\"margin-left:5px;\">"); out.println("<A style=\"border:0;background:0\" id=\"__idMenu"+id+"\" href=\"javascript:toggle('"+id+"')\" name=\""+name+"\">"); out.println("<img id=\"__idMenuImg"+id+"\" border=\"0\" src=\""+(isCollapsed ? IMG_EXPAND : IMG_COLLAPSE)+"\" align=\"absmiddle\"></A>"); out.println("&nbsp;<A class='noFancyLinks' target=\"__idContentFrame\" "+(page == null ? "" : page+" onmouseover=\"this.style.cursor='hand';this.style.cursor='pointer';\" ")+"title=\""+(description == null ? "" : description)+"\" >"+ name+(description == null?"":" <font color='gray'>[" + description + "]</font>")+"</A><br>"); out.println("</div>"); out.println("<div ID=\"__idMenuDiv"+id+"\" style=\"display:"+(isCollapsed ? "none" : "block")+";position:relative;margin-left:18px;\">"); } private void leaf_item(PrintWriter out, String name, String description, String page) { out.println("<div style=\"margin-left:5px;\">"); out.println("<img border=\"0\" src=\""+IMG_LEAF+"\" align=\"absmiddle\">"); out.println("&nbsp;<A class='noFancyLinks' target=\"__idContentFrame\" "+(page == null ? "" : page + " onmouseover=\"this.style.cursor='hand';this.style.cursor='pointer';\" ")+"title=\""+(description == null ? "" : description)+"\" >"+name+(description == null ? 
"" : " <font color='gray'>[" + description + "]</font>")+"</A><br>"); out.println("</div>"); } private void end_item(PrintWriter out) { out.println("</div>"); } private void unassignedVariableMenuItem(PrintWriter out, String menuId, CBSVariable variable, boolean clickable) { String name = "<font color='"+PreferenceLevel.prolog2color(variable.getPref())+"'>"+ variable.getName()+ "</font>"; String description = null; String onClick = null; if (clickable) onClick = "onclick=\"(parent ? parent : window).showGwtDialog('Examination Assignment', 'examInfo.do?examId="+variable.getId()+"&op=Reset','900','90%');\""; menu_item(out, menuId, variable.getCounter() + "&times; " + name, description, onClick, true); } private void unassignmentMenuItem(PrintWriter out, String menuId, CBSValue value, boolean clickable) { String name = "<font color='"+PreferenceLevel.int2color(value.getPeriodPref())+"'>"+ value.getPeriodName()+ "</font> "; String roomLink = ""; for (int i=0;i<value.getRoomIds().size();i++) { name += (i>0?", ":"")+"<font color='"+PreferenceLevel.int2color(((Integer)value.getRoomPrefs().elementAt(i)).intValue())+"'>"+ value.getRoomNames().elementAt(i)+"</font>"; roomLink += (i>0?":":"")+value.getRoomIds().elementAt(i); } String description = null; String onClick = null; if (clickable) onClick = "onclick=\"(parent ? parent : window).showGwtDialog('Examination Assignment', 'examInfo.do?examId="+value.variable().getId()+"&period="+value.getPeriodId()+"&room="+roomLink+"&op=Try&reset=1','900','90%');\""; menu_item(out, menuId, value.getCounter() + "&times; " + name, description, onClick, true); } private void constraintMenuItem(PrintWriter out, String menuId, CBSConstraint constraint, boolean clickable) { String name = "<font color='"+PreferenceLevel.prolog2color(constraint.getPref())+"'>"; String link = null; switch (constraint.getType()) { case sConstraintTypeGroup : name += "Distribution "+constraint.getName(); break; case sConstraintTypeInstructor : name += "Instructor "+constraint.getName(); if (clickable) link = "examGrid.do?filter="+constraint.getName()+"&resource="+ExamGridTable.sResourceInstructor+"&op=Cbs"; break; case sConstraintTypeRoom : name += "Room "+constraint.getName(); if (clickable) link = "examGrid.do?filter="+constraint.getName()+"&resource="+ExamGridTable.sResourceRoom+"&op=Cbs"; break; case sConstraintTypeStudent : name += "Student "+constraint.getName(); break; default : name += (constraint.getName()==null?"Unknown":constraint.getName()); } name += "</font>"; String description = null; String onClick = null; if (link!=null) onClick = "href=\""+link+"\""; menu_item(out, menuId, constraint.getCounter() + "&times; " + name, description, onClick, true); } private void assignmentLeafItem(PrintWriter out, CBSAssignment assignment, boolean clickable) { String name = "<font color='"+PreferenceLevel.prolog2color(assignment.getPref())+"'>"+ assignment.getName()+ "</font> &larr; "+ "<font color='"+PreferenceLevel.int2color(assignment.getPeriodPref())+"'>"+ assignment.getPeriodName()+ "</font> "; String roomLink = ""; for (int i=0;i<assignment.getRoomIds().size();i++) { name += (i>0?", ":"")+"<font color='"+PreferenceLevel.int2color(((Integer)assignment.getRoomPrefs().elementAt(i)).intValue())+"'>"+ assignment.getRoomNames().elementAt(i)+"</font>"; roomLink += (i>0?":":"")+assignment.getRoomIds().elementAt(i); } String onClick = null; if (clickable) onClick = "onclick=\"(parent ? 
parent : window).showGwtDialog('Examination Assignment', 'examInfo.do?examId="+assignment.getId()+"&period="+assignment.getPeriodId()+"&room="+roomLink+"&op=Try&reset=1','900','90%');\""; leaf_item(out, assignment.getCounter()+"&times; "+name, null, onClick); } public static void printHtmlHeader(JspWriter jsp) { PrintWriter out = new PrintWriter(jsp); printHtmlHeader(out, false); } public static void printHtmlHeader(PrintWriter out, boolean style) { if (style) { out.println("<style type=\"text/css\">"); out.println("<!--"); out.println("A:link { color: blue; text-decoration: none; border:0; background:0; }"); out.println("A:visited { color: blue; text-decoration: none; border:0; background:0; }"); out.println("A:active { color: blue; text-decoration: none; border:0; background:0; }"); out.println("A:hover { color: blue; text-decoration: none; border:0; background:0; }"); out.println(".TextBody { background-color: white; color:black; font-size: 12px; }"); out.println(".WelcomeHead { color: black; margin-top: 0px; margin-left: 0px; font-weight: bold; text-align: right; font-size: 30px; font-family: Comic Sans MS}"); out.println("-->"); out.println("</style>"); out.println(); } out.println("<script language=\"javascript\" type=\"text/javascript\">"); out.println("function toggle(item) {"); out.println(" obj=document.getElementById(\"__idMenuDiv\"+item);"); out.println(" visible=(obj.style.display!=\"none\");"); out.println(" img=document.getElementById(\"__idMenuImg\" + item);"); out.println(" menu=document.getElementById(\"__idMenu\" + item);"); out.println(" if (visible) {obj.style.display=\"none\";img.src=\""+IMG_EXPAND+"\";}"); out.println(" else {obj.style.display=\"block\";img.src=\""+IMG_COLLAPSE+"\";}"); out.println("}"); out.println("</script>"); out.flush(); } private Vector filter(Collection counters, double limit) { Vector cnt = new Vector(counters); Collections.sort(cnt); int total = 0; for (Enumeration e=cnt.elements();e.hasMoreElements();) total += ((Counter)e.nextElement()).getCounter(); int totalLimit = (int)Math.ceil(limit*total); int current = 0; Vector ret = new Vector(); for (Enumeration e=cnt.elements();e.hasMoreElements();) { Counter c = (Counter)e.nextElement(); ret.addElement(c); current += c.getCounter(); if (current>=totalLimit) break; } return ret; } /** Print conflict-based statistics in HTML format */ public void printHtml(JspWriter jsp, double limit, int type, boolean clickable) { printHtml(jsp, null, new double[] {limit,limit,limit,limit}, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(PrintWriter out, double limit, int type, boolean clickable) { printHtml(out, null, new double[] {limit,limit,limit,limit}, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(JspWriter jsp, double[] limit, int type, boolean clickable) { printHtml(jsp, null, limit, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(PrintWriter out, double[] limit, int type, boolean clickable) { printHtml(out, null, limit, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(JspWriter jsp, Long classId, double limit, int type, boolean clickable) { printHtml(jsp, classId, new double[] {limit,limit,limit,limit}, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(PrintWriter out, Long classId, double limit, int type, boolean clickable) { printHtml(out, classId, new double[] 
{limit,limit,limit,limit}, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(JspWriter jsp, Long classId, double[] limit, int type, boolean clickable) { PrintWriter out = new PrintWriter(jsp); printHtml(out, classId, limit, type, clickable); } /** Print conflict-based statistics in HTML format */ public void printHtml(PrintWriter out, Long classId, double[] limit, int type, boolean clickable) { if (type == TYPE_VARIABLE_BASED) { Vector vars = filter(iVariables.values(), limit[0]); if (classId!=null) { CBSVariable var = (CBSVariable)iVariables.get(classId); vars.clear(); if (var!=null) vars.add(var); } for (Enumeration e1 = vars.elements(); e1.hasMoreElements();) { CBSVariable variable = (CBSVariable)e1.nextElement(); String m1 = String.valueOf(variable.getId()); if (classId==null) unassignedVariableMenuItem(out,m1,variable, clickable); Vector vals = filter(variable.values(), limit[1]); int id = 0; for (Enumeration e2 = vals.elements();e2.hasMoreElements();) { CBSValue value = (CBSValue)e2.nextElement(); String m2 = m1+"."+(id++); unassignmentMenuItem(out,m2,value, clickable); Vector constraints =filter(value.constraints(),limit[2]); for (Enumeration e3 = constraints.elements(); e3.hasMoreElements();) { CBSConstraint constraint = (CBSConstraint)e3.nextElement(); String m3 = m2 + constraint.getType()+"."+constraint.getId(); constraintMenuItem(out,m3,constraint, clickable); Vector assignments = filter(constraint.assignments(),limit[3]); for (Enumeration e4 = assignments.elements();e4.hasMoreElements();) { CBSAssignment assignment = (CBSAssignment)e4.nextElement(); assignmentLeafItem(out, assignment, clickable); } end_item(out); } end_item(out); } end_item(out); } } else if (type == TYPE_CONSTRAINT_BASED) { Hashtable constraints = new Hashtable(); for (Enumeration e1 = iVariables.elements(); e1.hasMoreElements();) { CBSVariable variable = (CBSVariable)e1.nextElement(); if (classId!=null && classId.longValue()!=variable.getId()) continue; for (Iterator e2=variable.values().iterator();e2.hasNext();) { CBSValue value = (CBSValue)e2.next(); for (Iterator e3=value.constraints().iterator();e3.hasNext();) { CBSConstraint constraint = (CBSConstraint)e3.next(); CBSConstraint xConstraint = (CBSConstraint)constraints.get(constraint.getType()+"."+constraint.getId()); if (xConstraint==null) { xConstraint = new CBSConstraint(constraint.getType(),constraint.getId(),constraint.getName(),constraint.getPref()); constraints.put(constraint.getType()+"."+constraint.getId(),xConstraint); } CBSVariable xVariable = null; for (Iterator i=xConstraint.variables().iterator();i.hasNext();) { CBSVariable v = (CBSVariable)i.next(); if (v.getId()==variable.getId()) { xVariable = v; break; } } if (xVariable==null) { xVariable = new CBSVariable(xConstraint,variable.getId(),variable.getName(),variable.getPref()); xConstraint.variables().add(xVariable); } CBSValue xValue = new CBSValue(xVariable, value.getPeriodId(), value.getPeriodName(), value.getPeriodPref(), value.getRoomIds(), value.getRoomNames(), value.getRoomPrefs()); xVariable.values().add(xValue); for (Iterator e4=constraint.assignments().iterator();e4.hasNext();) { CBSAssignment assignment = (CBSAssignment)e4.next(); xValue.assignments().add(assignment); xValue.incCounter(assignment.getCounter()); } } } } Vector consts = filter(constraints.values(), limit[0]); for (Enumeration e1 = consts.elements(); e1.hasMoreElements();) { CBSConstraint constraint = (CBSConstraint)e1.nextElement(); String m1 = 
constraint.getType()+"."+constraint.getId(); constraintMenuItem(out,m1,constraint, clickable); Vector variables = filter(constraint.variables(), limit[1]); Collections.sort(variables); for (Enumeration e2 = variables.elements(); e2.hasMoreElements();) { CBSVariable variable = (CBSVariable)e2.nextElement(); String m2 = m1+"."+variable.getId(); if (classId==null) unassignedVariableMenuItem(out,m2,variable, clickable); Vector vals = filter(variable.values(), limit[2]); int id = 0; for (Enumeration e3 = vals.elements();e3.hasMoreElements();) { CBSValue value = (CBSValue)e3.nextElement(); String m3 = m2+"."+(id++); unassignmentMenuItem(out,m3,value, clickable); Vector assignments = filter(value.assignments(), limit[3]); for (Enumeration e4 = assignments.elements();e4.hasMoreElements();) { CBSAssignment assignment = (CBSAssignment)e4.nextElement(); assignmentLeafItem(out, assignment, clickable); } end_item(out); } if (classId==null) end_item(out); } end_item(out); } } out.flush(); } public boolean saveToFile() { return true; } }
UniTime/unitime
JavaSource/org/unitime/timetable/solver/exam/ui/ExamConflictStatisticsInfo.java
Java
apache-2.0
36,624
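The ExamConflictStatisticsInfo class above exposes static printHtmlHeader helpers plus printHtml overloads that filter the conflict tree by a counter share and render it either variable-based or constraint-based. The following is only a hedged usage sketch in Java: obtaining a populated instance (named cbs here) from the exam solver is assumed and not shown in the excerpt.

import java.io.PrintWriter;
import org.unitime.timetable.solver.exam.ui.ExamConflictStatisticsInfo;

public class CbsReportSketch {
    // Hypothetical helper: writes the variable-based conflict tree for an already
    // populated ExamConflictStatisticsInfo; how the statistics were gathered by the
    // solver is assumed and outside this sketch.
    public static void write(ExamConflictStatisticsInfo cbs, PrintWriter out) {
        // Emit the toggle() JavaScript (and the optional inline CSS) used by the menu items.
        ExamConflictStatisticsInfo.printHtmlHeader(out, true);
        // At each level of the tree keep only the entries that together account for
        // the top 25% of recorded conflicts, and render clickable links.
        cbs.printHtml(out, 0.25, ExamConflictStatisticsInfo.TYPE_VARIABLE_BASED, true);
    }
}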
#ifndef _CUFTPD_H
#define _CUFTPD_H

#define CUFTPD_DEBUG(fmt, ...) cuftpd_debug(__FILE__, __LINE__, fmt, __VA_ARGS__)
#define CUFTPD_ARR_LEN(arr) (sizeof(arr)/sizeof(arr[0]))

#define CUFTPD_VER "1.0"
#define CUFTPD_DEF_SRV_PORT 21
#define CUFTPD_LISTEN_QU_LEN 8
#define CUFTPD_LINE_END "\r\n"

#define CUFTPD_OK 0
#define CUFTPD_ERR (-1)

#define CUFTPD_CHECK_LOGIN() \
    do { \
        if (!cuftpd_cur_user) { \
            cuftpd_send_resp(ctrlfd, 530, "first please"); \
            return CUFTPD_ERR; \
        } \
    } while(0)

struct cuftpd_cmd_struct {
    char *cmd_name;
    int (*cmd_handler)(int ctrlfd, char *cmd_line);
};

struct cuftpd_user_struct {
    char user[128];
    char pass[128];
};

#endif
easion/os_sdk
system/ftpd.h
C
apache-2.0
901
package no.nb.nna.veidemann.chrome.client.ws;

import no.nb.nna.veidemann.chrome.client.ws.GetBrowserVersionCmd.Response;

public class GetBrowserVersionCmd extends Command<Response> {

    public GetBrowserVersionCmd(Cdp client) {
        super(client, "Browser", "getVersion", Response.class);
    }

    public static class Response {

        private String protocolVersion;

        private String product;

        private String revision;

        private String userAgent;

        private String jsVersion;

        /**
         * Protocol version.
         */
        public String protocolVersion() {
            return protocolVersion;
        }

        /**
         * Product name.
         */
        public String product() {
            return product;
        }

        /**
         * Product revision.
         */
        public String revision() {
            return revision;
        }

        /**
         * User-Agent.
         */
        public String userAgent() {
            return userAgent;
        }

        /**
         * V8 version.
         */
        public String jsVersion() {
            return jsVersion;
        }

        public String toString() {
            return "Version{protocolVersion=" + protocolVersion
                    + ", product=" + product
                    + ", revision=" + revision
                    + ", userAgent=" + userAgent
                    + ", jsVersion=" + jsVersion + "}";
        }
    }
}
nlnwa/broprox
veidemann-chrome-client/src/main/java/no/nb/nna/veidemann/chrome/client/ws/GetBrowserVersionCmd.java
Java
apache-2.0
1,385
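GetBrowserVersionCmd above only models the Browser.getVersion message and its Response payload; the Command base class that performs the actual CDP round trip is not part of this excerpt. The sketch below is therefore limited to the accessors that are shown: it assumes the caller has already obtained a Response through whatever execution path Command provides.

import no.nb.nna.veidemann.chrome.client.ws.GetBrowserVersionCmd;

public class BrowserVersionSketch {
    // Formatting-only helper over the Response accessors defined above; running the
    // command against a Cdp client to obtain the Response is assumed, not shown.
    public static String describe(GetBrowserVersionCmd.Response version) {
        return "CDP " + version.protocolVersion()
                + ", " + version.product()
                + " rev " + version.revision()
                + ", V8 " + version.jsVersion()
                + ", UA " + version.userAgent();
    }
}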
package fr.javatronic.blog.massive.annotation1;

import fr.javatronic.blog.processor.Annotation_001;

@Annotation_001
public class Class_914 {
}
lesaint/experimenting-annotation-processing
experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/Class_914.java
Java
apache-2.0
145
OC.L10N.register( "settings", { "Security & Setup Warnings" : "Säkerhets & Inställningsvarningar", "Cron" : "Cron", "Sharing" : "Dela", "Security" : "Säkerhet", "Email Server" : "E-postserver", "Log" : "Logg", "Authentication error" : "Fel vid autentisering", "Your full name has been changed." : "Hela ditt namn har ändrats", "Unable to change full name" : "Kunde inte ändra hela namnet", "Files decrypted successfully" : "Filerna dekrypterades utan fel", "Couldn't decrypt your files, please check your owncloud.log or ask your administrator" : "Det gick inte att dekryptera dina filer, kontrollera din owncloud.log eller fråga administratören", "Couldn't decrypt your files, check your password and try again" : "Det gick inte att dekryptera filerna, kontrollera ditt lösenord och försök igen", "Encryption keys deleted permanently" : "Krypteringsnycklar raderades permanent", "Couldn't permanently delete your encryption keys, please check your owncloud.log or ask your administrator" : "Det gick inte att permanent ta bort dina krypteringsnycklar, kontrollera din owncloud.log eller fråga din administratör", "Couldn't remove app." : "Kunde inte ta bort applikationen.", "Backups restored successfully" : "Återställning av säkerhetskopior lyckades", "Couldn't restore your encryption keys, please check your owncloud.log or ask your administrator" : "Kan inte återställa dina krypteringsnycklar, vänligen kontrollera din owncloud.log eller fråga din administratör.", "Language changed" : "Språk ändrades", "Invalid request" : "Ogiltig begäran", "Admins can't remove themself from the admin group" : "Administratörer kan inte ta bort sig själva från admingruppen", "Unable to add user to group %s" : "Kan inte lägga till användare i gruppen %s", "Unable to remove user from group %s" : "Kan inte radera användare från gruppen %s", "Couldn't update app." : "Kunde inte uppdatera appen.", "Wrong password" : "Fel lösenord", "No user supplied" : "Ingen användare angiven", "Please provide an admin recovery password, otherwise all user data will be lost" : "Ange ett återställningslösenord för administratören. Annars kommer all användardata förloras", "Wrong admin recovery password. Please check the password and try again." : "Felaktigt återställningslösenord för administratör. Kolla lösenordet och prova igen.", "Back-end doesn't support password change, but the users encryption key was successfully updated." : "Gränssnittet stödjer inte byte av lösenord, men användarnas krypteringsnyckel blev uppdaterad.", "Unable to change password" : "Kunde inte ändra lösenord", "Enabled" : "Aktiverad", "Not enabled" : "Inte aktiverad", "Recommended" : "Rekomenderad", "Group already exists." : "Gruppen finns redan.", "Unable to add group." : "Lyckades inte lägga till grupp.", "Unable to delete group." : "Lyckades inte radera grupp.", "log-level out of allowed range" : "logg-nivå utanför tillåtet område", "Saved" : "Sparad", "test email settings" : "testa e-post inställningar", "If you received this email, the settings seem to be correct." : "Om du mottog detta e-postmeddelande, verkar dina inställningar vara korrekta.", "A problem occurred while sending the email. Please revise your settings." : "Ett problem uppstod när e-postmeddelandet skickades. Vänligen se över dina inställningar.", "Email sent" : "E-post skickat", "You need to set your user email before being able to send test emails." 
: "Du behöver ställa in din användares e-postadress före du kan skicka test e-post.", "Invalid mail address" : "Ogiltig e-postadress", "Unable to create user." : "Kan inte skapa användare.", "Your %s account was created" : "Ditt %s konto skapades", "Unable to delete user." : "Kan inte radera användare.", "Forbidden" : "Förbjuden", "Invalid user" : "Ogiltig användare", "Unable to change mail address" : "Kan inte ändra e-postadress", "Email saved" : "E-post sparad", "Are you really sure you want add \"{domain}\" as trusted domain?" : "Är du verkligen säker på att du vill lägga till \"{domain}\" som en trusted domian?", "Add trusted domain" : "Lägg till betrodd domän", "Sending..." : "Skickar ...", "All" : "Alla", "Please wait...." : "Var god vänta ...", "Error while disabling app" : "Fel vid inaktivering av app", "Disable" : "Deaktivera", "Enable" : "Aktivera", "Error while enabling app" : "Fel vid aktivering av app", "Updating...." : "Uppdaterar ...", "Error while updating app" : "Fel uppstod vid uppdatering av appen", "Updated" : "Uppdaterad", "Uninstalling ...." : "Avinstallerar ...", "Error while uninstalling app" : "Ett fel inträffade när applikatonen avinstallerades", "Uninstall" : "Avinstallera", "Select a profile picture" : "Välj en profilbild", "Very weak password" : "Väldigt svagt lösenord", "Weak password" : "Svagt lösenord", "So-so password" : "Okej lösenord", "Good password" : "Bra lösenord", "Strong password" : "Starkt lösenord", "Valid until {date}" : "Giltig t.o.m. {date}", "Delete" : "Radera", "Decrypting files... Please wait, this can take some time." : "Dekrypterar filer ... Vänligen vänta, detta kan ta en stund.", "Delete encryption keys permanently." : "Radera krypteringsnycklar permanent", "Restore encryption keys." : "Återställ krypteringsnycklar", "Groups" : "Grupper", "Unable to delete {objName}" : "Kunde inte radera {objName}", "Error creating group" : "Fel vid skapande av grupp", "A valid group name must be provided" : "Ett giltigt gruppnamn måste anges", "deleted {groupName}" : "raderade {groupName} ", "undo" : "ångra", "no group" : "ingen grupp", "never" : "aldrig", "deleted {userName}" : "raderade {userName}", "add group" : "lägg till grupp", "A valid username must be provided" : "Ett giltigt användarnamn måste anges", "Error creating user" : "Fel vid skapande av användare", "A valid password must be provided" : "Ett giltigt lösenord måste anges", "A valid email must be provided" : "En giltig e-postadress måste anges", "__language_name__" : "__language_name__", "Personal Info" : "Personlig info", "SSL root certificates" : "SSL rotcertifikat", "Encryption" : "Kryptering", "Everything (fatal issues, errors, warnings, info, debug)" : "Allting (allvarliga fel, fel, varningar, info, debug)", "Info, warnings, errors and fatal issues" : "Info, varningar och allvarliga fel", "Warnings, errors and fatal issues" : "Varningar, fel och allvarliga fel", "Errors and fatal issues" : "Fel och allvarliga fel", "Fatal issues only" : "Endast allvarliga fel", "None" : "Ingen", "Login" : "Logga in", "Plain" : "Enkel", "NT LAN Manager" : "NT LAN Manager", "SSL" : "SSL", "TLS" : "TLS", "Security Warning" : "Säkerhetsvarning", "You are accessing %s via HTTP. We strongly suggest you configure your server to require using HTTPS instead." : "Du ansluter till %s via HTTP. Vi rekommenderar starkt att du konfigurerar din server att använda HTTPS istället.", "Read-Only config enabled" : "Skrivskyddad konfiguration påslagen", "The Read-Only config has been enabled. 
This prevents setting some configurations via the web-interface. Furthermore, the file needs to be made writable manually for every update." : "Lär-bara konfigureringen har blivit aktiv. Detta förhindrar att några konfigureringar kan sättas via web-gränssnittet.", "Setup Warning" : "Installationsvarning", "PHP is apparently setup to strip inline doc blocks. This will make several core apps inaccessible." : "PHP är tydligen inställd för att rensa inline doc block. Detta kommer att göra flera kärnapplikationer otillgängliga.", "This is probably caused by a cache/accelerator such as Zend OPcache or eAccelerator." : "Detta orsakas troligtvis av en cache/accelerator som t ex Zend OPchache eller eAccelerator.", "Database Performance Info" : "Databasprestanda Information", "Microsoft Windows Platform" : "Microsoft Windows-platform", "Your server is running on Microsoft Windows. We highly recommend Linux for optimal user experience." : "Din server använder Microsoft Windows. Vi rekommenderar starkt Linux för en optimal användarerfarenhet.", "Module 'fileinfo' missing" : "Modulen \"fileinfo\" saknas", "The PHP module 'fileinfo' is missing. We strongly recommend to enable this module to get best results with mime-type detection." : "PHP-modulen 'fileinfo' saknas. Vi rekommenderar starkt att aktivera den här modulen för att kunna upptäcka korrekt mime-typ.", "PHP charset is not set to UTF-8" : "PHP-teckenuppsättning är inte satt till UTF-8", "PHP charset is not set to UTF-8. This can cause major issues with non-ASCII characters in file names. We highly recommend to change the value of 'default_charset' php.ini to 'UTF-8'." : "PHP-teckenuppsättning är inte satt till UTF-8. Detta kan orsaka stora problem med icke-ASCII-tecken i filnamn. Vi rekommenderar starkt att ändra värdet \"default_charset\" i php.ini till \"UTF-8\".", "Locale not working" : "\"Locale\" fungerar inte", "System locale can not be set to a one which supports UTF-8." : "Systemspråk kan inte ställas in till ett som stödjer UTF-8.", "This means that there might be problems with certain characters in file names." : "Detta betyder att där kan komma att uppstå problem med vissa tecken i filnamn.", "We strongly suggest installing the required packages on your system to support one of the following locales: %s." : "Vi rekommenderar starkt att installera de nödvändiga paketen på ditt system för att stödja en av följande språkversioner: %s.", "URL generation in notification emails" : "URL-generering i notifieringsmejl", "If your installation is not installed in the root of the domain and uses system cron, there can be issues with the URL generation. To avoid these problems, please set the \"overwrite.cli.url\" option in your config.php file to the webroot path of your installation (Suggested: \"%s\")" : "Om din installation inte installerades på roten av domänen och använder system cron så kan det uppstå problem med URL-genereringen. För att undvika dessa problem, var vänlig sätt \"overwrite.cli.url\"-inställningen i din config.php-fil till webbrotsökvägen av din installation (Föreslagen: \"%s\")", "Configuration Checks" : "Konfigurationskontroller", "No problems found" : "Inga problem hittades", "Please double check the <a href='%s'>installation guides</a>." : "Var god kontrollera <a href='%s'>installationsguiden</a>.", "Last cron was executed at %s." : "Sista cron kördes vid %s", "Last cron was executed at %s. This is more than an hour ago, something seems wrong." : "Sista cron kördes vid %s. 
Detta är mer än en timme sedan, något verkar fel.", "Cron was not executed yet!" : "Cron kördes inte ännu!", "Execute one task with each page loaded" : "Exekvera en uppgift vid varje sidladdning", "cron.php is registered at a webcron service to call cron.php every 15 minutes over http." : "cron.php är registrerad som en webcron service att ropa på cron.php varje 15 minuter över http.", "Use system's cron service to call the cron.php file every 15 minutes." : "Använd systemets cron-tjänst för att anropa cron.php var 15:e minut.", "Allow apps to use the Share API" : "Tillåt applikationer att använda delat API", "Allow users to share via link" : "Tillåt användare att dela via länk", "Enforce password protection" : "Tillämpa lösenordskydd", "Allow public uploads" : "Tillåt offentlig uppladdning", "Allow users to send mail notification for shared files" : "Tillåt användare att skicka mailnotifieringar för delade filer", "Set default expiration date" : "Ställ in standardutgångsdatum", "Expire after " : "Förfaller efter", "days" : "dagar", "Enforce expiration date" : "Tillämpa förfallodatum", "Allow resharing" : "Tillåt vidaredelning", "Restrict users to only share with users in their groups" : "Begränsa användare till att enbart kunna dela med användare i deras grupper", "Allow users to send mail notification for shared files to other users" : "Tillåt användare att skicka mejlnotifiering för delade filer till andra användare", "Exclude groups from sharing" : "Exkludera grupp från att dela", "These groups will still be able to receive shares, but not to initiate them." : "Dessa grupper kommer fortfarande kunna ta emot delningar, men inte skapa delningar.", "Enforce HTTPS" : "Kräv HTTPS", "Forces the clients to connect to %s via an encrypted connection." : "Tvingar klienterna att ansluta till %s via en krypterad anslutning.", "Enforce HTTPS for subdomains" : "Framtvinga HTTPS för underdomäner", "Forces the clients to connect to %s and subdomains via an encrypted connection." : "Tvingar klienter att ansluta till %s och underdomäner via en krypterad anslutning.", "Please connect to your %s via HTTPS to enable or disable the SSL enforcement." : "Anslut till din %s via HTTPS för att aktivera/deaktivera SSL", "This is used for sending out notifications." : "Detta används för att skicka ut notifieringar.", "Send mode" : "Sändningsläge", "From address" : "Från adress", "mail" : "mail", "Authentication method" : "Autentiseringsmetod", "Authentication required" : "Autentisering krävs", "Server address" : "Serveradress", "Port" : "Port", "Credentials" : "Inloggningsuppgifter", "SMTP Username" : "SMTP-användarnamn", "SMTP Password" : "SMTP-lösenord", "Store credentials" : "Lagra inloggningsuppgifter", "Test email settings" : "Testa e-postinställningar", "Send email" : "Skicka e-post", "Log level" : "Nivå på loggning", "Download logfile" : "Ladda ner loggfil", "More" : "Mer", "Less" : "Mindre", "The logfile is bigger than 100MB. Downloading it may take some time!" : "Loggfilen är större än 100MB. Att ladda ner den kan ta lite tid!", "Version" : "Version", "Developed by the <a href=\"http://ownCloud.org/contact\" target=\"_blank\">ownCloud community</a>, the <a href=\"https://github.com/owncloud\" target=\"_blank\">source code</a> is licensed under the <a href=\"http://www.gnu.org/licenses/agpl-3.0.html\" target=\"_blank\"><abbr title=\"Affero General Public License\">AGPL</abbr></a>." 
: "Utvecklad av <a href=\"http://ownCloud.org/contact\" target=\"_blank\">ownCloud Community</a>, <a href=\"https://github.com/owncloud\" target=\"_blank\">källkoden</a> är licenserad under <a href=\"http://www.gnu.org/licenses/agpl-3.0.html\" target=\"_blank\"><abbr title=\"Affero General Public License\">AGPL</abbr></a>.", "More apps" : "Fler appar", "Add your app" : "Lägg till din app", "by" : "av", "licensed" : "licensierad", "Documentation:" : "Dokumentation:", "User Documentation" : "Användardokumentation", "Admin Documentation" : "Administratörsdokumentation", "This app cannot be installed because the following dependencies are not fulfilled:" : "Denna applikation kan inte installeras då följande beroenden inte är uppfyllda: %s", "Update to %s" : "Uppdatera till %s", "Enable only for specific groups" : "Aktivera endast för specifika grupper", "Uninstall App" : "Avinstallera applikation", "Hey there,<br><br>just letting you know that you now have an %s account.<br><br>Your username: %s<br>Access it: <a href=\"%s\">%s</a><br><br>" : "Hej där,<br><br>vill bara informera dig om att du nu har ett %s konto.<br><br>Ditt användarnamn: %s<br>Accessa det genom: <a href=\"%s\">%s</a><br><br>", "Cheers!" : "Ha de fint!", "Hey there,\n\njust letting you know that you now have an %s account.\n\nYour username: %s\nAccess it: %s\n\n" : "Hej där,\n\nvill bara informera dig om att du nu har ett %s konto.\n\nDitt användarnamn: %s\nAccessa det genom: %s\n", "Administrator Documentation" : "Administratörsdokumentation", "Online Documentation" : "Onlinedokumentation", "Forum" : "Forum", "Bugtracker" : "Bugtracker", "Commercial Support" : "Kommersiell support", "Get the apps to sync your files" : "Skaffa apparna för att synkronisera dina filer", "Desktop client" : "Skrivbordsklient", "Android app" : "Android-app", "iOS app" : "iOS-app", "If you want to support the project\n\t\t<a href=\"https://owncloud.org/contribute\"\n\t\t\ttarget=\"_blank\">join development</a>\n\t\tor\n\t\t<a href=\"https://owncloud.org/promote\"\n\t\t\ttarget=\"_blank\">spread the word</a>!" : "Om du vill stödja projektet\n<a href=\"https://owncloud.org/contribute\"\n\t\t\ttarget=\"_blank\">hjälp till med utvecklingen</a>\n\t\teller\n\t\t<a href=\"https://owncloud.org/promote\"\n\t\t\ttarget=\"_blank\">sprid budskapet vidare</a>!", "Show First Run Wizard again" : "Visa Första uppstarts-guiden igen", "You have used <strong>%s</strong> of the available <strong>%s</strong>" : "Du har använt <strong>%s</strong> av tillgängliga <strong>%s</strong>", "Password" : "Lösenord", "Your password was changed" : "Ditt lösenord har ändrats", "Unable to change your password" : "Kunde inte ändra ditt lösenord", "Current password" : "Nuvarande lösenord", "New password" : "Nytt lösenord", "Change password" : "Ändra lösenord", "Full Name" : "Hela namnet", "No display name set" : "Inget visningsnamn angivet", "Email" : "E-post", "Your email address" : "Din e-postadress", "Fill in an email address to enable password recovery and receive notifications" : "Fyll i en e-postadress för att aktivera återställning av lösenord och mottagande av notifieringar", "No email address set" : "Ingen e-postadress angiven", "Profile picture" : "Profilbild", "Upload new" : "Ladda upp ny", "Select new from Files" : "Välj ny från filer", "Remove image" : "Radera bild", "Either png or jpg. Ideally square but you will be able to crop it." : "Antingen png eller jpg. Helst fyrkantig, men du kommer att kunna beskära den.", "Your avatar is provided by your original account." 
: "Din avatar tillhandahålls av ditt ursprungliga konto.", "Cancel" : "Avbryt", "Choose as profile image" : "Välj som profilbild", "Language" : "Språk", "Help translate" : "Hjälp att översätta", "Common Name" : "Vanligt namn", "Valid until" : "Giltigt till", "Issued By" : "Utfärdat av", "Valid until %s" : "Giltigt till %s", "Import Root Certificate" : "Importera rotcertifikat", "The encryption app is no longer enabled, please decrypt all your files" : "Krypteringsapplikationen är inte längre aktiverad, vänligen dekryptera alla dina filer", "Log-in password" : "Inloggningslösenord", "Decrypt all Files" : "Dekryptera alla filer", "Your encryption keys are moved to a backup location. If something went wrong you can restore the keys. Only delete them permanently if you are sure that all files are decrypted correctly." : "Dina krypteringsnycklar flyttas till en backup. Om något gick fel kan du återställa nycklarna. Bara ta bort dem permanent om du är säker på att alla filer dekrypteras korrekt.", "Restore Encryption Keys" : "Återställ krypteringsnycklar", "Delete Encryption Keys" : "Radera krypteringsnycklar", "Show storage location" : "Visa lagringsplats", "Show last log in" : "Visa senaste inloggning", "Show user backend" : "Visa användar-back-end", "Send email to new user" : "Skicka e-post till ny användare", "Show email address" : "Visa e-postadress", "Username" : "Användarnamn", "E-Mail" : "E-post", "Create" : "Skapa", "Admin Recovery Password" : "Admin-återställningslösenord", "Enter the recovery password in order to recover the users files during password change" : "Ange återställningslösenordet för att återställa användarnas filer vid lösenordsbyte", "Search Users" : "Sök användare", "Add Group" : "Lägg till Grupp", "Group" : "Grupp", "Everyone" : "Alla", "Admins" : "Administratörer", "Default Quota" : "Förvald datakvot", "Please enter storage quota (ex: \"512 MB\" or \"12 GB\")" : "Var god skriv in lagringskvot (ex: \"512MB\" eller \"12 GB\")", "Unlimited" : "Obegränsad", "Other" : "Annat", "Group Admin for" : "Gruppadministratör för", "Quota" : "Kvot", "Storage Location" : "Lagringsplats", "User Backend" : "Användar-back-end", "Last Login" : "Senaste inloggning", "change full name" : "ändra hela namnet", "set new password" : "ange nytt lösenord", "change email address" : "ändra e-postadress", "Default" : "Förvald" }, "nplurals=2; plural=(n != 1);");
kebenxiaoming/owncloudRedis
settings/l10n/sv.js
JavaScript
apache-2.0
21,151
// +build integration,ethereum

package ethclient

import (
	"encoding/json"
	"fmt"
	"testing"

	"github.com/hyperledger/burrow/crypto"
	"github.com/hyperledger/burrow/encoding/web3hex"
	"github.com/hyperledger/burrow/execution/solidity"
	"github.com/hyperledger/burrow/rpc/rpcevents"
	"github.com/hyperledger/burrow/tests/web3/web3test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

var client = NewEthClient(web3test.GetChainRPCClient())

func TestEthAccounts(t *testing.T) {
	accounts, err := client.Accounts()
	require.NoError(t, err)
	fmt.Println(accounts)
}

func TestEthSendTransaction(t *testing.T) {
	pk := web3test.GetPrivateKey(t)
	d := new(web3hex.Decoder)
	param := &EthSendTransactionParam{
		From: web3hex.Encoder.Address(pk.GetAddress()),
		Gas:  web3hex.Encoder.Uint64(999999),
		Data: web3hex.Encoder.BytesTrim(solidity.Bytecode_EventEmitter),
	}
	txHash, err := client.SendTransaction(param)
	require.NoError(t, err)
	require.NotEmpty(t, txHash)

	tx, err := client.GetTransactionByHash(txHash)
	require.NoError(t, err)
	assert.Greater(t, d.Uint64(tx.BlockNumber), uint64(0))

	receipt, err := client.GetTransactionReceipt(txHash)
	require.NoError(t, err)
	assert.Equal(t, txHash, receipt.TransactionHash)

	require.NoError(t, d.Err())
}

func TestNonExistentTransaction(t *testing.T) {
	txHash := "0x990258f47aba0cf913c14cc101ddf5b589c04765429d5709f643c891442bfcf7"
	receipt, err := client.GetTransactionReceipt(txHash)
	require.NoError(t, err)
	require.Equal(t, "", receipt.TransactionHash)
	require.Equal(t, "", receipt.BlockNumber)
	require.Equal(t, "", receipt.BlockHash)

	tx, err := client.GetTransactionByHash(txHash)
	require.NoError(t, err)
	require.Equal(t, "", tx.Hash)
	require.Equal(t, "", tx.BlockNumber)
	require.Equal(t, "", tx.BlockHash)
}

func TestEthClient_GetLogs(t *testing.T) {
	// TODO: make this test generate its own fixtures
	filter := &Filter{
		BlockRange: rpcevents.AbsoluteRange(1, 34340),
		Addresses: []crypto.Address{
			crypto.MustAddressFromHexString("a1e378f122fec6aa8c841397042e21bc19368768"),
			crypto.MustAddressFromHexString("f73aaa468496a87675d27638878a1600b0db3c71"),
		},
	}
	result, err := client.GetLogs(filter)
	require.NoError(t, err)
	bs, err := json.Marshal(result)
	require.NoError(t, err)
	fmt.Printf("%s\n", string(bs))
}

func TestEthClient_GetBlockByNumber(t *testing.T) {
	block, err := client.GetBlockByNumber("latest")
	require.NoError(t, err)
	d := new(web3hex.Decoder)
	assert.Greater(t, d.Uint64(block.Number), uint64(0))
	require.NoError(t, d.Err())
}

func TestNetVersion(t *testing.T) {
	chainID, err := client.NetVersion()
	require.NoError(t, err)
	require.NotEmpty(t, chainID)
}

func TestWeb3ClientVersion(t *testing.T) {
	version, err := client.Web3ClientVersion()
	require.NoError(t, err)
	require.NotEmpty(t, version)
}

func TestEthSyncing(t *testing.T) {
	result, err := client.Syncing()
	require.NoError(t, err)
	fmt.Printf("%#v\n", result)
}

func TestEthBlockNumber(t *testing.T) {
	height, err := client.BlockNumber()
	require.NoError(t, err)
	require.Greater(t, height, uint64(0))
}
eris-ltd/eris-db
rpc/web3/ethclient/client_test.go
GO
apache-2.0
3,116
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Tests for the output module field formatting helper.""" import unittest from dfdatetime import semantic_time as dfdatetime_semantic_time from dfvfs.path import fake_path_spec from plaso.containers import events from plaso.lib import definitions from plaso.output import formatting_helper from tests.containers import test_lib as containers_test_lib from tests.output import test_lib class TestFieldFormattingHelper(formatting_helper.FieldFormattingHelper): """Field formatter helper for testing purposes.""" _FIELD_FORMAT_CALLBACKS = {'zone': '_FormatTimeZone'} class FieldFormattingHelperTest(test_lib.OutputModuleTestCase): """Test the output module field formatting helper.""" # pylint: disable=protected-access _TEST_EVENTS = [ {'data_type': 'test:event', 'filename': 'log/syslog.1', 'hostname': 'ubuntu', 'path_spec': fake_path_spec.FakePathSpec( location='log/syslog.1'), 'text': ( 'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session\n ' 'closed for user root)'), 'timestamp': '2012-06-27 18:17:01', 'timestamp_desc': definitions.TIME_DESCRIPTION_CHANGE}] def testFormatDateTime(self): """Tests the _FormatDateTime function with dynamic time.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00') output_mediator.SetTimezone('Europe/Amsterdam') date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00') output_mediator.SetTimezone('UTC') event.date_time = dfdatetime_semantic_time.InvalidTime() date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, 'Invalid') def testFormatDateTimeWithoutDynamicTime(self): """Tests the _FormatDateTime function without dynamic time.""" output_mediator = self._CreateOutputMediator(dynamic_time=False) test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) # Test with event.date_time date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00') output_mediator.SetTimezone('Europe/Amsterdam') date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '2012-06-27T20:17:01.000000+02:00') output_mediator.SetTimezone('UTC') event.date_time = dfdatetime_semantic_time.InvalidTime() date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00') # Test with event.timestamp event.date_time = None date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '2012-06-27T18:17:01.000000+00:00') event.timestamp = 0 date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00') event.timestamp = -9223372036854775808 date_time_string = test_helper._FormatDateTime( event, event_data, event_data_stream) 
self.assertEqual(date_time_string, '0000-00-00T00:00:00.000000+00:00') def testFormatDisplayName(self): """Tests the _FormatDisplayName function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) display_name_string = test_helper._FormatDisplayName( event, event_data, event_data_stream) self.assertEqual(display_name_string, 'FAKE:log/syslog.1') def testFormatFilename(self): """Tests the _FormatFilename function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) filename_string = test_helper._FormatFilename( event, event_data, event_data_stream) self.assertEqual(filename_string, 'log/syslog.1') def testFormatHostname(self): """Tests the _FormatHostname function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) hostname_string = test_helper._FormatHostname( event, event_data, event_data_stream) self.assertEqual(hostname_string, 'ubuntu') def testFormatInode(self): """Tests the _FormatInode function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) inode_string = test_helper._FormatInode( event, event_data, event_data_stream) self.assertEqual(inode_string, '-') def testFormatMACB(self): """Tests the _FormatMACB function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) macb_string = test_helper._FormatMACB(event, event_data, event_data_stream) self.assertEqual(macb_string, '..C.') def testFormatMessage(self): """Tests the _FormatMessage function.""" output_mediator = self._CreateOutputMediator() formatters_directory_path = self._GetTestFilePath(['formatters']) output_mediator.ReadMessageFormattersFromDirectory( formatters_directory_path) test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) message_string = test_helper._FormatMessage( event, event_data, event_data_stream) expected_message_string = ( 'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed ' 'for user root)') self.assertEqual(message_string, expected_message_string) def testFormatMessageShort(self): """Tests the _FormatMessageShort function.""" output_mediator = self._CreateOutputMediator() formatters_directory_path = self._GetTestFilePath(['formatters']) output_mediator.ReadMessageFormattersFromDirectory( formatters_directory_path) test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) message_short_string = test_helper._FormatMessageShort( event, event_data, event_data_stream) expected_message_short_string = ( 'Reporter <CRON> PID: 8442 (pam_unix(cron:session): session closed ' 'for user root)') 
self.assertEqual(message_short_string, expected_message_short_string) def testFormatSource(self): """Tests the _FormatSource function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) source_string = test_helper._FormatSource( event, event_data, event_data_stream) self.assertEqual(source_string, 'Test log file') def testFormatSourceShort(self): """Tests the _FormatSourceShort function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) source_short_string = test_helper._FormatSourceShort( event, event_data, event_data_stream) self.assertEqual(source_short_string, 'FILE') def testFormatTag(self): """Tests the _FormatTag function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) tag_string = test_helper._FormatTag(None) self.assertEqual(tag_string, '-') event_tag = events.EventTag() event_tag.AddLabel('one') event_tag.AddLabel('two') tag_string = test_helper._FormatTag(event_tag) self.assertEqual(tag_string, 'one two') def testFormatTime(self): """Tests the _FormatTime function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) # Test with event.date_time time_string = test_helper._FormatTime( event, event_data, event_data_stream) self.assertEqual(time_string, '18:17:01') output_mediator.SetTimezone('Europe/Amsterdam') time_string = test_helper._FormatTime( event, event_data, event_data_stream) self.assertEqual(time_string, '20:17:01') output_mediator.SetTimezone('UTC') # Test with event.timestamp event.date_time = None time_string = test_helper._FormatTime( event, event_data, event_data_stream) self.assertEqual(time_string, '18:17:01') event.timestamp = 0 time_string = test_helper._FormatTime( event, event_data, event_data_stream) self.assertEqual(time_string, '--:--:--') event.timestamp = -9223372036854775808 time_string = test_helper._FormatTime( event, event_data, event_data_stream) self.assertEqual(time_string, '--:--:--') def testFormatTimeZone(self): """Tests the _FormatTimeZone function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) zone_string = test_helper._FormatTimeZone( event, event_data, event_data_stream) self.assertEqual(zone_string, 'UTC') def testFormatUsername(self): """Tests the _FormatUsername function.""" output_mediator = self._CreateOutputMediator() test_helper = formatting_helper.FieldFormattingHelper(output_mediator) event, event_data, event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) username_string = test_helper._FormatUsername( event, event_data, event_data_stream) self.assertEqual(username_string, '-') # TODO: add coverage for _ReportEventError def testGetFormattedField(self): """Tests the GetFormattedField function.""" output_mediator = self._CreateOutputMediator() test_helper = TestFieldFormattingHelper(output_mediator) event, event_data, 
event_data_stream = ( containers_test_lib.CreateEventFromValues(self._TEST_EVENTS[0])) zone_string = test_helper.GetFormattedField( 'zone', event, event_data, event_data_stream, None) self.assertEqual(zone_string, 'UTC') if __name__ == '__main__': unittest.main()
kiddinn/plaso
tests/output/formatting_helper.py
Python
apache-2.0
12,029
package de.tototec.sbuild.internal

import de.tototec.sbuild.TargetContext
import de.tototec.sbuild.InvalidApiUsageException
import de.tototec.sbuild.Project

trait WithinTargetExecution {
  def targetContext: TargetContext
  protected[sbuild] def directDepsTargetContexts: Seq[TargetContext]
}

object WithinTargetExecution extends ThreadLocal[WithinTargetExecution] {

  private[sbuild] override def remove: Unit = super.remove

  private[sbuild] override def set(withinTargetExecution: WithinTargetExecution): Unit = super.set(withinTargetExecution)

  /**
   * To use a WithinTargetExecution, one should use this method.
   */
  private[sbuild] def safeWithinTargetExecution[T](callingMethodName: String, project: Option[Project] = None)(doWith: WithinTargetExecution => T): T = get match {
    case null =>
      val msg = I18n.marktr("'{0}' can only be used inside an exec block of a target.")
      val ex = new InvalidApiUsageException(I18n.notr(msg, callingMethodName), null, I18n[WithinTargetExecution.type].tr(msg, callingMethodName))
      ex.buildScript = project.map(_.projectFile)
      throw ex
    case withinExecution => doWith(withinExecution)
  }

  //  private[sbuild] def safeTargetContext(callingMethodName: String, project: Option[Project] = None): TargetContext =
  //    safeWithinTargetExecution(callingMethodName, project)(_.targetContext)
}
SBuild-org/sbuild
de.tototec.sbuild/src/main/scala/de/tototec/sbuild/internal/WithinTargetExecution.scala
Scala
apache-2.0
1,387
package org.wikipedia.concurrency; // Copied from Android 4.4.2_r2 source // so we can use executeOnExecutor :P // // https://android.googlesource.com/platform/frameworks/base/+/android-4.4.2_r2/core/java/android/os/AsyncTask.java /* * Copyright (C) 2008 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import android.os.Handler; import android.os.Message; import android.os.Process; import android.support.annotation.NonNull; import java.util.ArrayDeque; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; import java.util.concurrent.FutureTask; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; /** * <p>AsyncTask enables proper and easy use of the UI thread. This class allows to * perform background operations and publish results on the UI thread without * having to manipulate threads and/or handlers.</p> * * <p>AsyncTask is designed to be a helper class around {@link Thread} and {@link Handler} * and does not constitute a generic threading framework. AsyncTasks should ideally be * used for short operations (a few seconds at the most.) If you need to keep threads * running for long periods of time, it is highly recommended you use the various APIs * provided by the <code>java.util.concurrent</code> pacakge such as {@link Executor}, * {@link ThreadPoolExecutor} and {@link FutureTask}.</p> * * <p>An asynchronous task is defined by a computation that runs on a background thread and * whose result is published on the UI thread. An asynchronous task is defined by 3 generic * types, called <code>Params</code>, <code>Progress</code> and <code>Result</code>, * and 4 steps, called <code>onPreExecute</code>, <code>doInBackground</code>, * <code>onProgressUpdate</code> and <code>onPostExecute</code>.</p> * * <div class="special reference"> * <h3>Developer Guides</h3> * <p>For more information about using tasks and threads, read the * <a href="{@docRoot}guide/topics/fundamentals/processes-and-threads.html">Processes and * Threads</a> developer guide.</p> * </div> * * <h2>Usage</h2> * <p>AsyncTask must be subclassed to be used. The subclass will override at least * one method ({@link #doInBackground}), and most often will override a * second one ({@link #onPostExecute}.)</p> * * <p>Here is an example of subclassing:</p> * <pre class="prettyprint"> * private class DownloadFilesTask extends AsyncTask&lt;URL, Integer, Long&gt; { * protected Long doInBackground(URL... 
urls) { * int count = urls.length; * long totalSize = 0; * for (int i = 0; i < count; i++) { * totalSize += Downloader.downloadFile(urls[i]); * publishProgress((int) ((i / (float) count) * 100)); * // Escape early if cancel() is called * if (isCancelled()) break; * } * return totalSize; * } * * protected void onProgressUpdate(Integer... progress) { * setProgressPercent(progress[0]); * } * * protected void onPostExecute(Long result) { * showDialog("Downloaded " + result + " bytes"); * } * } * </pre> * * <p>Once created, a task is executed very simply:</p> * <pre class="prettyprint"> * new DownloadFilesTask().execute(url1, url2, url3); * </pre> * * <h2>AsyncTask's generic types</h2> * <p>The three types used by an asynchronous task are the following:</p> * <ol> * <li><code>Params</code>, the type of the parameters sent to the task upon * execution.</li> * <li><code>Progress</code>, the type of the progress units published during * the background computation.</li> * <li><code>Result</code>, the type of the result of the background * computation.</li> * </ol> * <p>Not all types are always used by an asynchronous task. To mark a type as unused, * simply use the type {@link Void}:</p> * <pre> * private class MyTask extends AsyncTask&lt;Void, Void, Void&gt; { ... } * </pre> * * <h2>The 4 steps</h2> * <p>When an asynchronous task is executed, the task goes through 4 steps:</p> * <ol> * <li>{@link #onPreExecute()}, invoked on the UI thread before the task * is executed. This step is normally used to setup the task, for instance by * showing a progress bar in the user interface.</li> * <li>{@link #doInBackground}, invoked on the background thread * immediately after {@link #onPreExecute()} finishes executing. This step is used * to perform background computation that can take a long time. The parameters * of the asynchronous task are passed to this step. The result of the computation must * be returned by this step and will be passed back to the last step. This step * can also use {@link #publishProgress} to publish one or more units * of progress. These values are published on the UI thread, in the * {@link #onProgressUpdate} step.</li> * <li>{@link #onProgressUpdate}, invoked on the UI thread after a * call to {@link #publishProgress}. The timing of the execution is * undefined. This method is used to display any form of progress in the user * interface while the background computation is still executing. For instance, * it can be used to animate a progress bar or show logs in a text field.</li> * <li>{@link #onPostExecute}, invoked on the UI thread after the background * computation finishes. The result of the background computation is passed to * this step as a parameter.</li> * </ol> * * <h2>Cancelling a task</h2> * <p>A task can be cancelled at any time by invoking {@link #cancel(boolean)}. Invoking * this method will cause subsequent calls to {@link #isCancelled()} to return true. * After invoking this method, {@link #onCancelled(Object)}, instead of * {@link #onPostExecute(Object)} will be invoked after {@link #doInBackground(Object[])} * returns. To ensure that a task is cancelled as quickly as possible, you should always * check the return value of {@link #isCancelled()} periodically from * {@link #doInBackground(Object[])}, if possible (inside a loop for instance.)</p> * * <h2>Threading rules</h2> * <p>There are a few threading rules that must be followed for this class to * work properly:</p> * <ul> * <li>The AsyncTask class must be loaded on the UI thread. 
This is done * automatically as of {@link android.os.Build.VERSION_CODES#JELLY_BEAN}.</li> * <li>The task instance must be created on the UI thread.</li> * <li>{@link #execute} must be invoked on the UI thread.</li> * <li>Do not call {@link #onPreExecute()}, {@link #onPostExecute}, * {@link #doInBackground}, {@link #onProgressUpdate} manually.</li> * <li>The task can be executed only once (an exception will be thrown if * a second execution is attempted.)</li> * </ul> * * <h2>Memory observability</h2> * <p>AsyncTask guarantees that all callback calls are synchronized in such a way that the following * operations are safe without explicit synchronizations.</p> * <ul> * <li>Set member fields in the constructor or {@link #onPreExecute}, and refer to them * in {@link #doInBackground}. * <li>Set member fields in {@link #doInBackground}, and refer to them in * {@link #onProgressUpdate} and {@link #onPostExecute}. * </ul> * * <h2>Order of execution</h2> * <p>When first introduced, AsyncTasks were executed serially on a single background * thread. Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed * to a pool of threads allowing multiple tasks to operate in parallel. Starting with * {@link android.os.Build.VERSION_CODES#HONEYCOMB}, tasks are executed on a single * thread to avoid common application errors caused by parallel execution.</p> * <p>If you truly want parallel execution, you can invoke * {@link #executeOnExecutor(java.util.concurrent.Executor, Object[])} with * {@link #THREAD_POOL_EXECUTOR}.</p> */ public abstract class AsyncTask<Params, Progress, Result> { private static final String LOG_TAG = "AsyncTask"; private static final int CPU_COUNT = Runtime.getRuntime().availableProcessors(); private static final int CORE_POOL_SIZE = CPU_COUNT + 1; private static final int MAXIMUM_POOL_SIZE = CPU_COUNT * 2 + 1; private static final int KEEP_ALIVE = 1; private static final ThreadFactory sThreadFactory = new ThreadFactory() { private final AtomicInteger mCount = new AtomicInteger(1); public Thread newThread(@NonNull Runnable r) { return new Thread(r, "AsyncTask #" + mCount.getAndIncrement()); } }; private static final BlockingQueue<Runnable> sPoolWorkQueue = new LinkedBlockingQueue<>(128); /** * An {@link Executor} that can be used to execute tasks in parallel. */ public static final Executor THREAD_POOL_EXECUTOR = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE, TimeUnit.SECONDS, sPoolWorkQueue, sThreadFactory); /** * An {@link Executor} that executes tasks one at a time in serial * order. This serialization is global to a particular process. 
*/ public static final Executor SERIAL_EXECUTOR = new SerialExecutor(); private static final int MESSAGE_POST_RESULT = 0x1; private static final int MESSAGE_POST_PROGRESS = 0x2; private static final InternalHandler sHandler = new InternalHandler(); private static volatile Executor sDefaultExecutor = SERIAL_EXECUTOR; private final WorkerRunnable<Params, Result> mWorker; private final FutureTask<Result> mFuture; private volatile Status mStatus = Status.PENDING; private final AtomicBoolean mCancelled = new AtomicBoolean(); private final AtomicBoolean mTaskInvoked = new AtomicBoolean(); private static class SerialExecutor implements Executor { final ArrayDeque<Runnable> mTasks = new ArrayDeque<>(); Runnable mActive; public synchronized void execute(@NonNull final Runnable r) { mTasks.offer(new Runnable() { public void run() { try { r.run(); } finally { scheduleNext(); } } }); if (mActive == null) { scheduleNext(); } } protected synchronized void scheduleNext() { if ((mActive = mTasks.poll()) != null) { THREAD_POOL_EXECUTOR.execute(mActive); } } } /** * Indicates the current status of the task. Each status will be set only once * during the lifetime of a task. */ public enum Status { /** * Indicates that the task has not been executed yet. */ PENDING, /** * Indicates that the task is running. */ RUNNING, /** * Indicates that {@link AsyncTask#onPostExecute} has finished. */ FINISHED, } /** @hide Used to force static handler to be created. */ public static void init() { sHandler.getLooper(); } /** @hide */ public static void setDefaultExecutor(Executor exec) { sDefaultExecutor = exec; } /** * Creates a new asynchronous task. This constructor must be invoked on the UI thread. */ public AsyncTask() { mWorker = new WorkerRunnable<Params, Result>() { public Result call() throws Exception { mTaskInvoked.set(true); Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND); //noinspection unchecked return postResult(doInBackground(mParams)); } }; mFuture = new FutureTask<Result>(mWorker) { @Override protected void done() { try { postResultIfNotInvoked(get()); } catch (InterruptedException e) { android.util.Log.w(LOG_TAG, e); } catch (ExecutionException e) { throw new RuntimeException("An error occured while executing doInBackground()", e.getCause()); } catch (CancellationException e) { postResultIfNotInvoked(null); } } }; } private void postResultIfNotInvoked(Result result) { final boolean wasTaskInvoked = mTaskInvoked.get(); if (!wasTaskInvoked) { postResult(result); } } private Result postResult(Result result) { @SuppressWarnings("unchecked") Message message = sHandler.obtainMessage(MESSAGE_POST_RESULT, new AsyncTaskResult<>(this, result)); message.sendToTarget(); return result; } /** * Returns the current status of this task. * * @return The current status. */ public final Status getStatus() { return mStatus; } /** * Override this method to perform a computation on a background thread. The * specified parameters are the parameters passed to {@link #execute} * by the caller of this task. * * This method can call {@link #publishProgress} to publish updates * on the UI thread. * * @param params The parameters of the task. * * @return A result, defined by the subclass of this task. * * @see #onPreExecute() * @see #onPostExecute * @see #publishProgress */ protected abstract Result doInBackground(Params... params); /** * Runs on the UI thread before {@link #doInBackground}. 
* * @see #onPostExecute * @see #doInBackground */ protected void onPreExecute() { } /** * <p>Runs on the UI thread after {@link #doInBackground}. The * specified result is the value returned by {@link #doInBackground}.</p> * * <p>This method won't be invoked if the task was cancelled.</p> * * @param result The result of the operation computed by {@link #doInBackground}. * * @see #onPreExecute * @see #doInBackground * @see #onCancelled(Object) */ @SuppressWarnings({"UnusedDeclaration"}) protected void onPostExecute(Result result) { } /** * Runs on the UI thread after {@link #publishProgress} is invoked. * The specified values are the values passed to {@link #publishProgress}. * * @param values The values indicating progress. * * @see #publishProgress * @see #doInBackground */ @SuppressWarnings({"UnusedDeclaration"}) protected void onProgressUpdate(Progress... values) { } /** * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and * {@link #doInBackground(Object[])} has finished.</p> * * <p>The default implementation simply invokes {@link #onCancelled()} and * ignores the result. If you write your own implementation, do not call * <code>super.onCancelled(result)</code>.</p> * * @param result The result, if any, computed in * {@link #doInBackground(Object[])}, can be null * * @see #cancel(boolean) * @see #isCancelled() */ @SuppressWarnings({"UnusedParameters"}) protected void onCancelled(Result result) { onCancelled(); } /** * <p>Applications should preferably override {@link #onCancelled(Object)}. * This method is invoked by the default implementation of * {@link #onCancelled(Object)}.</p> * * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and * {@link #doInBackground(Object[])} has finished.</p> * * @see #onCancelled(Object) * @see #cancel(boolean) * @see #isCancelled() */ protected void onCancelled() { } /** * Returns <tt>true</tt> if this task was cancelled before it completed * normally. If you are calling {@link #cancel(boolean)} on the task, * the value returned by this method should be checked periodically from * {@link #doInBackground(Object[])} to end the task as soon as possible. * * @return <tt>true</tt> if task was cancelled before it completed * * @see #cancel(boolean) */ public final boolean isCancelled() { return mCancelled.get(); } /** * <p>Attempts to cancel execution of this task. This attempt will * fail if the task has already completed, already been cancelled, * or could not be cancelled for some other reason. If successful, * and this task has not started when <tt>cancel</tt> is called, * this task should never run. If the task has already started, * then the <tt>mayInterruptIfRunning</tt> parameter determines * whether the thread executing this task should be interrupted in * an attempt to stop the task.</p> * * <p>Calling this method will result in {@link #onCancelled(Object)} being * invoked on the UI thread after {@link #doInBackground(Object[])} * returns. Calling this method guarantees that {@link #onPostExecute(Object)} * is never invoked. After invoking this method, you should check the * value returned by {@link #isCancelled()} periodically from * {@link #doInBackground(Object[])} to finish the task as early as * possible.</p> * * @param mayInterruptIfRunning <tt>true</tt> if the thread executing this * task should be interrupted; otherwise, in-progress tasks are allowed * to complete. 
* * @return <tt>false</tt> if the task could not be cancelled, * typically because it has already completed normally; * <tt>true</tt> otherwise * * @see #isCancelled() * @see #onCancelled(Object) */ public final boolean cancel(boolean mayInterruptIfRunning) { mCancelled.set(true); return mFuture.cancel(mayInterruptIfRunning); } /** * Waits if necessary for the computation to complete, and then * retrieves its result. * * @return The computed result. * * @throws CancellationException If the computation was cancelled. * @throws ExecutionException If the computation threw an exception. * @throws InterruptedException If the current thread was interrupted * while waiting. */ public final Result get() throws InterruptedException, ExecutionException { return mFuture.get(); } /** * Waits if necessary for at most the given time for the computation * to complete, and then retrieves its result. * * @param timeout Time to wait before cancelling the operation. * @param unit The time unit for the timeout. * * @return The computed result. * * @throws CancellationException If the computation was cancelled. * @throws ExecutionException If the computation threw an exception. * @throws InterruptedException If the current thread was interrupted * while waiting. * @throws TimeoutException If the wait timed out. */ public final Result get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { return mFuture.get(timeout, unit); } /** * Executes the task with the specified parameters. The task returns * itself (this) so that the caller can keep a reference to it. * * <p>Note: this function schedules the task on a queue for a single background * thread or pool of threads depending on the platform version. When first * introduced, AsyncTasks were executed serially on a single background thread. * Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed * to a pool of threads allowing multiple tasks to operate in parallel. Starting * {@link android.os.Build.VERSION_CODES#HONEYCOMB}, tasks are back to being * executed on a single thread to avoid common application errors caused * by parallel execution. If you truly want parallel execution, you can use * the {@link #executeOnExecutor} version of this method * with {@link #THREAD_POOL_EXECUTOR}; however, see commentary there for warnings * on its use. * * <p>This method must be invoked on the UI thread. * * @param params The parameters of the task. * * @return This instance of AsyncTask. * * @throws IllegalStateException If {@link #getStatus()} returns either * {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED}. * * @see #executeOnExecutor(java.util.concurrent.Executor, Object[]) * @see #execute(Runnable) */ public final AsyncTask<Params, Progress, Result> execute(Params... params) { return executeOnExecutor(sDefaultExecutor, params); } /** * Executes the task with the specified parameters. The task returns * itself (this) so that the caller can keep a reference to it. * * <p>This method is typically used with {@link #THREAD_POOL_EXECUTOR} to * allow multiple tasks to run in parallel on a pool of threads managed by * AsyncTask, however you can also use your own {@link Executor} for custom * behavior. * * <p><em>Warning:</em> Allowing multiple tasks to run in parallel from * a thread pool is generally <em>not</em> what one wants, because the order * of their operation is not defined. 
For example, if these tasks are used * to modify any state in common (such as writing a file due to a button click), * there are no guarantees on the order of the modifications. * Without careful work it is possible in rare cases for the newer version * of the data to be over-written by an older one, leading to obscure data * loss and stability issues. Such changes are best * executed in serial; to guarantee such work is serialized regardless of * platform version you can use this function with {@link #SERIAL_EXECUTOR}. * * <p>This method must be invoked on the UI thread. * * @param exec The executor to use. {@link #THREAD_POOL_EXECUTOR} is available as a * convenient process-wide thread pool for tasks that are loosely coupled. * @param params The parameters of the task. * * @return This instance of AsyncTask. * * @throws IllegalStateException If {@link #getStatus()} returns either * {@link AsyncTask.Status#RUNNING} or {@link AsyncTask.Status#FINISHED}. * * @see #execute(Object[]) */ public final AsyncTask<Params, Progress, Result> executeOnExecutor(Executor exec, Params... params) { if (mStatus != Status.PENDING) { switch (mStatus) { case RUNNING: throw new IllegalStateException("Cannot execute task:" + " the task is already running."); case FINISHED: throw new IllegalStateException("Cannot execute task:" + " the task has already been executed " + "(a task can be executed only once)"); } } mStatus = Status.RUNNING; onPreExecute(); mWorker.mParams = params; exec.execute(mFuture); return this; } /** * Convenience version of {@link #execute(Object...)} for use with * a simple Runnable object. See {@link #execute(Object[])} for more * information on the order of execution. * * @see #execute(Object[]) * @see #executeOnExecutor(java.util.concurrent.Executor, Object[]) */ public static void execute(Runnable runnable) { sDefaultExecutor.execute(runnable); } /** * This method can be invoked from {@link #doInBackground} to * publish updates on the UI thread while the background computation is * still running. Each call to this method will trigger the execution of * {@link #onProgressUpdate} on the UI thread. * * {@link #onProgressUpdate} will note be called if the task has been * canceled. * * @param values The progress values to update the UI with. * * @see #onProgressUpdate * @see #doInBackground */ protected final void publishProgress(Progress... values) { if (!isCancelled()) { sHandler.obtainMessage(MESSAGE_POST_PROGRESS, new AsyncTaskResult<>(this, values)).sendToTarget(); } } private void finish(Result result) { if (isCancelled()) { onCancelled(result); } else { onPostExecute(result); } mStatus = Status.FINISHED; } private static class InternalHandler extends Handler { @SuppressWarnings({"unchecked", "RawUseOfParameterizedType"}) @Override public void handleMessage(Message msg) { AsyncTaskResult result = (AsyncTaskResult) msg.obj; switch (msg.what) { case MESSAGE_POST_RESULT: // There is only one result result.mTask.finish(result.mData[0]); break; case MESSAGE_POST_PROGRESS: result.mTask.onProgressUpdate(result.mData); break; } } } private static abstract class WorkerRunnable<Params, Result> implements Callable<Result> { Params[] mParams; } @SuppressWarnings({"RawUseOfParameterizedType"}) private static class AsyncTaskResult<Data> { final AsyncTask mTask; final Data[] mData; AsyncTaskResult(AsyncTask task, Data... data) { mTask = task; mData = data; } } }
reproio/apps-android-wikipedia
wikipedia/src/main/java/org/wikipedia/concurrency/AsyncTask.java
Java
apache-2.0
26,978
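The class-level Javadoc in the AsyncTask copy above already spells out the Params/Progress/Result generics, the four lifecycle steps, and the executeOnExecutor escape hatch for parallel execution. As a hedged illustration only, a caller might exercise the copied class as sketched below; the SumTask name, its body, and the log tag are invented for this example and are not part of the Wikipedia app.

```java
// Illustrative sketch only: a trivial subclass of the copied AsyncTask, run on
// the parallel executor as the Javadoc describes. Must be used from an Android
// UI thread; names here are assumptions, not Wikipedia-app code.
import org.wikipedia.concurrency.AsyncTask;

public class SumTask extends AsyncTask<Integer, Void, Long> {

    @Override
    protected Long doInBackground(Integer... params) {
        long total = 0;
        for (Integer p : params) {
            if (isCancelled()) {
                break; // honour cancellation, as the Javadoc recommends
            }
            total += p;
        }
        return total;
    }

    @Override
    protected void onPostExecute(Long result) {
        // Runs on the UI thread once doInBackground() has returned.
        android.util.Log.d("SumTask", "sum=" + result);
    }

    public static void start() {
        // Invoked on the UI thread; THREAD_POOL_EXECUTOR opts into parallel
        // execution instead of the default serial executor.
        new SumTask().executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, 1, 2, 3);
    }
}
```

The only behavioural difference from plain execute() is the executor choice; ordering guarantees are lost once THREAD_POOL_EXECUTOR is used, exactly as the warning in executeOnExecutor's Javadoc states.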
package cn.edu.hhu.reg.vo;

import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;

@Entity
@Table(name="doctor_login")
public class DoctorLogin {

    @Id
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    @Column(length = 16)
    private Integer id;

    /**
     * Doctor id
     */
    @Column(name="doctor_id",length=16)
    private Integer doctorId;

    /**
     * Doctor login name
     */
    @Column(name="login_name",length=50)
    private String loginName;

    /**
     * Doctor login password
     */
    @Column(name="password",length=50)
    private String password;

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public Integer getDoctorId() {
        return doctorId;
    }

    public void setDoctorId(Integer doctorId) {
        this.doctorId = doctorId;
    }

    public String getLoginName() {
        return loginName;
    }

    public void setLoginName(String loginName) {
        this.loginName = loginName;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public DoctorLogin() {
    }
}
pqpo/registration_api
src/cn/edu/hhu/reg/vo/DoctorLogin.java
Java
apache-2.0
1,216
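DoctorLogin above is a plain JPA entity mapped to the doctor_login table with an IDENTITY-generated key. A minimal, hedged sketch of persisting it through the standard javax.persistence API follows; the persistence-unit name "registration" and the sample field values are assumptions made for illustration, not taken from the project, which may bootstrap Hibernate differently.

```java
// Hedged sketch: persisting the DoctorLogin entity with plain JPA.
// The persistence-unit name and the sample values are invented for the example.
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

import cn.edu.hhu.reg.vo.DoctorLogin;

public class DoctorLoginDemo {
    public static void main(String[] args) {
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("registration");
        EntityManager em = emf.createEntityManager();
        em.getTransaction().begin();

        DoctorLogin login = new DoctorLogin();
        login.setDoctorId(42);          // hypothetical doctor id
        login.setLoginName("dr.zhang"); // hypothetical credentials
        login.setPassword("secret");

        em.persist(login);              // id is generated by the IDENTITY strategy
        em.getTransaction().commit();

        em.close();
        emf.close();
    }
}
```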
# remark-stringify [![Build][build-badge]][build] [![Coverage][coverage-badge]][coverage] [![Downloads][downloads-badge]][downloads] [![Size][size-badge]][size] [![Chat][chat-badge]][chat] [![Sponsors][sponsors-badge]][collective] [![Backers][backers-badge]][collective] [Compiler][] for [**unified**][unified]. Serializes [**mdast**][mdast] syntax trees to Markdown. Used in the [**remark** processor][remark] but can be used on its own as well. Can be [extended][extend] to change how Markdown is serialized. ## Install [npm][]: ```sh npm install remark-stringify ``` ## Use ```js var unified = require('unified') var createStream = require('unified-stream') var html = require('rehype-parse') var rehype2remark = require('rehype-remark') var stringify = require('remark-stringify') var processor = unified().use(html).use(rehype2remark).use(stringify, { bullet: '*', fence: '~', fences: true, incrementListMarker: false }) process.stdin.pipe(createStream(processor)).pipe(process.stdout) ``` [See **unified** for more examples »][unified] ## API [See **unified** for API docs »][unified] ### `processor().use(stringify[, options])` Configure the `processor` to serialize [**mdast**][mdast] syntax trees to Markdown. ###### `options` Options can be passed directly, or passed later through [`processor.data()`][data]. All the formatting options of [`mdast-util-to-markdown`][to-markdown-options] are supported and will be passed through. ## Extending the compiler See [`mdast-util-to-markdown`][to-markdown]. Then create a wrapper plugin such as [`remark-gfm`][remark-gfm]. ## Security `remark-stringify` will do its best to serialize markdown to match the syntax tree, but there are several cases where that is impossible. It’ll do its best, but complete roundtripping is impossible given that any value could be injected into the tree. As Markdown is sometimes used for HTML, and improper use of HTML can open you up to a [cross-site scripting (XSS)][xss] attack, use of `remark-stringify` and parsing it again later can potentially be unsafe. When parsing Markdown afterwards, use remark in combination with the [**rehype**][rehype] ecosystem, and use [`rehype-sanitize`][sanitize] to make the tree safe. Use of remark plugins could also open you up to other attacks. Carefully assess each plugin and the risks involved in using them. ## Contribute See [`contributing.md`][contributing] in [`remarkjs/.github`][health] for ways to get started. See [`support.md`][support] for ways to get help. Ideas for new plugins and tools can be posted in [`remarkjs/ideas`][ideas]. A curated list of awesome remark resources can be found in [**awesome remark**][awesome]. This project has a [code of conduct][coc]. By interacting with this repository, organization, or community you agree to abide by its terms. ## Sponsor Support this effort and give back by sponsoring on [OpenCollective][collective]! 
<!--lint ignore no-html--> <table> <tr valign="middle"> <td width="20%" align="center" colspan="2"> <a href="https://www.gatsbyjs.org">Gatsby</a> 🥇<br><br> <a href="https://www.gatsbyjs.org"><img src="https://avatars1.githubusercontent.com/u/12551863?s=256&v=4" width="128"></a> </td> <td width="20%" align="center" colspan="2"> <a href="https://vercel.com">Vercel</a> 🥇<br><br> <a href="https://vercel.com"><img src="https://avatars1.githubusercontent.com/u/14985020?s=256&v=4" width="128"></a> </td> <td width="20%" align="center" colspan="2"> <a href="https://www.netlify.com">Netlify</a><br><br> <!--OC has a sharper image--> <a href="https://www.netlify.com"><img src="https://images.opencollective.com/netlify/4087de2/logo/256.png" width="128"></a> </td> <td width="10%" align="center"> <a href="https://www.holloway.com">Holloway</a><br><br> <a href="https://www.holloway.com"><img src="https://avatars1.githubusercontent.com/u/35904294?s=128&v=4" width="64"></a> </td> <td width="10%" align="center"> <a href="https://themeisle.com">ThemeIsle</a><br><br> <a href="https://themeisle.com"><img src="https://avatars1.githubusercontent.com/u/58979018?s=128&v=4" width="64"></a> </td> <td width="10%" align="center"> <a href="https://boosthub.io">Boost Hub</a><br><br> <a href="https://boosthub.io"><img src="https://images.opencollective.com/boosthub/6318083/logo/128.png" width="64"></a> </td> <td width="10%" align="center"> <a href="https://expo.io">Expo</a><br><br> <a href="https://expo.io"><img src="https://avatars1.githubusercontent.com/u/12504344?s=128&v=4" width="64"></a> </td> </tr> <tr valign="middle"> <td width="100%" align="center" colspan="10"> <br> <a href="https://opencollective.com/unified"><strong>You?</strong></a> <br><br> </td> </tr> </table> ## License [MIT][license] © [Titus Wormer][author] <!-- Definitions --> [build-badge]: https://img.shields.io/travis/remarkjs/remark.svg [build]: https://travis-ci.org/remarkjs/remark [coverage-badge]: https://img.shields.io/codecov/c/github/remarkjs/remark.svg [coverage]: https://codecov.io/github/remarkjs/remark [downloads-badge]: https://img.shields.io/npm/dm/remark-stringify.svg [downloads]: https://www.npmjs.com/package/remark-stringify [size-badge]: https://img.shields.io/bundlephobia/minzip/remark-stringify.svg [size]: https://bundlephobia.com/result?p=remark-stringify [sponsors-badge]: https://opencollective.com/unified/sponsors/badge.svg [backers-badge]: https://opencollective.com/unified/backers/badge.svg [collective]: https://opencollective.com/unified [chat-badge]: https://img.shields.io/badge/chat-discussions-success.svg [chat]: https://github.com/remarkjs/remark/discussions [health]: https://github.com/remarkjs/.github [contributing]: https://github.com/remarkjs/.github/blob/HEAD/contributing.md [support]: https://github.com/remarkjs/.github/blob/HEAD/support.md [coc]: https://github.com/remarkjs/.github/blob/HEAD/code-of-conduct.md [ideas]: https://github.com/remarkjs/ideas [awesome]: https://github.com/remarkjs/awesome-remark [license]: https://github.com/remarkjs/remark/blob/main/license [author]: https://wooorm.com [npm]: https://docs.npmjs.com/cli/install [unified]: https://github.com/unifiedjs/unified [data]: https://github.com/unifiedjs/unified#processordatakey-value [remark]: https://github.com/remarkjs/remark/tree/main/packages/remark [compiler]: https://github.com/unifiedjs/unified#processorcompiler [mdast]: https://github.com/syntax-tree/mdast [xss]: https://en.wikipedia.org/wiki/Cross-site_scripting [rehype]: 
https://github.com/rehypejs/rehype [sanitize]: https://github.com/rehypejs/rehype-sanitize [to-markdown]: https://github.com/syntax-tree/mdast-util-to-markdown [to-markdown-options]: https://github.com/syntax-tree/mdast-util-to-markdown#formatting-options [extend]: #extending-the-compiler [remark-gfm]: https://github.com/remarkjs/remark-gfm
kyleterry/sufr
pkg/ui/node_modules/remark-stringify/readme.md
Markdown
apache-2.0
6,900
/* * Copyright 2018 Aleksander Jagiełło * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package pl.themolka.arcade.team; import org.bukkit.ChatColor; import pl.themolka.arcade.command.CommandException; import pl.themolka.arcade.command.CommandUtils; import pl.themolka.arcade.command.Sender; import pl.themolka.arcade.game.GamePlayer; import pl.themolka.arcade.match.Observers; import pl.themolka.arcade.parser.Context; import pl.themolka.arcade.util.Color; import java.util.ArrayList; import java.util.Collection; public class TeamCommands { private final TeamsGame game; public TeamCommands(TeamsGame game) { this.game = game; } // // Commands // public void clearCommand(Sender sender, String teamId) { Team team = this.fetchTeam(teamId); if (team.isObservers()) { throw new CommandException("Cannot clear observers."); } Observers observers = this.game.getMatch().getObservers(); int result = 0; for (GamePlayer player : new ArrayList<>(team.getOnlineMembers())) { observers.joinForce(player); result++; } if (result > 0) { sender.sendSuccess(team.getName() + " has been cleared (" + result + " players) and moved to " + observers.getName() + "."); } else { sender.sendError("No players to clear."); } } public void forceCommand(Sender sender, String username, String teamId) { GamePlayer player = this.fetchPlayer(username); Team team = this.fetchTeam(teamId); if (team.contains(player)) { throw new CommandException(player.getUsername() + " is already member of " + team.getName() + "."); } team.joinForce(player); sender.sendSuccess(player.getUsername() + " has been moved to " + team.getName() + "."); } public void friendlyCommand(Sender sender, String teamId, boolean friendly) { Team team = this.fetchTeam(teamId); if (team.isObservers()) { throw new CommandException("Cannot edit observers."); } if (friendly == team.isFriendlyFire()) { if (friendly) { throw new CommandException(team.getName() + " is already in friendly-fire."); } else { throw new CommandException(team.getName() + " is already not in friendly-fire"); } } Team oldState = new Team(team); team.setFriendlyFire(friendly); this.callEditEvent(team, oldState, TeamEditEvent.Reason.FRIENDLY_FIRE); if (friendly) { sender.sendSuccess(oldState.getName() + " is now in friendly-fire."); } else { sender.sendSuccess(oldState.getName() + " is now not in friendly-fire."); } } public void infoCommand(Sender sender) { Collection<Team> teams = this.game.getTeams(); CommandUtils.sendTitleMessage(sender, "Teams", Integer.toString(teams.size())); for (Team team : teams) { sender.send(String.format("%s - %s/%s - %s minimal to play and %s overfill", team.getPrettyName() + ChatColor.GRAY, ChatColor.GOLD.toString() + team.getOnlineMembers().size() + ChatColor.GRAY, Integer.toString(team.getSlots()), ChatColor.GREEN.toString() + team.getMinPlayers() + ChatColor.GRAY, ChatColor.RED.toString() + team.getMaxPlayers() + ChatColor.GRAY)); } } public void kickCommand(Sender sender, String username) { GamePlayer player = this.fetchPlayer(username); Team team = this.game.getTeam(player); if 
(team.isObservers()) { throw new CommandException("Cannot kick from observers."); } team.leaveForce(player); team.getMatch().getObservers().joinForce(player); sender.sendSuccess(player.getUsername() + " has been kicked from " + team.getName() + "."); } public void minCommand(Sender sender, String teamId, int min) { Team team = this.fetchTeam(teamId); if (team.isObservers()) { throw new CommandException("Cannot edit observers."); } else if (min < 0) { throw new CommandException("Number cannot be negative."); } Team oldState = new Team(team); team.setMinPlayers(min); this.callEditEvent(team, oldState, TeamEditEvent.Reason.MIN_PLAYERS); sender.sendSuccess(oldState.getName() + " has been edited."); } public void overfillCommand(Sender sender, String teamId, int overfill) { Team team = this.fetchTeam(teamId); if (team.isObservers()) { throw new CommandException("Cannot edit observers."); } // set to unlimited if zero or negative int max = Integer.MAX_VALUE; if (overfill > 0) { max = overfill; } Team oldState = new Team(team); team.setMaxPlayers(max); if (max > team.getSlots()) { team.setSlots(max); // slots } this.callEditEvent(team, oldState, TeamEditEvent.Reason.MAX_PLAYERS); sender.sendSuccess(oldState.getName() + " has been edited."); } public void paintCommand(Sender sender, String teamId, String paint) { Team team = this.fetchTeam(teamId); ChatColor color = Color.parseChat(new Context(this.game.getPlugin()), paint); if (color == null) { StringBuilder colors = new StringBuilder(); for (int i = 0; i < ChatColor.values().length; i++) { ChatColor value = ChatColor.values()[i]; if (i != 0) { colors.append(", "); } ChatColor result = ChatColor.RED; if (!value.equals(ChatColor.MAGIC)) { result = value; } colors.append(result).append(value.name().toLowerCase().replace("_", "-")) .append(ChatColor.RESET).append(ChatColor.RED); } throw new CommandException("Available colors: " + colors.toString() + "."); } Team oldState = new Team(team); team.setChatColor(color); this.callEditEvent(team, oldState, TeamEditEvent.Reason.PAINT); sender.sendSuccess(oldState.getName() + " has been painted from " + oldState.getChatColor().name().toLowerCase().replace("_", "-") + " to " + team.getChatColor().name().toLowerCase().replace("_", "-") + "."); } public void renameCommand(Sender sender, String teamId, String name) { Team team = this.fetchTeam(teamId); if (name == null) { throw new CommandException("New name not given."); } else if (name.length() > Team.NAME_MAX_LENGTH) { throw new CommandException("Name too long (greater than " + Team.NAME_MAX_LENGTH + " characters)."); } else if (team.getName().equals(name)) { throw new CommandException("Already named '" + team.getName() + "'."); } Team oldState = new Team(team); team.setName(name); this.callEditEvent(team, oldState, TeamEditEvent.Reason.RENAME); sender.sendSuccess(oldState.getName() + " has been renamed to " + team.getName() + "."); } public void slotsCommand(Sender sender, String teamId, int slots) { Team team = this.fetchTeam(teamId); if (team.isObservers()) { throw new CommandException("Cannot edit observers."); } // set to unlimited if zero or negative int max = Integer.MAX_VALUE; if (slots > 0) { max = slots; } Team oldState = new Team(team); team.setSlots(max); if (max > team.getMaxPlayers()) { team.setMaxPlayers(max); // overfill } this.callEditEvent(team, oldState, TeamEditEvent.Reason.SLOTS); sender.sendSuccess(oldState.getName() + " has been edited."); } // // Command Utilities // private void callEditEvent(Team newState, Team oldState, 
TeamEditEvent.Reason reason) { this.game.getPlugin().getEventBus().publish(new TeamEditEvent( this.game.getPlugin(), newState, oldState, reason)); } private GamePlayer fetchPlayer(String player) { if (player != null && !player.isEmpty()) { GamePlayer result = this.game.getGame().findPlayer(player); if (result != null) { return result; } } throw new CommandException("Player not found."); } private Team fetchTeam(String team) { if (team != null && !team.isEmpty()) { Team result = this.game.findTeamById(team); if (result != null) { return result; } } throw new CommandException("Team not found."); } }
ShootGame/Arcade2
src/main/java/pl/themolka/arcade/team/TeamCommands.java
Java
apache-2.0
9,514
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (version 1.7.0_25) on Tue Oct 08 12:24:28 JST 2013 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>Uses of Class twitter4j.examples.geo.CreatePlace (twitter4j-examples 3.0.4 API)</title> <meta name="date" content="2013-10-08"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class twitter4j.examples.geo.CreatePlace (twitter4j-examples 3.0.4 API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../twitter4j/examples/geo/CreatePlace.html" title="class in twitter4j.examples.geo">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../index.html?twitter4j/examples/geo/class-use/CreatePlace.html" target="_top">Frames</a></li> <li><a href="CreatePlace.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class twitter4j.examples.geo.CreatePlace" class="title">Uses of Class<br>twitter4j.examples.geo.CreatePlace</h2> </div> <div class="classUseContainer">No usage of twitter4j.examples.geo.CreatePlace</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../twitter4j/examples/geo/CreatePlace.html" title="class in twitter4j.examples.geo">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../index.html?twitter4j/examples/geo/class-use/CreatePlace.html" 
target="_top">Frames</a></li> <li><a href="CreatePlace.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2013. All Rights Reserved.</small></p> </body> </html>
vaglucas/cafeUnoesc
twitter4j-examples/javadoc/twitter4j/examples/geo/class-use/CreatePlace.html
HTML
apache-2.0
4,269
package problems;

import java.util.Arrays;
import java.util.PriorityQueue;

/**
 * Leetcode: Super Ugly Number
 * Created by alan on 2/24/2016.
 */
public class SuperUglyNumber {

    class Node implements Comparable<Node> {
        int val;
        final int prime_index;

        public Node(int value, int prime_idx) {
            this.val = value;
            this.prime_index = prime_idx;
        }

        public int compareTo(Node a) {
            return this.val - a.val;
        }
    }

    public int[] nthSuperUglyNumber(int n, int[] primes) {
        int[] nums = new int[n];
        nums[0] = 1;
        int[] index = new int[primes.length];
        PriorityQueue<Node> pq = new PriorityQueue<>();
        for (int i = 0; i < primes.length; i++) pq.add(new Node(primes[i], i));
        for (int i = 1; i < n; i++) {
            Node node = pq.poll();
            while (node.val == nums[i - 1]) {
                node.val = nums[++index[node.prime_index]] * primes[node.prime_index];
                pq.add(node);
                node = pq.poll();
            }
            nums[i] = node.val;
            node.val = nums[++index[node.prime_index]] * primes[node.prime_index];
            pq.add(node);
        }
        return nums;
    }

    public static void main(String[] args) {
        SuperUglyNumber sn = new SuperUglyNumber();
        int[] primes = {2, 7, 13, 19};
        System.out.println(Arrays.toString(primes));
        System.out.println(Arrays.toString(sn.nthSuperUglyNumber(12, primes)));
    }
}
alyiwang/LeetCode
src/problems/SuperUglyNumber.java
Java
apache-2.0
1,548
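The heap-based solution above keeps one (value, prime index) node per prime and repeatedly advances the pointer of whichever prime produced the smallest candidate. For cross-checking, here is a minimal dynamic-programming sketch of the same computation without a priority queue; it is illustrative only and the class name is invented.

```java
// DP cross-check for the heap-based version: each prime keeps an index into the
// result array, and the next number is the smallest of nums[idx[j]] * primes[j].
import java.util.Arrays;

public class SuperUglyNumberDp {
    public static int[] firstN(int n, int[] primes) {
        int[] nums = new int[n];
        nums[0] = 1;
        int[] idx = new int[primes.length];
        for (int i = 1; i < n; i++) {
            int next = Integer.MAX_VALUE;
            for (int j = 0; j < primes.length; j++) {
                next = Math.min(next, nums[idx[j]] * primes[j]);
            }
            nums[i] = next;
            // Advance every pointer that produced this value, so duplicates are skipped.
            for (int j = 0; j < primes.length; j++) {
                if (nums[idx[j]] * primes[j] == next) {
                    idx[j]++;
                }
            }
        }
        return nums;
    }

    public static void main(String[] args) {
        // Should print the same sequence as SuperUglyNumber.nthSuperUglyNumber(12, primes):
        // [1, 2, 4, 7, 8, 13, 14, 16, 19, 26, 28, 32]
        System.out.println(Arrays.toString(firstN(12, new int[]{2, 7, 13, 19})));
    }
}
```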
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package services

import models.autocomplete.{CountryDataProvider, NameValuePair}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatestplus.play.PlaySpec

class AutoCompleteServiceSpec extends PlaySpec with MockitoSugar {

  trait Fixture {
    val locations = Seq(
      NameValuePair("Location 1", "location:1"),
      NameValuePair("Location 2", "location:2")
    )

    val service = new AutoCompleteService(new CountryDataProvider {
      override def fetch: Option[Seq[NameValuePair]] = Some(locations)
    })
  }

  "getLocations" must {
    "return a list of locations loaded from a resource file" in new Fixture {
      service.getCountries mustBe Some(locations)
    }
  }
}
hmrc/amls-frontend
test/services/AutoCompleteServiceSpec.scala
Scala
apache-2.0
1,305
package yuku.alkitab.base.util; import android.app.Activity; import android.app.Dialog; import android.content.Intent; import android.database.Cursor; import android.database.DatabaseUtils; import android.database.sqlite.SQLiteDatabase; import android.os.AsyncTask; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.util.Xml; import com.afollestad.materialdialogs.MaterialDialog; import gnu.trove.list.TIntList; import gnu.trove.list.array.TIntArrayList; import gnu.trove.map.hash.TIntLongHashMap; import gnu.trove.map.hash.TIntObjectHashMap; import gnu.trove.map.hash.TObjectIntHashMap; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.ext.DefaultHandler2; import yuku.alkitab.base.App; import yuku.alkitab.base.IsiActivity; import yuku.alkitab.base.S; import yuku.alkitab.base.storage.Db; import yuku.alkitab.base.storage.InternalDb; import yuku.alkitab.debug.R; import yuku.alkitab.model.Label; import yuku.alkitab.model.Marker; import yuku.alkitab.model.Marker_Label; import java.io.InputStream; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import static yuku.alkitab.base.util.Literals.ToStringArray; // Imported from v3. Used for once-only migration from v3 to v4. public class BookmarkImporter { static final String TAG = BookmarkImporter.class.getSimpleName(); // constants static class Bookmark2_Label { // DO NOT CHANGE CONSTANT VALUES! public static final String XMLTAG_Bookmark2_Label = "Bukmak2_Label"; public static final String XMLATTR_bookmark2_relId = "bukmak2_relId"; public static final String XMLATTR_label_relId = "label_relId"; } // constants static class BackupManager { public static final String XMLTAG_Bukmak2 = "Bukmak2"; private static final String XMLATTR_ari = "ari"; private static final String XMLATTR_kind = "jenis"; private static final String XMLATTR_caption = "tulisan"; private static final String XMLATTR_addTime = "waktuTambah"; private static final String XMLATTR_modifyTime = "waktuUbah"; private static final String XMLATTR_relId = "relId"; private static final String XMLVAL_bookmark = "bukmak"; private static final String XMLVAL_note = "catatan"; private static final String XMLVAL_highlight = "stabilo"; public static final String XMLTAG_Label = "Label"; private static final String XMLATTR_title = "judul"; private static final String XMLATTR_bgColor = "warnaLatar"; @Nullable public static Marker markerFromAttributes(Attributes attributes) { int ari = Integer.parseInt(attributes.getValue("", XMLATTR_ari)); String kind_s = attributes.getValue("", XMLATTR_kind); Marker.Kind kind = kind_s.equals(XMLVAL_bookmark) ? Marker.Kind.bookmark : kind_s.equals(XMLVAL_note) ? Marker.Kind.note : kind_s.equals(XMLVAL_highlight) ? Marker.Kind.highlight : null; String caption = unescapeHighUnicode(attributes.getValue("", XMLATTR_caption)); Date addTime = Sqlitil.toDate(Integer.parseInt(attributes.getValue("", XMLATTR_addTime))); Date modifyTime = Sqlitil.toDate(Integer.parseInt(attributes.getValue("", XMLATTR_modifyTime))); if (kind == null) { // invalid return null; } return Marker.createNewMarker(ari, kind, caption, 1, addTime, modifyTime); } public static int getRelId(Attributes attributes) { String s = attributes.getValue("", XMLATTR_relId); return s == null ? 
0 : Integer.parseInt(s); } public static Label labelFromAttributes(Attributes attributes) { String title = unescapeHighUnicode(attributes.getValue("", XMLATTR_title)); String bgColor = attributes.getValue("", XMLATTR_bgColor); return Label.createNewLabel(title, 0, bgColor); } static ThreadLocal<Matcher> highUnicodeMatcher = new ThreadLocal<Matcher>() { @Override protected Matcher initialValue() { return Pattern.compile("\\[\\[~U([0-9A-Fa-f]{6})~\\]\\]").matcher(""); } }; public static String unescapeHighUnicode(String input) { if (input == null) return null; final Matcher m = highUnicodeMatcher.get(); m.reset(input); StringBuffer res = new StringBuffer(); while (m.find()) { String s = m.group(1); final int cp = Integer.parseInt(s, 16); m.appendReplacement(res, new String(new int[]{cp}, 0, 1)); } m.appendTail(res); return res.toString(); } } public static void importBookmarks(final Activity activity, @NonNull final InputStream fis, final boolean finishActivityAfterwards, final Runnable runWhenDone) { final MaterialDialog pd = new MaterialDialog.Builder(activity) .content(R.string.mengimpor_titiktiga) .cancelable(false) .progress(true, 0) .show(); new AsyncTask<Boolean, Integer, Object>() { int count_bookmark = 0; int count_label = 0; @Override protected Object doInBackground(Boolean... params) { final List<Marker> markers = new ArrayList<>(); final TObjectIntHashMap<Marker> markerToRelIdMap = new TObjectIntHashMap<>(); final List<Label> labels = new ArrayList<>(); final TObjectIntHashMap<Label> labelToRelIdMap = new TObjectIntHashMap<>(); final TIntLongHashMap labelRelIdToAbsIdMap = new TIntLongHashMap(); final TIntObjectHashMap<TIntList> markerRelIdToLabelRelIdsMap = new TIntObjectHashMap<>(); try { Xml.parse(fis, Xml.Encoding.UTF_8, new DefaultHandler2() { @Override public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException { switch (localName) { case BackupManager.XMLTAG_Bukmak2: final Marker marker = BackupManager.markerFromAttributes(attributes); if (marker != null) { markers.add(marker); final int bookmark2_relId = BackupManager.getRelId(attributes); markerToRelIdMap.put(marker, bookmark2_relId); count_bookmark++; } break; case BackupManager.XMLTAG_Label: { final Label label = BackupManager.labelFromAttributes(attributes); int label_relId = BackupManager.getRelId(attributes); labels.add(label); labelToRelIdMap.put(label, label_relId); count_label++; break; } case Bookmark2_Label.XMLTAG_Bookmark2_Label: { final int bookmark2_relId = Integer.parseInt(attributes.getValue("", Bookmark2_Label.XMLATTR_bookmark2_relId)); final int label_relId = Integer.parseInt(attributes.getValue("", Bookmark2_Label.XMLATTR_label_relId)); TIntList labelRelIds = markerRelIdToLabelRelIdsMap.get(bookmark2_relId); if (labelRelIds == null) { labelRelIds = new TIntArrayList(); markerRelIdToLabelRelIdsMap.put(bookmark2_relId, labelRelIds); } labelRelIds.add(label_relId); break; } } } }); fis.close(); } catch (Exception e) { return e; } { // bikin label-label yang diperlukan, juga map relId dengan id dari label. 
final HashMap<String, Label> judulMap = new HashMap<>(); final List<Label> xlabelLama = S.getDb().listAllLabels(); for (Label labelLama : xlabelLama) { judulMap.put(labelLama.title, labelLama); } for (Label label : labels) { // cari apakah label yang judulnya persis sama udah ada Label labelLama = judulMap.get(label.title); final int labelRelId = labelToRelIdMap.get(label); if (labelLama != null) { // removed from v3: update warna label lama labelRelIdToAbsIdMap.put(labelRelId, labelLama._id); AppLog.d(TAG, "label (lama) r->a : " + labelRelId + "->" + labelLama._id); } else { // belum ada, harus bikin baru Label labelBaru = S.getDb().insertLabel(label.title, label.backgroundColor); labelRelIdToAbsIdMap.put(labelRelId, labelBaru._id); AppLog.d(TAG, "label (baru) r->a : " + labelRelId + "->" + labelBaru._id); } } } importBookmarks(markers, markerToRelIdMap, labelRelIdToAbsIdMap, markerRelIdToLabelRelIdsMap); return null; } @Override protected void onPostExecute(@NonNull Object result) { pd.dismiss(); if (result instanceof Exception) { AppLog.e(TAG, "Error when importing markers", (Throwable) result); new MaterialDialog.Builder(activity) .content(activity.getString(R.string.terjadi_kesalahan_ketika_mengimpor_pesan, ((Exception) result).getMessage())) .positiveText(R.string.ok) .show(); } else { final Dialog dialog = new MaterialDialog.Builder(activity) .content(activity.getString(R.string.impor_berhasil_angka_diproses, count_bookmark, count_label)) .positiveText(R.string.ok) .show(); if (finishActivityAfterwards) { dialog.setOnDismissListener(dialog1 -> activity.finish()); } } if (runWhenDone != null) runWhenDone.run(); } }.execute(); } public static void importBookmarks(List<Marker> markers, TObjectIntHashMap<Marker> markerToRelIdMap, TIntLongHashMap labelRelIdToAbsIdMap, TIntObjectHashMap<TIntList> markerRelIdToLabelRelIdsMap) { SQLiteDatabase db = S.getDb().getWritableDatabase(); db.beginTransaction(); try { final TIntObjectHashMap<Marker> markerRelIdToMarker = new TIntObjectHashMap<>(); { // write new markers (if not available yet) for (int i = 0; i < markers.size(); i++) { Marker marker = markers.get(i); final int marker_relId = markerToRelIdMap.get(marker); // migrate: look for existing marker with same kind, ari, and content try (Cursor cursor = db.query( Db.TABLE_Marker, null, Db.Marker.ari + "=? and " + Db.Marker.kind + "=? 
and " + Db.Marker.caption + "=?", ToStringArray(marker.ari, marker.kind.code, marker.caption), null, null, null )) { if (cursor.moveToNext()) { marker = InternalDb.markerFromCursor(cursor); markers.set(i, marker); } else { InternalDb.insertMarker(db, marker); } // map it markerRelIdToMarker.put(marker_relId, marker); } } } { // now is marker-label assignments for (final int marker_relId : markerRelIdToLabelRelIdsMap.keys()) { final TIntList label_relIds = markerRelIdToLabelRelIdsMap.get(marker_relId); final Marker marker = markerRelIdToMarker.get(marker_relId); if (marker != null) { // existing labels > 0: ignore // existing labels == 0: insert final int existing_label_count = (int) DatabaseUtils.queryNumEntries(db, Db.TABLE_Marker_Label, Db.Marker_Label.marker_gid + "=?", ToStringArray(marker.gid)); if (existing_label_count == 0) { for (int label_relId : label_relIds.toArray()) { final long label_id = labelRelIdToAbsIdMap.get(label_relId); if (label_id > 0) { final Label label = S.getDb().getLabelById(label_id); final Marker_Label marker_label = Marker_Label.createNewMarker_Label(marker.gid, label.gid); InternalDb.insertMarker_LabelIfNotExists(db, marker_label); } else { AppLog.w(TAG, "label_id is invalid!: " + label_id); } } } } else { AppLog.w(TAG, "wrong marker_relId: " + marker_relId); } } } db.setTransactionSuccessful(); } finally { db.endTransaction(); } App.getLbm().sendBroadcast(new Intent(IsiActivity.ACTION_ATTRIBUTE_MAP_CHANGED)); } }
infojulio/androidbible
Alkitab/src/main/java/yuku/alkitab/base/util/BookmarkImporter.java
Java
apache-2.0
11,461
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.core.region;

import java.util.Set;

import org.joda.beans.impl.flexi.FlexiBean;
import org.threeten.bp.ZoneId;

import com.opengamma.id.ExternalBundleIdentifiable;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.UniqueId;
import com.opengamma.id.UniqueIdentifiable;
import com.opengamma.util.PublicAPI;
import com.opengamma.util.i18n.Country;
import com.opengamma.util.money.Currency;

/**
 * A region of the world.
 * <p>
 * Many aspects of business, algorithms and contracts are specific to a region.
 * The region may be of any size, from a municipality to a super-national group.
 * <p>
 * This interface is read-only. Implementations may be mutable.
 */
@PublicAPI
public interface Region extends UniqueIdentifiable, ExternalBundleIdentifiable {

  /**
   * Gets the unique identifier of the region.
   * <p>
   * This specifies a single version-correction of the region.
   *
   * @return the unique identifier for this region, not null within the engine
   */
  @Override
  UniqueId getUniqueId();

  /**
   * Gets the external identifier bundle that defines the region.
   * <p>
   * Each external system has one or more identifiers by which they refer to the region.
   * Some of these may be unique within that system, while others may be more descriptive.
   * This bundle stores the set of these external identifiers.
   * <p>
   * This will include the country, currency and time-zone.
   *
   * @return the bundle defining the region, not null
   */
  @Override  // override for Javadoc
  ExternalIdBundle getExternalIdBundle();

  /**
   * Gets the classification of the region.
   *
   * @return the classification of region, such as SUPER_NATIONAL or INDEPENDENT_STATE, not null
   */
  RegionClassification getClassification();

  /**
   * Gets the unique identifiers of the regions that this region is a member of.
   * For example, a country might be a member of the World, UN, European Union and NATO.
   *
   * @return the parent unique identifiers, null if this is the root entry
   */
  Set<UniqueId> getParentRegionIds();

  /**
   * Gets the country.
   *
   * @return the country, null if not applicable
   */
  Country getCountry();

  /**
   * Gets the currency.
   *
   * @return the currency, null if not applicable
   */
  Currency getCurrency();

  /**
   * Gets the time-zone.
   * For larger regions, there can be multiple time-zones, so this is only reliable for municipalities.
   *
   * @return the time-zone, null if not applicable
   */
  ZoneId getTimeZone();

  /**
   * Gets the short descriptive name of the region.
   *
   * @return the name of the region, not null
   */
  String getName();

  /**
   * Gets the full descriptive name of the region.
   *
   * @return the full name of the region, not null
   */
  String getFullName();

  /**
   * Gets the extensible data store for additional information.
   * Applications may store additional region based information here.
   *
   * @return the additional data, not null
   */
  FlexiBean getData();

}
McLeodMoores/starling
projects/core/src/main/java/com/opengamma/core/region/Region.java
Java
apache-2.0
3,168
var config = require('./lib/config');
var FaceRec = require('./lib/facerec').FaceRec;

var hfr = new FaceRec(config);

// constant
var threshold = 20;

var prevX;
var prevY;

setInterval(function() {
  var result = hfr.detect();
  console.log('result:' + JSON.stringify(result));

  if (result && result.pos_x && result.pos_y) {
    var newX = result.pos_x;
    var newY = result.pos_y;
    var deltaX = newX - prevX;
    var deltaY = newY - prevY;

    if (Math.abs(deltaX) > threshold) {
      var direction = deltaX > 0 ? "right" : "left";
      console.log("moving head to " + direction + " , distance" + Math.abs(deltaX));
    }

    if (Math.abs(deltaY) > threshold) {
      var direction = deltaY > 0 ? "down" : "up";
      console.log("moving head to " + direction + " , distance" + Math.abs(deltaY));
    }

    console.log('updating x and y');
    prevX = newX;
    prevY = newY;
  }
}, 5000);
project-humix/humix-facerec-module
test.js
JavaScript
apache-2.0
992
/* $Id$
 * $URL: https://dev.almende.com/svn/abms/coala-common/src/main/java/com/almende/coala/time/NanoInstant.java $
 *
 * Part of the EU project Adapt4EE, see http://www.adapt4ee.eu/
 *
 * @license
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 * Copyright (c) 2010-2013 Almende B.V.
 */
package io.coala.time;

/**
 * {@link NanoInstant} has the nano-second as base time unit
 *
 * @date $Date: 2014-06-03 14:26:09 +0200 (Tue, 03 Jun 2014) $
 * @version $Revision: 296 $
 * @author <a href="mailto:[email protected]">Rick</a>
 */
public class NanoInstant extends AbstractInstant<NanoInstant>
{

	/** */
	private static final long serialVersionUID = 1L;

	/** */
	// private static final Logger LOG = LogUtil.getLogger(NanoInstant.class);

	/** */
	// private static final TimeUnit BASE_UNIT = TimeUnit.NANOS;

	/** */
	public static final NanoInstant ZERO = new NanoInstant(null, 0);

	/**
	 * {@link NanoInstant} constructor
	 *
	 * @param value
	 */
	public NanoInstant(final ClockID clockID, final Number value)
	{
		super(clockID, value, TimeUnit.NANOS);
	}

	// /**
	//  * {@link NanoInstant} constructor
	//  *
	//  * @param value
	//  */
	// public NanoInstant(final ClockID clockID, final Number value,
	// 		final TimeUnit unit)
	// {
	// 	super(clockID, value, unit);
	// }
	//
	// /** @see Instant#getBaseUnit() */
	// @Override
	// public TimeUnit getBaseUnit()
	// {
	// 	return BASE_UNIT;
	// }

	/** @see Instant#toUnit(TimeUnit) */
	@Override
	public NanoInstant toUnit(final TimeUnit unit)
	{
		throw new RuntimeException(
				"Can't convert NanoInstant to another TimeUnit");
	}

	/** @see Instant#plus(Number) */
	@Override
	public NanoInstant plus(final Number value)
	{
		return new NanoInstant(getClockID(), getValue().doubleValue()
				+ value.doubleValue());
	}

}
krevelen/coala
coala-core/src/main/java/io/coala/time/NanoInstant.java
Java
apache-2.0
2,299
package cat.ereza.customactivityoncrash.activity;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;

import cat.ereza.customactivityoncrash.CustomActivityOnCrash;

/**
 * Created by zhy on 15/8/4.
 */
public class ClearStack extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        Intent intent = getIntent().getParcelableExtra(CustomActivityOnCrash.KEY_CURRENT_INTENT);
        startActivity(intent);
        finish();
        Runtime.getRuntime().exit(0);
    }
}
hongyangAndroid/CustomActivityOnCrash
library/src/main/java/cat/ereza/customactivityoncrash/activity/ClearStack.java
Java
apache-2.0
596