package brooklyn.entity.rebind;
import static brooklyn.test.EntityTestUtils.assertAttributeEquals;
import static brooklyn.test.EntityTestUtils.assertConfigEquals;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNotSame;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import java.io.File;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import brooklyn.config.ConfigKey;
import brooklyn.entity.Application;
import brooklyn.entity.Entity;
import brooklyn.entity.basic.AbstractEntity;
import brooklyn.entity.basic.ApplicationBuilder;
import brooklyn.entity.basic.BasicGroup;
import brooklyn.entity.basic.Entities;
import brooklyn.entity.basic.EntityLocal;
import brooklyn.entity.basic.EntityPredicates;
import brooklyn.entity.proxying.EntitySpec;
import brooklyn.entity.proxying.ImplementedBy;
import brooklyn.entity.rebind.RebindLocationTest.MyLocation;
import brooklyn.entity.trait.Startable;
import brooklyn.event.AttributeSensor;
import brooklyn.event.SensorEvent;
import brooklyn.event.SensorEventListener;
import brooklyn.event.basic.BasicAttributeSensor;
import brooklyn.event.basic.BasicConfigKey;
import brooklyn.event.basic.BasicSensorEvent;
import brooklyn.event.basic.DependentConfiguration;
import brooklyn.event.basic.Sensors;
import brooklyn.location.Location;
import brooklyn.management.internal.LocalManagementContext;
import brooklyn.mementos.EntityMemento;
import brooklyn.test.TestUtils;
import brooklyn.test.entity.TestApplication;
import brooklyn.test.entity.TestEntity;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.exceptions.Exceptions;
import brooklyn.util.exceptions.RuntimeInterruptedException;
import brooklyn.util.flags.SetFromFlag;
import com.google.common.base.Objects;
import com.google.common.base.Predicates;
import com.google.common.base.Suppliers;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.io.Files;
public class RebindEntityTest {
// FIXME Add test about dependent configuration serialization?!
// TODO Convert, so not calling entity constructors
private static final long TIMEOUT_MS = 10*1000;
private ClassLoader classLoader = getClass().getClassLoader();
private LocalManagementContext origManagementContext;
private TestApplication origApp;
private TestApplication newApp;
private LocalManagementContext newManagementContext;
private File mementoDir;
@BeforeMethod(alwaysRun=true)
public void setUp() throws Exception {
mementoDir = Files.createTempDir();
origManagementContext = RebindTestUtils.newPersistingManagementContext(mementoDir, classLoader, 1);
origApp = ApplicationBuilder.newManagedApp(EntitySpec.create(TestApplication.class), origManagementContext);
}
@AfterMethod(alwaysRun=true)
public void tearDown() throws Exception {
if (origManagementContext != null) Entities.destroyAll(origManagementContext);
if (newApp != null) Entities.destroyAll(newApp.getManagementContext());
if (newManagementContext != null) Entities.destroyAll(newManagementContext);
if (mementoDir != null) RebindTestUtils.deleteMementoDir(mementoDir);
}
@Test
public void testRestoresSimpleApp() throws Exception {
newApp = rebind();
assertNotSame(newApp, origApp);
assertEquals(newApp.getId(), origApp.getId());
}
@Test
public void testRestoresEntityHierarchy() throws Exception {
TestEntity origE = origApp.createAndManageChild(EntitySpec.create(TestEntity.class));
TestEntity origE2 = origE.createAndManageChild(EntitySpec.create(TestEntity.class));
newApp = rebind();
// Assert has expected config/fields
assertEquals(newApp.getId(), origApp.getId());
assertEquals(newApp.getChildren().size(), 1);
TestEntity newE = (TestEntity) Iterables.get(newApp.getChildren(), 0);
assertEquals(newE.getId(), origE.getId());
assertEquals(newE.getChildren().size(), 1);
TestEntity newE2 = (TestEntity) Iterables.get(newE.getChildren(), 0);
assertEquals(newE2.getId(), origE2.getId());
assertNotSame(origApp, newApp);
assertNotSame(origApp.getManagementContext(), newApp.getManagementContext());
assertNotSame(origE, newE);
assertNotSame(origE2, newE2);
}
@Test
public void testRestoresGroupMembers() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
MyEntity origE2 = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
BasicGroup origG = origApp.createAndManageChild(EntitySpec.create(BasicGroup.class));
origG.addMember(origE);
origG.addMember(origE2);
newApp = rebind();
BasicGroup newG = (BasicGroup) Iterables.find(newApp.getChildren(), Predicates.instanceOf(BasicGroup.class));
Iterable<Entity> newEs = Iterables.filter(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertEquals(ImmutableSet.copyOf(newG.getMembers()), ImmutableSet.copyOf(newEs));
}
@Test
public void testRestoresEntityConfig() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class).configure("myconfig", "myval"));
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertEquals(newE.getConfig(MyEntity.MY_CONFIG), "myval");
}
@Test
public void testRestoresEntityDependentConfigCompleted() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class)
.configure("myconfig", DependentConfiguration.attributeWhenReady(origApp, TestApplication.MY_ATTRIBUTE)));
origApp.setAttribute(TestApplication.MY_ATTRIBUTE, "myval");
origE.getConfig(MyEntity.MY_CONFIG); // wait for it to be done
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertEquals(newE.getConfig(MyEntity.MY_CONFIG), "myval");
}
@Test(enabled=false) // not yet supported
public void testRestoresEntityDependentConfigUncompleted() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class)
.configure("myconfig", DependentConfiguration.attributeWhenReady(origApp, TestApplication.MY_ATTRIBUTE)));
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
newApp.setAttribute(TestApplication.MY_ATTRIBUTE, "myval");
assertEquals(newE.getConfig(MyEntity.MY_CONFIG), "myval");
}
@Test
public void testRestoresEntitySensors() throws Exception {
AttributeSensor<String> myCustomAttribute = Sensors.newStringSensor("my.custom.attribute");
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
origE.setAttribute(myCustomAttribute, "myval");
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertEquals(newE.getAttribute(myCustomAttribute), "myval");
}
@Test
public void testRestoresEntityIdAndDisplayName() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class)
.displayName("mydisplayname")
.configure("iconUrl", "file:///tmp/myicon.png"));
String eId = origE.getId();
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertEquals(newE.getId(), eId);
assertEquals(newE.getDisplayName(), "mydisplayname");
}
@Test
public void testCanCustomizeRebind() throws Exception {
MyEntity2 origE = origApp.createAndManageChild(EntitySpec.create(MyEntity2.class).configure("myfield", "myval"));
newApp = rebind();
MyEntity2 newE = (MyEntity2) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity2.class));
assertEquals(newE.getMyfield(), "myval");
}
@Test
public void testRebindsSubscriptions() throws Exception {
MyEntity2 origE = origApp.createAndManageChild(EntitySpec.create(MyEntity2.class).configure("subscribe", true));
newApp = rebind();
MyEntity2 newE = (MyEntity2) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity2.class));
newApp.setAttribute(TestApplication.MY_ATTRIBUTE, "mysensorval");
TestUtils.assertEventually(Suppliers.ofInstance(newE.getEvents()), Predicates.equalTo(ImmutableList.of("mysensorval")));
}
@Test
public void testHandlesReferencingOtherEntities() throws Exception {
MyEntity origOtherE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
MyEntityReffingOthers origE = origApp.createAndManageChild(EntitySpec.create(MyEntityReffingOthers.class)
.configure("entityRef", origOtherE));
origE.setAttribute(MyEntityReffingOthers.ENTITY_REF_SENSOR, origOtherE);
newApp = rebind();
MyEntityReffingOthers newE = (MyEntityReffingOthers) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntityReffingOthers.class));
MyEntity newOtherE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertAttributeEquals(newE, MyEntityReffingOthers.ENTITY_REF_SENSOR, newOtherE);
assertConfigEquals(newE, MyEntityReffingOthers.ENTITY_REF_CONFIG, newOtherE);
}
@Test
public void testHandlesReferencingOtherEntitiesInPojoField() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
ReffingEntity reffer = new ReffingEntity();
reffer.obj = origE;
reffer.entity = origE;
reffer.myEntity = origE;
origApp.setConfig(TestEntity.CONF_OBJECT, reffer);
newApp = rebind(false);
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
ReffingEntity reffer2 = (ReffingEntity)newApp.getConfig(TestEntity.CONF_OBJECT);
assertEquals(reffer2.myEntity, newE);
assertEquals(reffer2.entity, newE);
assertEquals(reffer2.obj, newE);
}
public static class ReffingEntity {
public MyEntity myEntity;
public Entity entity;
public Object obj;
@Override
public boolean equals(Object o) {
return (o instanceof ReffingEntity) && Objects.equal(entity, ((ReffingEntity)o).entity) && Objects.equal(obj, ((ReffingEntity)o).obj);
}
@Override
public int hashCode() {
return Objects.hashCode(entity, obj);
}
}
@Test
public void testHandlesReferencingOtherLocations() throws Exception {
MyLocation origLoc = new MyLocation();
MyEntityReffingOthers origE = origApp.createAndManageChild(EntitySpec.create(MyEntityReffingOthers.class)
.configure("locationRef", origLoc));
origE.setAttribute(MyEntityReffingOthers.LOCATION_REF_SENSOR, origLoc);
origApp.start(ImmutableList.of(origLoc));
newApp = rebind();
MyEntityReffingOthers newE = (MyEntityReffingOthers) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntityReffingOthers.class));
MyLocation newLoc = (MyLocation) Iterables.getOnlyElement(newApp.getLocations());
assertAttributeEquals(newE, MyEntityReffingOthers.LOCATION_REF_SENSOR, newLoc);
assertConfigEquals(newE, MyEntityReffingOthers.LOCATION_REF_CONFIG, newLoc);
}
@Test
public void testEntityManagementLifecycleAndVisibilityDuringRebind() throws Exception {
MyLatchingEntityImpl.latching = false;
MyLatchingEntity origE = origApp.createAndManageChild(EntitySpec.create(MyLatchingEntity.class));
MyLatchingEntityImpl.reset(); // after origE has been managed
MyLatchingEntityImpl.latching = true;
// Serialize and rebind, but don't yet manage the app
RebindTestUtils.waitForPersisted(origApp);
RebindTestUtils.checkCurrentMementoSerializable(origApp);
newManagementContext = new LocalManagementContext();
Thread thread = new Thread() {
public void run() {
try {
RebindTestUtils.rebind(newManagementContext, mementoDir, getClass().getClassLoader());
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
};
try {
thread.start();
assertTrue(MyLatchingEntityImpl.reconstructStartedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
assertNull(newManagementContext.getEntityManager().getEntity(origApp.getId()));
assertNull(newManagementContext.getEntityManager().getEntity(origE.getId()));
assertTrue(MyLatchingEntityImpl.managingStartedLatch.getCount() > 0);
MyLatchingEntityImpl.reconstructContinuesLatch.countDown();
assertTrue(MyLatchingEntityImpl.managingStartedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
assertNotNull(newManagementContext.getEntityManager().getEntity(origApp.getId()));
assertNull(newManagementContext.getEntityManager().getEntity(origE.getId()));
assertTrue(MyLatchingEntityImpl.managedStartedLatch.getCount() > 0);
MyLatchingEntityImpl.managingContinuesLatch.countDown();
assertTrue(MyLatchingEntityImpl.managedStartedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
assertNotNull(newManagementContext.getEntityManager().getEntity(origApp.getId()));
assertNotNull(newManagementContext.getEntityManager().getEntity(origE.getId()));
MyLatchingEntityImpl.managedContinuesLatch.countDown();
thread.join(TIMEOUT_MS);
assertFalse(thread.isAlive());
} finally {
thread.interrupt();
MyLatchingEntityImpl.reset();
}
}
@Test(groups="Integration") // takes more than 4 seconds, due to assertContinually calls
public void testSubscriptionAndPublishingOnlyActiveWhenEntityIsManaged() throws Exception {
MyLatchingEntityImpl.latching = false;
MyLatchingEntity origE = origApp.createAndManageChild(EntitySpec.create(MyLatchingEntity.class)
.configure("subscribe", TestApplication.MY_ATTRIBUTE)
.configure("publish", "myvaltopublish"));
MyLatchingEntityImpl.reset(); // after origE has been managed
MyLatchingEntityImpl.latching = true;
// Serialize and rebind, but don't yet manage the app
RebindTestUtils.waitForPersisted(origApp);
RebindTestUtils.checkCurrentMementoSerializable(origApp);
newManagementContext = new LocalManagementContext();
Thread thread = new Thread() {
public void run() {
try {
RebindTestUtils.rebind(newManagementContext, mementoDir, getClass().getClassLoader());
} catch (Exception e) {
throw Throwables.propagate(e);
}
}
};
try {
thread.start();
final List<Object> events = new CopyOnWriteArrayList<Object>();
newManagementContext.getSubscriptionManager().subscribe(null, MyLatchingEntityImpl.MY_SENSOR, new SensorEventListener<Object>() {
@Override public void onEvent(SensorEvent<Object> event) {
events.add(event.getValue());
}});
// In the entity's reconstruct, published events are queued, and subscriptions don't yet take effect
assertTrue(MyLatchingEntityImpl.reconstructStartedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
newManagementContext.getSubscriptionManager().publish(new BasicSensorEvent<String>(TestApplication.MY_ATTRIBUTE, null, "myvaltooearly"));
TestUtils.assertContinuallyFromJava(Suppliers.ofInstance(MyLatchingEntityImpl.events), Predicates.equalTo(Collections.emptyList()));
TestUtils.assertContinuallyFromJava(Suppliers.ofInstance(events), Predicates.equalTo(Collections.emptyList()));
// When the entity is notified of "managing", then subscriptions take effect (but missed events not delivered);
// published events remain queued
MyLatchingEntityImpl.reconstructContinuesLatch.countDown();
assertTrue(MyLatchingEntityImpl.managingStartedLatch.getCount() > 0);
TestUtils.assertContinuallyFromJava(Suppliers.ofInstance(events), Predicates.equalTo(Collections.emptyList()));
TestUtils.assertContinuallyFromJava(Suppliers.ofInstance(MyLatchingEntityImpl.events), Predicates.equalTo(Collections.emptyList()));
newManagementContext.getSubscriptionManager().publish(new BasicSensorEvent<String>(TestApplication.MY_ATTRIBUTE, null, "myvaltoreceive"));
TestUtils.assertEventually(Suppliers.ofInstance(MyLatchingEntityImpl.events), Predicates.equalTo(ImmutableList.of("myvaltoreceive")));
// When the entity is notified of "managed", its events are only then delivered
MyLatchingEntityImpl.managingContinuesLatch.countDown();
assertTrue(MyLatchingEntityImpl.managedStartedLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
TestUtils.assertEventually(Suppliers.ofInstance(MyLatchingEntityImpl.events), Predicates.equalTo(ImmutableList.of("myvaltoreceive")));
MyLatchingEntityImpl.managedContinuesLatch.countDown();
thread.join(TIMEOUT_MS);
assertFalse(thread.isAlive());
} finally {
thread.interrupt();
MyLatchingEntityImpl.reset();
}
}
@Test
public void testRestoresConfigKeys() throws Exception {
TestEntity origE = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.configure(TestEntity.CONF_NAME, "nameval")
.configure(TestEntity.CONF_LIST_PLAIN, ImmutableList.of("val1", "val2"))
.configure(TestEntity.CONF_MAP_PLAIN, ImmutableMap.of("akey", "aval")));
newApp = rebind();
final TestEntity newE = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
assertEquals(newE.getConfig(TestEntity.CONF_NAME), "nameval");
assertEquals(newE.getConfig(TestEntity.CONF_LIST_PLAIN), ImmutableList.of("val1", "val2"));
assertEquals(newE.getConfig(TestEntity.CONF_MAP_PLAIN), ImmutableMap.of("akey", "aval"));
}
@Test // ListConfigKey deprecated, as order no longer guaranteed
public void testRestoresListConfigKey() throws Exception {
TestEntity origE = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.configure(TestEntity.CONF_LIST_THING.subKey(), "val1")
.configure(TestEntity.CONF_LIST_THING.subKey(), "val2"));
newApp = rebind();
final TestEntity newE = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
//assertEquals(newE.getConfig(TestEntity.CONF_LIST_THING), ImmutableList.of("val1", "val2"));
assertEquals(ImmutableSet.copyOf(newE.getConfig(TestEntity.CONF_LIST_THING)), ImmutableSet.of("val1", "val2"));
}
@Test
public void testRestoresSetConfigKey() throws Exception {
TestEntity origE = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.configure(TestEntity.CONF_SET_THING.subKey(), "val1")
.configure(TestEntity.CONF_SET_THING.subKey(), "val2"));
newApp = rebind();
final TestEntity newE = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
assertEquals(newE.getConfig(TestEntity.CONF_SET_THING), ImmutableSet.of("val1", "val2"));
}
@Test
public void testRestoresMapConfigKey() throws Exception {
TestEntity origE = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.configure(TestEntity.CONF_MAP_THING.subKey("akey"), "aval")
.configure(TestEntity.CONF_MAP_THING.subKey("bkey"), "bval"));
newApp = rebind();
final TestEntity newE = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
assertEquals(newE.getConfig(TestEntity.CONF_MAP_THING), ImmutableMap.of("akey", "aval", "bkey", "bval"));
}
@Test
public void testRebindPreservesInheritedConfig() throws Exception {
origApp.setConfig(MyEntity.MY_CONFIG, "myValInSuper");
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
// rebind: inherited config is preserved
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertEquals(newE.getConfig(MyEntity.MY_CONFIG), "myValInSuper");
assertEquals(newApp.getConfig(MyEntity.MY_CONFIG), "myValInSuper");
// This config should be inherited by dynamically-added children of app
MyEntity newE2 = newApp.createAndManageChild(EntitySpec.create(MyEntity.class));
assertEquals(newE2.getConfig(MyEntity.MY_CONFIG), "myValInSuper");
}
@Test
public void testRebindPreservesGetConfigWithDefault() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
assertNull(origE.getConfig(MyEntity.MY_CONFIG));
assertEquals(origE.getConfig(MyEntity.MY_CONFIG, "mydefault"), "mydefault");
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertNull(newE.getConfig(MyEntity.MY_CONFIG));
assertEquals(newE.getConfig(MyEntity.MY_CONFIG, "mydefault"), "mydefault");
}
@Test
public void testRestoresUnmatchedConfig() throws Exception {
TestEntity origE = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.configure("myunmatchedkey", "myunmatchedval"));
TestEntity origChildE = origE.createAndManageChild(EntitySpec.create(TestEntity.class));
//Thread.sleep(1000);
newApp = rebind();
final TestEntity newE = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
final TestEntity newChildE = (TestEntity) Iterables.find(newE.getChildren(), Predicates.instanceOf(TestEntity.class));
assertEquals(newE.getAllConfigBag().getStringKey("myunmatchedkey"), "myunmatchedval");
assertEquals(newE.getLocalConfigBag().getStringKey("myunmatchedkey"), "myunmatchedval");
try {
assertEquals(newChildE.getAllConfigBag().getStringKey("myunmatchedkey"), "myunmatchedval");
assertFalse(newChildE.getLocalConfigBag().containsKey("myunmatchedkey"));
} catch (Throwable t) {
t.printStackTrace();
throw Exceptions.propagate(t);
}
}
@Test
public void testRebindPersistsNullAttribute() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
origE.setAttribute(MyEntity.MY_SENSOR, null);
assertNull(origE.getAttribute(MyEntity.MY_SENSOR));
newApp = rebind();
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(MyEntity.class));
assertNull(newE.getAttribute(MyEntity.MY_SENSOR));
}
@Test
public void testFailureGeneratingMementoStillPersistsOtherEntities() throws Exception {
MyEntity origE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class));
MyEntity origFailingE = origApp.createAndManageChild(EntitySpec.create(MyEntity.class).impl(MyEntityFailingImpl.class));
newApp = rebind(false);
MyEntity newE = (MyEntity) Iterables.find(newApp.getChildren(), EntityPredicates.idEqualTo(origE.getId()));
MyEntity newFailingE = (MyEntity) Iterables.find(newApp.getChildren(), EntityPredicates.idEqualTo(origFailingE.getId()), null);
// Expect origFailingE to never have been persisted, but origE to have worked
assertNotNull(newE);
assertNull(newFailingE);
}
@Test(invocationCount=100, groups="Integration")
public void testFailureGeneratingMementoStillPersistsOtherEntitiesRepeatedly() throws Exception {
testFailureGeneratingMementoStillPersistsOtherEntities();
}
@Test
public void testRebindWhenPreviousAppDestroyedHasNoApp() throws Exception {
origApp.stop();
RebindTestUtils.waitForPersisted(origManagementContext);
LocalManagementContext newManagementContext = RebindTestUtils.newPersistingManagementContextUnstarted(mementoDir, classLoader);
List<Application> newApps = newManagementContext.getRebindManager().rebind(classLoader);
newManagementContext.getRebindManager().start();
assertEquals(newApps.size(), 0, "apps="+newApps);
assertEquals(newManagementContext.getApplications().size(), 0, "apps="+newManagementContext.getApplications());
}
@Test(invocationCount=100, groups="Integration")
public void testRebindWhenPreviousAppDestroyedHasNoAppRepeatedly() throws Exception {
testRebindWhenPreviousAppDestroyedHasNoApp();
}
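// Persists the original app, optionally checks that the memento is serializable,
// and then rebinds a new app instance from the memento directory.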
private TestApplication rebind() throws Exception {
return rebind(true);
}
private TestApplication rebind(boolean checkSerializable) throws Exception {
RebindTestUtils.waitForPersisted(origApp);
if (checkSerializable) {
RebindTestUtils.checkCurrentMementoSerializable(origApp);
}
return (TestApplication) RebindTestUtils.rebind(mementoDir, getClass().getClassLoader());
}
// TODO Don't want to extend EntityLocal, but tests want to call app.setAttribute
@ImplementedBy(MyEntityImpl.class)
public interface MyEntity extends Entity, Startable, EntityLocal {
@SetFromFlag("myconfig")
public static final ConfigKey<String> MY_CONFIG = new BasicConfigKey<String>(
String.class, "test.myentity.myconfig", "My test config");
public static final AttributeSensor<String> MY_SENSOR = new BasicAttributeSensor<String>(
String.class, "test.myentity.mysensor", "My test sensor");
}
public static class MyEntityImpl extends AbstractEntity implements MyEntity {
private final Object dummy = new Object(); // so not serializable
public MyEntityImpl() {
}
@Override
public void start(Collection<? extends Location> locations) {
addLocations(locations);
}
@Override
public void stop() {
}
@Override
public void restart() {
}
}
// TODO Don't want to extend EntityLocal, but tests want to call app.setAttribute
@ImplementedBy(MyEntityReffingOthersImpl.class)
public interface MyEntityReffingOthers extends Entity, EntityLocal {
@SetFromFlag("entityRef")
public static final ConfigKey<Entity> ENTITY_REF_CONFIG = new BasicConfigKey<Entity>(
Entity.class, "test.config.entityref", "Ref to other entity");
@SetFromFlag("locationRef")
public static final ConfigKey<Location> LOCATION_REF_CONFIG = new BasicConfigKey<Location>(
Location.class, "test.config.locationref", "Ref to other location");
public static final AttributeSensor<Entity> ENTITY_REF_SENSOR = new BasicAttributeSensor<Entity>(
Entity.class, "test.attribute.entityref", "Ref to other entity");
public static final AttributeSensor<Location> LOCATION_REF_SENSOR = new BasicAttributeSensor<Location>(
Location.class, "test.attribute.locationref", "Ref to other location");
}
public static class MyEntityReffingOthersImpl extends AbstractEntity implements MyEntityReffingOthers {
@SetFromFlag("entityRef")
public static final ConfigKey<Entity> ENTITY_REF_CONFIG = new BasicConfigKey<Entity>(
Entity.class, "test.config.entityref", "Ref to other entity");
@SetFromFlag("locationRef")
public static final ConfigKey<Location> LOCATION_REF_CONFIG = new BasicConfigKey<Location>(
Location.class, "test.config.locationref", "Ref to other location");
public static final AttributeSensor<Entity> ENTITY_REF_SENSOR = new BasicAttributeSensor<Entity>(
Entity.class, "test.attribute.entityref", "Ref to other entity");
public static final AttributeSensor<Location> LOCATION_REF_SENSOR = new BasicAttributeSensor<Location>(
Location.class, "test.attribute.locationref", "Ref to other location");
private final Object dummy = new Object(); // so not serializable
public MyEntityReffingOthersImpl() {
}
}
@ImplementedBy(MyEntity2Impl.class)
public interface MyEntity2 extends Entity {
@SetFromFlag("myconfig")
public static final ConfigKey<String> MY_CONFIG = new BasicConfigKey<String>(
String.class, "test.myconfig", "My test config");
@SetFromFlag("subscribe")
public static final ConfigKey<Boolean> SUBSCRIBE = new BasicConfigKey<Boolean>(
Boolean.class, "test.subscribe", "Whether to do some subscriptions on re-bind", false);
public List<String> getEvents();
public String getMyfield();
}
public static class MyEntity2Impl extends AbstractEntity implements MyEntity2 {
@SetFromFlag
String myfield;
final List<String> events = new CopyOnWriteArrayList<String>();
private final Object dummy = new Object(); // so not serializable
public MyEntity2Impl() {
}
public List<String> getEvents() {
return events;
}
public String getMyfield() {
return myfield;
}
@Override
public void onManagementStarting() {
if (getConfig(SUBSCRIBE)) {
subscribe(getApplication(), TestApplication.MY_ATTRIBUTE, new SensorEventListener<String>() {
@Override public void onEvent(SensorEvent<String> event) {
events.add(event.getValue());
}
});
}
}
@Override
public RebindSupport<EntityMemento> getRebindSupport() {
return new BasicEntityRebindSupport(this) {
@Override public EntityMemento getMemento() {
// Note: using MutableMap so accepts nulls
return getMementoWithProperties(MutableMap.<String,Object>of("myfield", myfield));
}
@Override protected void doReconstruct(RebindContext rebindContext, EntityMemento memento) {
super.doReconstruct(rebindContext, memento);
myfield = (String) memento.getCustomField("myfield");
}
};
}
}
@ImplementedBy(MyLatchingEntityImpl.class)
public interface MyLatchingEntity extends Entity {
@SetFromFlag("subscribe")
public static final ConfigKey<AttributeSensor<?>> SUBSCRIBE = new BasicConfigKey(
AttributeSensor.class, "test.mylatchingentity.subscribe", "Sensor to subscribe to (or null means don't)", null);
@SetFromFlag("publish")
public static final ConfigKey<String> PUBLISH = new BasicConfigKey<String>(
String.class, "test.mylatchingentity.publish", "Value to publish (or null means don't)", null);
public static final AttributeSensor<String> MY_SENSOR = new BasicAttributeSensor<String>(
String.class, "test.mylatchingentity.mysensor", "My test sensor");
}
public static class MyLatchingEntityImpl extends AbstractEntity implements MyLatchingEntity {
static volatile CountDownLatch reconstructStartedLatch;
static volatile CountDownLatch reconstructContinuesLatch;
static volatile CountDownLatch managingStartedLatch;
static volatile CountDownLatch managingContinuesLatch;
static volatile CountDownLatch managedStartedLatch;
static volatile CountDownLatch managedContinuesLatch;
static volatile boolean latching = false;
static volatile List<Object> events;
static void reset() {
latching = false;
events = new CopyOnWriteArrayList<Object>();
reconstructStartedLatch = new CountDownLatch(1);
reconstructContinuesLatch = new CountDownLatch(1);
managingStartedLatch = new CountDownLatch(1);
managingContinuesLatch = new CountDownLatch(1);
managedStartedLatch = new CountDownLatch(1);
managedContinuesLatch = new CountDownLatch(1);
}
public MyLatchingEntityImpl() {
}
private void onReconstruct() {
if (getConfig(SUBSCRIBE) != null) {
getManagementSupport().getSubscriptionContext().subscribe(null, getConfig(SUBSCRIBE), new SensorEventListener<Object>() {
@Override public void onEvent(SensorEvent<Object> event) {
events.add(event.getValue());
}});
}
if (getConfig(PUBLISH) != null) {
setAttribute(MY_SENSOR, getConfig(PUBLISH));
}
if (latching) {
reconstructStartedLatch.countDown();
try {
reconstructContinuesLatch.await();
} catch (InterruptedException e) {
throw new RuntimeInterruptedException(e);
}
}
}
@Override
public void onManagementStarting() {
if (latching) {
managingStartedLatch.countDown();
try {
managingContinuesLatch.await();
} catch (InterruptedException e) {
throw new RuntimeInterruptedException(e);
}
}
}
@Override
public void onManagementStarted() {
if (latching) {
managedStartedLatch.countDown();
try {
managedContinuesLatch.await();
} catch (InterruptedException e) {
throw new RuntimeInterruptedException(e);
}
}
}
@Override
public RebindSupport<EntityMemento> getRebindSupport() {
return new BasicEntityRebindSupport(this) {
@Override protected void doReconstruct(RebindContext rebindContext, EntityMemento memento) {
MyLatchingEntityImpl.this.onReconstruct();
}
};
}
}
public static class MyEntityFailingImpl extends MyEntityImpl implements MyEntity {
@Override
public Map<AttributeSensor, Object> getAllAttributes() {
throw new RuntimeException("Simulating failure in "+this+", which will cause memento-generation to fail");
}
}
}
| {'content_hash': 'bb738eae180686f94020f4b3ddf7e49e', 'timestamp': '', 'source': 'github', 'line_count': 805, 'max_line_length': 150, 'avg_line_length': 45.87826086956522, 'alnum_prop': 0.6787338893100834, 'repo_name': 'merrickjyoungibm/brooklyn', 'id': '20bd8415795f98bd8752f341ab70510f62bb45aa', 'size': '36932', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'core/src/test/java/brooklyn/entity/rebind/RebindEntityTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': []} |
echo "success" | {'content_hash': 'd92097bde03cb3414629ae220a06063a', 'timestamp': '', 'source': 'github', 'line_count': 1, 'max_line_length': 14, 'avg_line_length': 14.0, 'alnum_prop': 0.7857142857142857, 'repo_name': 'OresteVisari/alien4cloud', 'id': '01be139d2eaefd8788380865a28c51e16767afb3', 'size': '29', 'binary': False, 'copies': '9', 'ref': 'refs/heads/develop', 'path': 'alien4cloud-core/src/test/resources/data/csars/custom-interface-mock-types/scripts/success.sh', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ApacheConf', 'bytes': '59321'}, {'name': 'Batchfile', 'bytes': '526'}, {'name': 'CSS', 'bytes': '48930'}, {'name': 'Cucumber', 'bytes': '738188'}, {'name': 'Groovy', 'bytes': '108257'}, {'name': 'HTML', 'bytes': '421060'}, {'name': 'Java', 'bytes': '3834486'}, {'name': 'JavaScript', 'bytes': '1304989'}, {'name': 'Shell', 'bytes': '41378'}]} |
/* FreezeDLLMain.cpp
This is a DLLMain suitable for frozen applications/DLLs on
a Windows platform.
The general problem is that many Python extension modules may define
DLL main functions, but when statically linked together to form
a frozen application, this DLLMain symbol exists multiple times.
The solution is:
* Each module checks for a frozen build, and if so, defines its DLLMain
function as "__declspec(dllexport) DllMain%module%"
(eg, DllMainpythoncom, or DllMainpywintypes)
* The frozen .EXE/.DLL links against this module, which provides
the single DllMain.
* This DllMain attempts to locate and call the DllMain for each
of the extension modules.
* This code also has hooks to "simulate" DllMain when used from
a frozen .EXE.
At this stage, there is a static table of "possibly embedded modules".
This should change to something better, but it will work OK for now.
Note that this scheme does not handle dependencies in the order
of DllMain calls - except it does call pywintypes first :-)
As an example of how an extension module with a DllMain should be
changed, here is a snippet from the pythoncom extension module.
// start of example code from pythoncom's DllMain.cpp
#ifndef BUILD_FREEZE
#define DLLMAIN DllMain
#define DLLMAIN_DECL
#else
#define DLLMAIN DllMainpythoncom
#define DLLMAIN_DECL __declspec(dllexport)
#endif
extern "C" DLLMAIN_DECL
BOOL WINAPI DLLMAIN(HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
// end of example code from pythoncom's DllMain.cpp
***************************************************************************/
#include "windows.h"
static char *possibleModules[] = {
"pywintypes",
"pythoncom",
"win32ui",
NULL,
};
BOOL CallModuleDllMain(char *modName, DWORD dwReason);
/*
Called by a frozen .EXE only, so that built-in extension
modules are initialized correctly
*/
void PyWinFreeze_ExeInit(void)
{
char **modName;
for (modName = possibleModules;*modName;*modName++) {
/* printf("Initialising '%s'\n", *modName); */
CallModuleDllMain(*modName, DLL_PROCESS_ATTACH);
}
}
/*
Called by a frozen .EXE only, so that built-in extension
modules are cleaned up
*/
void PyWinFreeze_ExeTerm(void)
{
// Must go backwards
char **modName;
for (modName = possibleModules+(sizeof(possibleModules) / sizeof(char *))-2;
modName >= possibleModules;
*modName--) {
/* printf("Terminating '%s'\n", *modName);*/
CallModuleDllMain(*modName, DLL_PROCESS_DETACH);
}
}
BOOL WINAPI DllMain(HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
{
BOOL ret = TRUE;
switch (dwReason) {
case DLL_PROCESS_ATTACH:
{
char **modName;
for (modName = possibleModules;*modName;*modName++) {
BOOL ok = CallModuleDllMain(*modName, dwReason);
if (!ok)
ret = FALSE;
}
break;
}
case DLL_PROCESS_DETACH:
{
// Must go backwards
char **modName;
for (modName = possibleModules+(sizeof(possibleModules) / sizeof(char *))-2;
modName >= possibleModules;
*modName--)
CallModuleDllMain(*modName, DLL_PROCESS_DETACH);
break;
}
}
return ret;
}
BOOL CallModuleDllMain(char *modName, DWORD dwReason)
{
BOOL (WINAPI * pfndllmain)(HINSTANCE, DWORD, LPVOID);
char funcName[255];
HMODULE hmod = GetModuleHandle(NULL);
strcpy(funcName, "_DllMain");
strcat(funcName, modName);
strcat(funcName, "@12"); // stdcall convention.
pfndllmain = (BOOL (WINAPI *)(HINSTANCE, DWORD, LPVOID))GetProcAddress(hmod, funcName);
if (pfndllmain==NULL) {
/* No function by that name exported - then that module does
not appear in our frozen program - return OK
*/
return TRUE;
}
return (*pfndllmain)(hmod, dwReason, NULL);
}
| {'content_hash': '276136d53135a59c5490ca1d15a52ea6', 'timestamp': '', 'source': 'github', 'line_count': 134, 'max_line_length': 91, 'avg_line_length': 29.888059701492537, 'alnum_prop': 0.6484394506866417, 'repo_name': 'andreparrish/python-for-android', 'id': 'a8cc88518452db4ac2a51d5908bc902a71ecc7d9', 'size': '4005', 'binary': False, 'copies': '59', 'ref': 'refs/heads/master', 'path': 'python3-alpha/python3-src/PC/frozen_dllmain.c', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Ada', 'bytes': '89080'}, {'name': 'Assembly', 'bytes': '1436816'}, {'name': 'C', 'bytes': '51252151'}, {'name': 'C#', 'bytes': '55625'}, {'name': 'C++', 'bytes': '1031594'}, {'name': 'CLIPS', 'bytes': '5291'}, {'name': 'CMake', 'bytes': '6041'}, {'name': 'CSS', 'bytes': '13063'}, {'name': 'D', 'bytes': '542'}, {'name': 'Emacs Lisp', 'bytes': '1639'}, {'name': 'GAP', 'bytes': '14130'}, {'name': 'Groff', 'bytes': '295313'}, {'name': 'HTML', 'bytes': '10823343'}, {'name': 'Inno Setup', 'bytes': '18486'}, {'name': 'Java', 'bytes': '639396'}, {'name': 'Makefile', 'bytes': '2079593'}, {'name': 'Objective-C', 'bytes': '1422355'}, {'name': 'OpenEdge ABL', 'bytes': '125979'}, {'name': 'Pascal', 'bytes': '42411'}, {'name': 'Perl', 'bytes': '4226168'}, {'name': 'Perl6', 'bytes': '27602'}, {'name': 'PostScript', 'bytes': '13803'}, {'name': 'Prolog', 'bytes': '30605'}, {'name': 'Protocol Buffer', 'bytes': '2764'}, {'name': 'Python', 'bytes': '36074899'}, {'name': 'R', 'bytes': '9180'}, {'name': 'SAS', 'bytes': '1847'}, {'name': 'Scheme', 'bytes': '4249'}, {'name': 'Shell', 'bytes': '1433409'}, {'name': 'Smarty', 'bytes': '393'}, {'name': 'TeX', 'bytes': '524112'}, {'name': 'VimL', 'bytes': '9349'}, {'name': 'Visual Basic', 'bytes': '481'}, {'name': 'XS', 'bytes': '8638'}, {'name': 'XSLT', 'bytes': '24576'}, {'name': 'eC', 'bytes': '8477'}]} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.mgu.jogo</groupId>
<artifactId>jogo-lang</artifactId>
<version>0.2.0-SNAPSHOT</version>
</parent>
<groupId>com.mgu.jogo</groupId>
<artifactId>jogo-lang-interpreter</artifactId>
<packaging>jar</packaging>
<version>0.2.0-SNAPSHOT</version>
<name>[jogo-lang] Jogo Language Core - Interpreter</name>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>
<dependencies>
<dependency>
<groupId>com.mgu.jogo</groupId>
<artifactId>jogo-lang-parser</artifactId>
<version>0.2.0-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</dependency>
<!-- TEST -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
| {'content_hash': 'd321746f0dcb4d0b5a0c6529b05a2cc9', 'timestamp': '', 'source': 'github', 'line_count': 50, 'max_line_length': 105, 'avg_line_length': 32.62, 'alnum_prop': 0.5941140404659718, 'repo_name': 'mguenther/jogo-lang', 'id': 'a6c45a3e4c9044396b5276ece29b2019400b4bc0', 'size': '1631', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'jogo-lang-interpreter/pom.xml', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '188'}, {'name': 'Java', 'bytes': '126694'}]} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>Builtin help</title>
</head>
<body>
<div class='doc-body' src='txt/TempDiv.txt'></div>
<div class='doc-body' src='txt/Unit.txt'></div>
<div class='doc-body' src='txt/Widget.txt'></div>
<p> </p>
<link rel="stylesheet" href="built_inline.css" />
<script src='built_inline.js'></script>
</body>
</html>
| {'content_hash': 'ddb677ac9e69d078a152b94548c7e47d', 'timestamp': '', 'source': 'github', 'line_count': 20, 'max_line_length': 50, 'avg_line_length': 18.6, 'alnum_prop': 0.6397849462365591, 'repo_name': 'rewgt/shadow-server', 'id': 'fd150785609c5b167cfc3210d91a41d534cc8bbe', 'size': '372', 'binary': False, 'copies': '1', 'ref': 'refs/heads/gh-pages', 'path': 'public/static/files/rewgt/doc/TempDiv.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'CSS', 'bytes': '44623'}, {'name': 'HTML', 'bytes': '611286'}, {'name': 'JavaScript', 'bytes': '898819'}, {'name': 'Shell', 'bytes': '291'}]} |
import { FormNote } from 'elemental';
import React from 'react';
import Popout from './Popout';
import PopoutList from './PopoutList';
import vkey from 'vkey';
import CurrentListStore from '../stores/CurrentListStore';
var ListSort = React.createClass({
displayName: 'ListSort',
getInitialState () {
return {
altDown: false,
popoutIsOpen: false
};
},
componentDidMount () {
document.body.addEventListener('keydown', this.handleKeyDown, false);
document.body.addEventListener('keyup', this.handleKeyUp, false);
},
componentWillUnmount () {
document.body.removeEventListener('keydown', this.handleKeyDown);
document.body.removeEventListener('keyup', this.handleKeyUp);
},
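// Track whether the alt key is held; handleSortSelect uses it to invert the
// chosen sort direction (see the hint in the popout footer).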
handleKeyDown (e) {
if (vkey[e.keyCode] !== '<alt>') return;
this.setState({
altDown: true
});
},
handleKeyUp (e) {
if (vkey[e.keyCode] !== '<alt>') return;
this.setState({
altDown: false
});
},
openPopout () {
this.setState({
popoutIsOpen: true
});
},
closePopout () {
this.setState({
popoutIsOpen: false
});
},
handleSortSelect (path, inverted) {
if (this.state.altDown) inverted = true;
if (inverted) path = '-' + path;
this.closePopout();
CurrentListStore.setActiveSort(path);
},
renderSortOptions () {
// TODO: Handle multiple sort paths
let activeSortPath = CurrentListStore.getActiveSort().paths[0];
return CurrentListStore.getAvailableColumns().map((el, i) => {
if (el.type === 'heading') {
return <PopoutList.Heading key={'heading_' + i}>{el.content}</PopoutList.Heading>;
}
let path = el.field.path;
let isSelected = activeSortPath && activeSortPath.path === path;
let isInverted = isSelected && activeSortPath.invert;
let icon = this.state.altDown || (isSelected && !isInverted) ? 'chevron-up' : 'chevron-down';
return (
<PopoutList.Item
key={'column_' + el.field.path}
icon={icon}
isSelected={isSelected}
label={el.field.label}
onClick={() => {
this.handleSortSelect(path, isSelected && !isInverted);
}} />
);
});
},
render () {
// TODO: Handle multiple sort paths
let activeSortPath = CurrentListStore.getActiveSort().paths[0];
return (
<span>
{activeSortPath && (
<span>
<span className="ListHeader__sortedby"> sorted by </span>
<a id="listHeaderSortButton" href="javascript:;" onClick={this.openPopout}>
{activeSortPath.label.toLowerCase()}
{activeSortPath.invert ? ' (descending)' : ''}
<span className="disclosure-arrow" />
</a>
</span>
)}
<Popout isOpen={this.state.popoutIsOpen} onCancel={this.closePopout} relativeToID="listHeaderSortButton">
<Popout.Header title="Sort" />
<Popout.Body scrollable>
<PopoutList>
{this.renderSortOptions()}
</PopoutList>
</Popout.Body>
<Popout.Footer>
<FormNote>Hold <kbd>alt</kbd> to toggle ascending/descending</FormNote>
</Popout.Footer>
</Popout>
</span>
);
}
});
module.exports = ListSort;
| {'content_hash': '14e0f5aab8487938c49fc94eadbc402f', 'timestamp': '', 'source': 'github', 'line_count': 109, 'max_line_length': 109, 'avg_line_length': 27.577981651376145, 'alnum_prop': 0.6520292747837658, 'repo_name': 'codevlabs/keystone', 'id': '7f02bfec6c0299a1fbe8bff0fdbdd8cf6d47574f', 'size': '3006', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'admin/client/components/ListSort.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '170579'}, {'name': 'HTML', 'bytes': '30512'}, {'name': 'JavaScript', 'bytes': '1167955'}]} |
Convert Plugin
==============

The ``convert`` plugin lets you convert parts of your collection to a
directory of your choice, transcoding audio and embedding album art along the
way. It can transcode to and from any format using a configurable command
line.

Installation
------------

To use the ``convert`` plugin, first enable it in your configuration (see
:ref:`using-plugins`). By default, the plugin depends on `FFmpeg`_ to
transcode the audio, so you might want to install it.

.. _FFmpeg: https://ffmpeg.org
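
For example, enabling the plugin is just a matter of adding ``convert`` to the
``plugins`` option in your beets configuration file; FFmpeg itself is
installed separately (for example, via your OS package manager)::

    plugins: convert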

Usage
-----

To convert a part of your collection, run ``beet convert QUERY``. The
command will transcode all the files matching the query to the
destination directory given by the ``-d`` (``--dest``) option or the
``dest`` configuration. The path layout mirrors that of your library,
but it may be customized through the ``paths`` configuration. Files
that have been previously converted---and thus already exist in the
destination directory---will be skipped.

The plugin uses a command-line program to transcode the audio. With the
``-f`` (``--format``) option you can choose the transcoding command
and customize the available commands
:ref:`through the configuration <convert-format-config>`.

Unless the ``-y`` (``--yes``) flag is set, the command will list all
the items to be converted and ask for your confirmation.

The ``-a`` (or ``--album``) option causes the command
to match albums instead of tracks.

By default, the command places converted files into the destination directory
and leaves your library pristine. To instead back up your original files into
the destination directory and keep converted files in your library, use the
``-k`` (or ``--keep-new``) option.

To test your configuration without taking any actions, use the ``--pretend``
flag. The plugin will print out the commands it will run instead of executing
them.

By default, files that do not need to be transcoded will be copied to their
destination. Passing the ``-l`` (``--link``) flag creates symbolic links
instead; passing ``-H`` (``--hardlink``) creates hard links.
Note that album art embedding is disabled for files that are linked.
Refer to the ``link`` and ``hardlink`` options below.
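
As a concrete example, the following command would transcode all of one
artist's albums to Ogg Vorbis under an external drive, asking for
confirmation before starting (the destination path and the query are, of
course, just placeholders)::

    beet convert -a -f ogg -d /mnt/usb/music artist:Beatles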

Configuration
-------------

To configure the plugin, make a ``convert:`` section in your configuration
file. The available options are:

- **auto**: Import transcoded versions of your files automatically during
  imports. With this option enabled, the importer will transcode all (in the
  default configuration) non-MP3 files over the maximum bitrate before adding
  them to your library.
  Default: ``no``.
- **tmpdir**: The directory where temporary files will be stored during import.
  Default: none (system default).
- **copy_album_art**: Copy album art when copying or transcoding albums matched
  using the ``-a`` option. Default: ``no``.
- **album_art_maxwidth**: Downscale album art if it's too big. The resize
  operation reduces image width to at most ``maxwidth`` pixels while
  preserving the aspect ratio.
- **dest**: The directory where the files will be converted (or copied) to.
  Default: none.
- **embed**: Embed album art in converted items. Default: ``yes``.
- **id3v23**: Can be used to override the global ``id3v23`` option. Default:
  ``inherit``.
- **max_bitrate**: All lossy files with a higher bitrate will be
  transcoded and those with a lower bitrate will simply be copied. Note that
  this does not guarantee that all converted files will have a lower
  bitrate---that depends on the encoder and its configuration.
  Default: none.
- **no_convert**: Does not transcode items matching the provided query string
  (see :doc:`/reference/query`), e.g. ``format:AAC, format:WMA`` or
  ``path::\.(m4a|wma)$``.
- **never_convert_lossy_files**: Cross-conversions between lossy codecs---such
  as mp3, ogg vorbis, etc.---make little sense as they will decrease quality
  even further. If set to ``yes``, lossy files are always copied.
  Default: ``no``.
- **paths**: The directory structure and naming scheme for the converted
  files. Uses the same format as the top-level ``paths`` section (see
  :ref:`path-format-config`).
  Default: Reuse your top-level path format settings.
- **quiet**: Prevent the plugin from announcing every file it processes.
  Default: ``false``.
- **threads**: The number of threads to use for parallel encoding.
  By default, the plugin will detect the number of processors available and use
  them all.
- **link**: By default, files that do not need to be transcoded will be copied
  to their destination. This option creates symbolic links instead. Note that
  options such as ``embed`` that modify the output files after the transcoding
  step will cause the original files to be modified as well if ``link`` is
  enabled. For this reason, album-art embedding is disabled
  for files that are linked.
  Default: ``false``.
- **hardlink**: This option works similarly to ``link``, but it creates
  hard links instead of symlinks.
  This option overrides ``link``. Only works when converting to a directory
  on the same filesystem as the library.
  Default: ``false``.

You can also configure the format to use for transcoding (see the next
section):

- **format**: The name of the format to transcode to when none is specified on
  the command line.
  Default: ``mp3``.
- **formats**: A set of formats and associated command lines for transcoding
  each.
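
For instance, a ``convert:`` section that transcodes automatically on import,
writes the results to a separate directory, and copies existing lossy files
rather than re-encoding them might look like this (the destination path and
numeric values are purely illustrative)::

    convert:
        auto: yes
        dest: ~/converted
        format: mp3
        max_bitrate: 320
        never_convert_lossy_files: yes
        threads: 4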

.. _convert-format-config:

Configuring the transcoding command
```````````````````````````````````

You can customize the transcoding command through the ``formats`` map
and select a command with the ``--format`` command-line option or the
``format`` configuration.

::

    convert:
        format: speex
        formats:
            speex:
                command: ffmpeg -i $source -y -acodec speex $dest
                extension: spx
            wav: ffmpeg -i $source -y -acodec pcm_s16le $dest

In this example ``beet convert`` will use the *speex* command by
default. To convert the audio to `wav`, run ``beet convert -f wav``.
This will also use the format key (``wav``) as the file extension.

Each entry in the ``formats`` map consists of a key (the name of the
format) as well as the command and optionally the file extension.
``extension`` is the filename extension to be used for newly transcoded
files. If only the command is given as a string or the extension is not
provided, the file extension defaults to the format's name. ``command`` is the
command to use to transcode audio. The tokens ``$source`` and ``$dest`` in the
command are replaced with the paths to the existing and new file.

The plugin comes with default commands for the most common audio
formats: `mp3`, `alac`, `flac`, `aac`, `opus`, `ogg`, `wmv`. For
details have a look at the output of ``beet config -d``.

For a one-command-fits-all solution use the ``convert.command`` and
``convert.extension`` options. If these are set, the formats are ignored
and the given command is used for all conversions.

::

    convert:
        command: ffmpeg -i $source -y -vn -aq 2 $dest
        extension: mp3

Gapless MP3 encoding
````````````````````

While FFmpeg cannot produce "`gapless`_" MP3s by itself, you can create them
by using `LAME`_ directly. Use a shell script like this to pipe the output of
FFmpeg into the LAME tool::

    #!/bin/sh
    ffmpeg -i "$1" -f wav - | lame -V 2 --noreplaygain - "$2"

Then configure the ``convert`` plugin to use the script::

    convert:
        command: /path/to/script.sh $source $dest
        extension: mp3

This strategy configures FFmpeg to produce a WAV file with an accurate length
header for LAME to use. Using ``--noreplaygain`` disables gain analysis; you
can use the :doc:`/plugins/replaygain` to do this analysis. See the LAME
`documentation`_ and the `HydrogenAudio wiki`_ for other LAME configuration
options and a thorough discussion of MP3 encoding.

.. _documentation: http://lame.sourceforge.net/using.php
.. _HydrogenAudio wiki: https://wiki.hydrogenaud.io/index.php?title=LAME
.. _gapless: https://wiki.hydrogenaud.io/index.php?title=Gapless_playback
.. _LAME: https://lame.sourceforge.net/
| {'content_hash': '73e75dc0dbc449ad6ee32d05bb9b0400', 'timestamp': '', 'source': 'github', 'line_count': 195, 'max_line_length': 79, 'avg_line_length': 41.805128205128206, 'alnum_prop': 0.719332679097154, 'repo_name': 'SusannaMaria/beets', 'id': '6e9d00a11b91cbe0f8c239ebc8bc84ff7ed21975', 'size': '8152', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'docs/plugins/convert.rst', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '2951'}, {'name': 'HTML', 'bytes': '3306'}, {'name': 'JavaScript', 'bytes': '85948'}, {'name': 'Python', 'bytes': '1980656'}, {'name': 'Shell', 'bytes': '7448'}]} |
using Saule.Queries.Filtering;
using Saule.Queries.Pagination;
using Saule.Queries.Sorting;
namespace Saule.Queries
{
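    /// <summary>
    /// Aggregates the pagination, sorting and filtering contexts that apply
    /// to a single query.
    /// </summary>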
internal class QueryContext
{
public PaginationContext Pagination { get; set; }
public SortingContext Sorting { get; set; }
public FilteringContext Filtering { get; set; }
}
}
| {'content_hash': '4960b6e2934b81104c5d81de0350fb21', 'timestamp': '', 'source': 'github', 'line_count': 15, 'max_line_length': 57, 'avg_line_length': 22.266666666666666, 'alnum_prop': 0.6946107784431138, 'repo_name': 'IntertechInc/saule', 'id': 'db5f4bb1b43a601e999438fed88ebb477f1a6527', 'size': '336', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'Saule/Queries/QueryContext.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ASP', 'bytes': '102'}, {'name': 'C#', 'bytes': '235971'}, {'name': 'PowerShell', 'bytes': '503'}]} |
package org.knowm.xchange.dvchain.v4.dto.trades;
import static org.junit.Assert.assertEquals;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigDecimal;
import java.time.Instant;
import org.junit.Test;
import org.knowm.xchange.dvchain.dto.trade.DVChainNewMarketOrder;
import org.knowm.xchange.dvchain.dto.trade.DVChainTradesResponse;
public class DVChainTradesJSONTest {
@Test
public void testUnmarshal() throws IOException {
// Read in the JSON from the example resources
InputStream is =
DVChainTradesJSONTest.class.getResourceAsStream(
"/org/knowm/xchange/dvchain/v4/trades/example-trades-data.json");
// Use Jackson to parse it
ObjectMapper mapper = new ObjectMapper();
DVChainTradesResponse readValue = mapper.readValue(is, DVChainTradesResponse.class);
assertEquals(readValue.getTotal().intValue(), 1);
assertEquals(readValue.getPageCount().intValue(), 1);
assertEquals(readValue.getData().get(0).getId(), "5bbd1c6709ac22627841ad32");
assertEquals(
readValue.getData().get(0).getCreatedAt(), Instant.parse("2018-10-09T21:23:51.757Z"));
assertEquals(readValue.getData().get(0).getPrice(), new BigDecimal(("513.3")));
assertEquals(readValue.getData().get(0).getQuantity(), new BigDecimal(".1"));
assertEquals(readValue.getData().get(0).getSide(), "Buy");
assertEquals(readValue.getData().get(0).getAsset(), "BCH");
assertEquals(readValue.getData().get(0).getStatus(), "Complete");
assertEquals(readValue.getData().get(0).getUser().getFirstName(), "Roger");
assertEquals(readValue.getData().get(0).getUser().getLastName(), "Ver");
assertEquals(readValue.getData().get(0).getUser().getId(), "5ab545a4b933aa1f78e25f34");
}
@Test
public void testPlaceOrder() throws IOException {
ObjectMapper mapper = new ObjectMapper();
DVChainNewMarketOrder newTrade =
new DVChainNewMarketOrder("Buy", new BigDecimal("527.51"), new BigDecimal(".1"), "BCH");
// Read in the JSON from the example resources
InputStream is =
DVChainTradesJSONTest.class.getResourceAsStream(
"/org/knowm/xchange/dvchain/v4/trades/example-new-order.json");
String trade = mapper.writeValueAsString(newTrade);
assertEquals(
trade,
"{\"side\":\"Buy\",\"price\":527.51,\"qty\":0.1,\"asset\":\"BCH\",\"orderType\":\"market\"}");
}
}
| {'content_hash': '5d42ed6da156771ad712ace665cae370', 'timestamp': '', 'source': 'github', 'line_count': 60, 'max_line_length': 102, 'avg_line_length': 40.96666666666667, 'alnum_prop': 0.7119609438567941, 'repo_name': 'Panchen/XChange', 'id': '6b228183cf8c79e0274ec3321227ad5539d40e44', 'size': '2458', 'binary': False, 'copies': '8', 'ref': 'refs/heads/develop', 'path': 'xchange-dvchain/src/test/java/org/knowm/xchange/dvchain/v4/dto/trades/DVChainTradesJSONTest.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Java', 'bytes': '9114865'}]} |
// Headers required by this test; the exact include paths are assumed from the
// contemporaneous ash/ui source layout.
#include "ash/shelf/shelf_widget.h"

#include "ash/launcher/launcher.h"
#include "ash/launcher/launcher_view.h"
#include "ash/shelf/shelf_layout_manager.h"
#include "ash/shell.h"
#include "ash/system/status_area_widget.h"
#include "ash/test/ash_test_base.h"
#include "ash/wm/window_util.h"
#include "ui/aura/root_window.h"
#include "ui/gfx/display.h"
#include "ui/gfx/screen.h"
#include "ui/views/corewm/corewm_switches.h"
#include "ui/views/widget/widget.h"

namespace ash {
namespace {
ShelfWidget* GetShelfWidget() {
return Launcher::ForPrimaryDisplay()->shelf_widget();
}
internal::ShelfLayoutManager* GetShelfLayoutManager() {
return GetShelfWidget()->shelf_layout_manager();
}
} // namespace
typedef test::AshTestBase ShelfWidgetTest;
// Launcher can't be activated on mouse click, but it is activable from
// the focus cycler or as fallback.
TEST_F(ShelfWidgetTest, ActivateAsFallback) {
// TODO(mtomasz): make this test work with the FocusController.
if (views::corewm::UseFocusController())
return;
Launcher* launcher = Launcher::ForPrimaryDisplay();
ShelfWidget* shelf_widget = launcher->shelf_widget();
EXPECT_FALSE(shelf_widget->CanActivate());
shelf_widget->WillActivateAsFallback();
EXPECT_TRUE(shelf_widget->CanActivate());
wm::ActivateWindow(shelf_widget->GetNativeWindow());
EXPECT_FALSE(shelf_widget->CanActivate());
}
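// Applies |alignment| to the shelf on |root| and checks that the display's
// work area matches |expected|.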
void TestLauncherAlignment(aura::RootWindow* root,
ShelfAlignment alignment,
const std::string& expected) {
Shell::GetInstance()->SetShelfAlignment(alignment, root);
gfx::Screen* screen = gfx::Screen::GetScreenFor(root);
EXPECT_EQ(expected,
screen->GetDisplayNearestWindow(root).work_area().ToString());
}
TEST_F(ShelfWidgetTest, TestAlignment) {
Launcher* launcher = Launcher::ForPrimaryDisplay();
UpdateDisplay("400x400");
ASSERT_TRUE(launcher);
{
SCOPED_TRACE("Single Bottom");
TestLauncherAlignment(Shell::GetPrimaryRootWindow(),
SHELF_ALIGNMENT_BOTTOM,
"0,0 400x352");
}
{
SCOPED_TRACE("Single Right");
TestLauncherAlignment(Shell::GetPrimaryRootWindow(),
SHELF_ALIGNMENT_RIGHT,
"0,0 348x400");
}
{
SCOPED_TRACE("Single Left");
TestLauncherAlignment(Shell::GetPrimaryRootWindow(),
SHELF_ALIGNMENT_LEFT,
"52,0 348x400");
}
UpdateDisplay("300x300,500x500");
Shell::RootWindowList root_windows = Shell::GetAllRootWindows();
{
SCOPED_TRACE("Primary Bottom");
TestLauncherAlignment(root_windows[0],
SHELF_ALIGNMENT_BOTTOM,
"0,0 300x252");
}
{
SCOPED_TRACE("Primary Right");
TestLauncherAlignment(root_windows[0],
SHELF_ALIGNMENT_RIGHT,
"0,0 248x300");
}
{
SCOPED_TRACE("Primary Left");
TestLauncherAlignment(root_windows[0],
SHELF_ALIGNMENT_LEFT,
"52,0 248x300");
}
if (Shell::IsLauncherPerDisplayEnabled()) {
{
SCOPED_TRACE("Secondary Bottom");
TestLauncherAlignment(root_windows[1],
SHELF_ALIGNMENT_BOTTOM,
"300,0 500x452");
}
{
SCOPED_TRACE("Secondary Right");
TestLauncherAlignment(root_windows[1],
SHELF_ALIGNMENT_RIGHT,
"300,0 448x500");
}
{
SCOPED_TRACE("Secondary Left");
TestLauncherAlignment(root_windows[1],
SHELF_ALIGNMENT_LEFT,
"352,0 448x500");
}
}
}
// Makes sure the launcher is initially sized correctly.
TEST_F(ShelfWidgetTest, LauncherInitiallySized) {
ShelfWidget* shelf_widget = GetShelfWidget();
Launcher* launcher = shelf_widget->launcher();
ASSERT_TRUE(launcher);
internal::ShelfLayoutManager* shelf_layout_manager = GetShelfLayoutManager();
ASSERT_TRUE(shelf_layout_manager);
ASSERT_TRUE(shelf_widget->status_area_widget());
int status_width = shelf_widget->status_area_widget()->
GetWindowBoundsInScreen().width();
// Test only makes sense if the status is > 0, which it better be.
EXPECT_GT(status_width, 0);
EXPECT_EQ(status_width, shelf_widget->GetContentsView()->width() -
launcher->GetLauncherViewForTest()->width());
}
// Verifies when the shell is deleted with a full screen window we don't crash.
TEST_F(ShelfWidgetTest, DontReferenceLauncherAfterDeletion) {
views::Widget* widget = new views::Widget;
views::Widget::InitParams params(views::Widget::InitParams::TYPE_WINDOW);
params.bounds = gfx::Rect(0, 0, 200, 200);
params.context = CurrentContext();
// Widget is now owned by the parent window.
widget->Init(params);
widget->SetFullscreen(true);
}
} // namespace ash
| {'content_hash': 'f52450676512c9d69caa6f1c6d3ac3eb', 'timestamp': '', 'source': 'github', 'line_count': 134, 'max_line_length': 79, 'avg_line_length': 33.37313432835821, 'alnum_prop': 0.6301431127012522, 'repo_name': 'plxaye/chromium', 'id': 'bea1c6484b68ea3ab14b5efb5b9972e443926523', 'size': '5214', 'binary': False, 'copies': '3', 'ref': 'refs/heads/master', 'path': 'src/ash/shelf/shelf_widget_unittest.cc', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ASP', 'bytes': '853'}, {'name': 'AppleScript', 'bytes': '6973'}, {'name': 'Arduino', 'bytes': '464'}, {'name': 'Assembly', 'bytes': '1176633'}, {'name': 'Awk', 'bytes': '9519'}, {'name': 'C', 'bytes': '75195981'}, {'name': 'C#', 'bytes': '36335'}, {'name': 'C++', 'bytes': '172360762'}, {'name': 'CSS', 'bytes': '740648'}, {'name': 'Dart', 'bytes': '12620'}, {'name': 'Emacs Lisp', 'bytes': '12454'}, {'name': 'F#', 'bytes': '381'}, {'name': 'Java', 'bytes': '3671513'}, {'name': 'JavaScript', 'bytes': '16204541'}, {'name': 'Max', 'bytes': '39069'}, {'name': 'Mercury', 'bytes': '10299'}, {'name': 'Objective-C', 'bytes': '1133728'}, {'name': 'Objective-C++', 'bytes': '5771619'}, {'name': 'PHP', 'bytes': '97817'}, {'name': 'Perl', 'bytes': '166372'}, {'name': 'Python', 'bytes': '11650532'}, {'name': 'Ragel in Ruby Host', 'bytes': '3641'}, {'name': 'Rebol', 'bytes': '262'}, {'name': 'Ruby', 'bytes': '14575'}, {'name': 'Shell', 'bytes': '1426780'}, {'name': 'Tcl', 'bytes': '277077'}, {'name': 'TeX', 'bytes': '43554'}, {'name': 'VimL', 'bytes': '4953'}, {'name': 'XSLT', 'bytes': '13493'}, {'name': 'nesC', 'bytes': '14650'}]} |
package dockerfile
import (
"testing"
)
func TestWrite(t *testing.T) {
var f = New("ubuntu")
var got, want = f.String(), "FROM ubuntu\n"
if got != want {
t.Errorf("Exepected New() returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteAdd("src", "target")
got, want = f.String(), "ADD src target\n"
if got != want {
t.Errorf("Exepected WriteAdd returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteFrom("ubuntu")
got, want = f.String(), "FROM ubuntu\n"
if got != want {
t.Errorf("Exepected WriteFrom returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteRun("whoami")
got, want = f.String(), "RUN whoami\n"
if got != want {
t.Errorf("Exepected WriteRun returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteUser("root")
got, want = f.String(), "USER root\n"
if got != want {
t.Errorf("Exepected WriteUser returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteEnv("FOO", "BAR")
got, want = f.String(), "ENV FOO BAR\n"
if got != want {
t.Errorf("Exepected WriteEnv returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteWorkdir("/home/ubuntu")
got, want = f.String(), "WORKDIR /home/ubuntu\n"
if got != want {
t.Errorf("Exepected WriteWorkdir returned %s, got %s", want, got)
}
f = &Dockerfile{}
f.WriteEntrypoint("/root")
got, want = f.String(), "ENTRYPOINT /root\n"
if got != want {
t.Errorf("Exepected WriteEntrypoint returned %s, got %s", want, got)
}
}
| {'content_hash': '8a7488852f52518988b64be4f03c3887', 'timestamp': '', 'source': 'github', 'line_count': 63, 'max_line_length': 70, 'avg_line_length': 23.126984126984127, 'alnum_prop': 0.6225120109814688, 'repo_name': 'bbmepic/drone', 'id': 'cb9a63a22bda92e33eb96fa545180b4a92aa52e9', 'size': '1457', 'binary': False, 'copies': '50', 'ref': 'refs/heads/master', 'path': 'shared/build/dockerfile/dockerfile_test.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '44707'}, {'name': 'Go', 'bytes': '494491'}, {'name': 'HTML', 'bytes': '27967'}, {'name': 'JavaScript', 'bytes': '42277'}, {'name': 'Makefile', 'bytes': '3666'}, {'name': 'Shell', 'bytes': '6694'}]} |
package com.intellij.ide.structureView;
import com.intellij.ide.util.treeView.smartTree.TreeModel;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Defines the model for the data displayed in the standard structure view or file structure
* popup component. The model of the standard structure view is represented as a tree of elements.
*
* @see com.intellij.ide.structureView.TreeBasedStructureViewBuilder#createStructureViewModel()
* @see TextEditorBasedStructureViewModel
*/
public interface StructureViewModel extends TreeModel {
/**
* Returns the element currently selected in the editor linked to the structure view.
*
* @return the selected element, or null if the current editor position does not
* correspond to any element that can be shown in the structure view.
*/
@Nullable Object getCurrentEditorElement();
/**
* Adds a listener which gets notified when the selection in the editor linked to the
* structure view moves to a different element visible in the structure view.
*
* @param listener the listener to add.
*/
void addEditorPositionListener(@NotNull FileEditorPositionListener listener);
/**
* Removes a listener which gets notified when the selection in the editor linked to the
* structure view moves to a different element visible in the structure view.
*
* @param listener the listener to remove.
*/
void removeEditorPositionListener(@NotNull FileEditorPositionListener listener);
/**
* Adds a listener which gets notified when the data represented by the structure view
* is changed and the structure view needs to be rebuilt.
*
* @param modelListener the listener to add.
*/
void addModelListener(@NotNull ModelListener modelListener);
/**
* Removes a listener which gets notified when the data represented by the structure view
* is changed and the structure view needs to be rebuilt.
*
* @param modelListener the listener to remove.
*/
void removeModelListener(@NotNull ModelListener modelListener);
/**
* Returns the root element of the structure view tree.
*
* @return the structure view root.
*/
@Override
@NotNull
StructureViewTreeElement getRoot();
/**
* Disposes of the model.
*/
void dispose();
boolean shouldEnterElement(Object element);
interface ElementInfoProvider extends StructureViewModel {
boolean isAlwaysShowsPlus(StructureViewTreeElement element);
boolean isAlwaysLeaf(StructureViewTreeElement element);
}
interface ExpandInfoProvider {
boolean isAutoExpand(@NotNull StructureViewTreeElement element);
boolean isSmartExpand();
}
}
| {'content_hash': '66175567d6c145a725c3937e5dc155f5', 'timestamp': '', 'source': 'github', 'line_count': 81, 'max_line_length': 98, 'avg_line_length': 33.18518518518518, 'alnum_prop': 0.7526041666666666, 'repo_name': 'pwoodworth/intellij-community', 'id': '551d70d055e1dd83439e4fee72d7bc5089e5f598', 'size': '3288', 'binary': False, 'copies': '39', 'ref': 'refs/heads/master', 'path': 'platform/structure-view-api/src/com/intellij/ide/structureView/StructureViewModel.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'AMPL', 'bytes': '20665'}, {'name': 'AspectJ', 'bytes': '182'}, {'name': 'Batchfile', 'bytes': '63518'}, {'name': 'C', 'bytes': '214180'}, {'name': 'C#', 'bytes': '1538'}, {'name': 'C++', 'bytes': '190028'}, {'name': 'CSS', 'bytes': '111474'}, {'name': 'CoffeeScript', 'bytes': '1759'}, {'name': 'Cucumber', 'bytes': '14382'}, {'name': 'Erlang', 'bytes': '10'}, {'name': 'FLUX', 'bytes': '57'}, {'name': 'Groff', 'bytes': '35232'}, {'name': 'Groovy', 'bytes': '2196045'}, {'name': 'HTML', 'bytes': '1726130'}, {'name': 'J', 'bytes': '5050'}, {'name': 'Java', 'bytes': '148513224'}, {'name': 'JavaScript', 'bytes': '125292'}, {'name': 'Kotlin', 'bytes': '528634'}, {'name': 'Lex', 'bytes': '166177'}, {'name': 'Makefile', 'bytes': '2352'}, {'name': 'NSIS', 'bytes': '85969'}, {'name': 'Objective-C', 'bytes': '28634'}, {'name': 'Perl6', 'bytes': '26'}, {'name': 'Protocol Buffer', 'bytes': '6570'}, {'name': 'Python', 'bytes': '21468722'}, {'name': 'Ruby', 'bytes': '1213'}, {'name': 'Scala', 'bytes': '11698'}, {'name': 'Shell', 'bytes': '63190'}, {'name': 'Smalltalk', 'bytes': '64'}, {'name': 'TeX', 'bytes': '60798'}, {'name': 'TypeScript', 'bytes': '6152'}, {'name': 'XSLT', 'bytes': '113040'}]} |
import urbackup_api
from urbackup_api import installer_os
import datetime
import time
import sys
server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo")
for extra_client in server.get_extra_clients():
server.remove_extra_client(extra_client["id"])
computernames = """2.2.2.2
3.3.3.3"""
# Iterate over the lines, not the individual characters of the string
for line in computernames.splitlines():
    server.add_extra_client(line)
clients = server.get_status()
usage = server.get_usage()
if len(clients) != len(usage):
print("Failed to retreive usage or status information. Length of both lists is different.")
# Uncomment to format time differently
# locale.setlocale(locale.LC_TIME, "german")
diff_time = 3*24*60*60 # 3 days
for client in clients:
if client["lastbackup"]=="-" or client["lastbackup"] < time.time() - diff_time:
if client["lastbackup"]=="-" or client["lastbackup"]==0:
lastbackup = "Never"
else:
lastbackup = datetime.datetime.fromtimestamp(client["lastbackup"]).strftime("%x %X")
print("Last file backup at {lastbackup} of client {clientname} is older than three days".format(
lastbackup=lastbackup, clientname=client["name"] ) )
#if server.start_incr_file_backup("Johnwin7test-PC2"):
# print("Started file backup successfully")
#else:
# print("Failed to start file backup")
if not server.get_livelog():
print("Failed to get livelog contents")
settings = server.get_client_settings("Johnwin7test-PC2")
for key in settings:
print("{key}={value}".format(key=key, value=settings[key]))
print("Authkey: "+server.get_client_authkey("Johnwin7test-PC2"))
if server.change_client_setting("Johnwin7test-PC2", "max_image_incr", "40"):
print("Changed setting successfully")
else:
print("Failed to change setting")
settings = server.get_global_settings()
for key in settings:
print("Global: {key}={value}".format(key=key, value=settings[key]))
if server.set_global_setting("max_image_incr", "40"):
print("Changed global setting successfully")
else:
print("Failed to change global setting")
#Get all file backups for a specified client id
backups = server.get_clientbackups('8')
#Get all image backups for a specified client id
backups_image = server.get_clientimagebackups('8')
#Download a client installer
server.download_installer("test.exe", "test", installer_os.Windows)
| {'content_hash': 'd3cf1e59ec1665f1c6c291ec676ba219', 'timestamp': '', 'source': 'github', 'line_count': 83, 'max_line_length': 104, 'avg_line_length': 30.289156626506024, 'alnum_prop': 0.6599045346062052, 'repo_name': 'uroni/urbackup-server-python-web-api-wrapper', 'id': '77a923ebc28c12f7f1e620aaacb172d27f8173ef', 'size': '2514', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'test/urbackup_api_test.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Python', 'bytes': '22702'}]} |
package seedu.address.model.task;
import java.time.LocalDateTime;
import java.util.Objects;
import seedu.address.commons.exceptions.IllegalValueException;
public class DeadlineTask extends Task {
private static final String FMT_STRING = "DeadlineTask[name=%s, due=%s, finished=%s]";
private final LocalDateTime due;
private final boolean finished;
public DeadlineTask(Name name, LocalDateTime due, boolean finished) {
super(name);
assert due != null;
this.due = due;
this.finished = finished;
}
public DeadlineTask(Name name, LocalDateTime due) {
this(name, due, false);
}
public DeadlineTask(String name, LocalDateTime due) throws IllegalValueException {
this(new Name(name), due, false);
}
public LocalDateTime getDue() {
return due;
}
public boolean isFinished() {
return this.finished;
}
@Override
public boolean equals(Object other) {
return other == this
|| (other instanceof DeadlineTask
&& name.equals(((DeadlineTask)other).name)
&& due.equals(((DeadlineTask)other).due)
&& finished == ((DeadlineTask)other).finished);
}
@Override
public int hashCode() {
return Objects.hash(name, due, finished);
}
@Override
public String toString() {
return String.format(FMT_STRING, name, due, finished);
}
}
| {'content_hash': '35df895b6a94df9d71d709a38777ec2c', 'timestamp': '', 'source': 'github', 'line_count': 58, 'max_line_length': 90, 'avg_line_length': 25.06896551724138, 'alnum_prop': 0.6327372764786795, 'repo_name': 'snehasp13/main', 'id': 'a159171e957798ad424a64c79a38763a2f73080a', 'size': '1454', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'src/main/java/seedu/address/model/task/DeadlineTask.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '7465'}, {'name': 'Java', 'bytes': '459596'}, {'name': 'Python', 'bytes': '4145'}]} |
DROP TABLE IF EXISTS `profile`;
CREATE TABLE `profile` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`author_id` BIGINT NOT NULL,
`bio` VARCHAR(255) NOT NULL UNIQUE
);
INSERT INTO `profile`(`author_id`, `bio`)
VALUES (1, 'Used to be a geophysical engineer');
| {'content_hash': '843cfedde00fb7a0b6cc57b071d9a40a', 'timestamp': '', 'source': 'github', 'line_count': 9, 'max_line_length': 55, 'avg_line_length': 29.88888888888889, 'alnum_prop': 0.6840148698884758, 'repo_name': 'brianloveswords/streamsql', 'id': '5a873281443e69455289368f688865f62d874cc5', 'size': '269', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'test/sql/profile-sqlite.sql', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '73606'}]} |
package servicebroker
import (
"errors"
"fmt"
"time"
"github.com/golang/glog"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
restclient "k8s.io/client-go/rest"
kclientsetexternal "k8s.io/kubernetes/pkg/client/clientset_generated/clientset"
kclientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset"
"github.com/openshift/origin/pkg/cmd/server/bootstrappolicy"
"github.com/openshift/origin/pkg/openservicebroker/api"
extrouteclientset "github.com/openshift/origin/pkg/route/generated/clientset/typed/route/v1"
"github.com/openshift/origin/pkg/serviceaccounts"
templateinformer "github.com/openshift/origin/pkg/template/generated/informers/internalversion/template/internalversion"
templateclientset "github.com/openshift/origin/pkg/template/generated/internalclientset"
internalversiontemplate "github.com/openshift/origin/pkg/template/generated/internalclientset/typed/template/internalversion"
templatelister "github.com/openshift/origin/pkg/template/generated/listers/template/internalversion"
)
// Broker represents the template service broker. It implements
// openservicebroker/api.Broker.
type Broker struct {
kc kclientset.Interface
templateclient internalversiontemplate.TemplateInterface
extkc kclientsetexternal.Interface
extrouteclient extrouteclientset.RouteV1Interface
lister templatelister.TemplateLister
templateNamespaces map[string]struct{}
ready chan struct{}
}
var _ api.Broker = &Broker{}
// NewBroker returns a new instance of the template service broker. While
// built into origin, its initialisation is asynchronous. This is because it is
// part of the API server, but requires the API server to be up to get its
// service account token.
func NewBroker(privrestconfig restclient.Config, privkc kclientset.Interface, infraNamespace string, informer templateinformer.TemplateInformer, namespaces []string) *Broker {
templateNamespaces := map[string]struct{}{}
for _, namespace := range namespaces {
templateNamespaces[namespace] = struct{}{}
}
b := &Broker{
lister: informer.Lister(),
templateNamespaces: templateNamespaces,
ready: make(chan struct{}),
}
go func() {
// the broker is initialised asynchronously because fetching the service
// account token requires the main API server to be running.
glog.V(2).Infof("Template service broker: waiting for authentication token")
restconfig, _, kc, extkc, err := serviceaccounts.Clients(
privrestconfig,
&serviceaccounts.ClientLookupTokenRetriever{Client: privkc},
infraNamespace,
bootstrappolicy.InfraTemplateServiceBrokerServiceAccountName,
)
if err != nil {
utilruntime.HandleError(fmt.Errorf("Template service broker: failed to initialize clients: %v", err))
return
}
extrouteclientset, err := extrouteclientset.NewForConfig(restconfig)
if err != nil {
utilruntime.HandleError(fmt.Errorf("Template service broker: failed to initialize route clientset: %v", err))
return
}
templateclientset, err := templateclientset.NewForConfig(restconfig)
if err != nil {
utilruntime.HandleError(fmt.Errorf("Template service broker: failed to initialize template clientset: %v", err))
return
}
b.kc = kc
b.extkc = extkc
b.extrouteclient = extrouteclientset
b.templateclient = templateclientset.Template()
glog.V(2).Infof("Template service broker: waiting for informer sync")
for !informer.Informer().HasSynced() {
time.Sleep(100 * time.Millisecond)
}
glog.V(2).Infof("Template service broker: ready; reading namespaces %v", namespaces)
close(b.ready)
}()
return b
}
// WaitForReady is called on each incoming API request via a server filter. It
// is intended to be a quick check that the broker is initialized (which should
// itself be a fast one-off start-up event).
func (b *Broker) WaitForReady() error {
// delay up to 10 seconds if not ready (unlikely), before returning a
// "try again" response.
timer := time.NewTimer(10 * time.Second)
defer timer.Stop()
select {
case <-b.ready:
return nil
case <-timer.C:
return errors.New("timeout waiting for broker to be ready")
}
}
| {'content_hash': 'd3067f4e629added85e752dad65e9735', 'timestamp': '', 'source': 'github', 'line_count': 118, 'max_line_length': 175, 'avg_line_length': 35.652542372881356, 'alnum_prop': 0.7537437603993344, 'repo_name': 'wanghaoran1988/origin', 'id': 'c2938ca3690e005ce36af5b4a37131cb0a7f26dd', 'size': '4207', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'pkg/template/servicebroker/servicebroker.go', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Awk', 'bytes': '1842'}, {'name': 'DIGITAL Command Language', 'bytes': '117'}, {'name': 'Go', 'bytes': '18870102'}, {'name': 'Groovy', 'bytes': '5288'}, {'name': 'HTML', 'bytes': '74732'}, {'name': 'Makefile', 'bytes': '21696'}, {'name': 'Protocol Buffer', 'bytes': '635483'}, {'name': 'Python', 'bytes': '33408'}, {'name': 'Roff', 'bytes': '2049'}, {'name': 'Ruby', 'bytes': '484'}, {'name': 'Shell', 'bytes': '2155506'}, {'name': 'Smarty', 'bytes': '626'}]} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
Pass: (3300) Inconsistent math
-->
<sbml xmlns="http://www.sbml.org/sbml/level2" level="2" version="1">
<model>
<listOfCompartments>
<compartment id="c" constant="false"/>
</listOfCompartments>
<listOfParameters>
<parameter id="p" value="1" constant="false"/>
<parameter id="p1" units="metre" value="1" constant="false"/>
<parameter id="delta_t" units="second" value="1" constant="false"/>
</listOfParameters>
<listOfRules>
<assignmentRule variable = "p">
<math xmlns="http://www.w3.org/1998/Math/MathML">
<apply>
<times/>
<cn type="integer"> 12</cn>
<cn> 5 </cn>
</apply>
</math>
</assignmentRule>
</listOfRules>
</model>
</sbml>
| {'content_hash': 'a916f8415d8111e95f3b6aa6331f6f5b', 'timestamp': '', 'source': 'github', 'line_count': 29, 'max_line_length': 73, 'avg_line_length': 27.586206896551722, 'alnum_prop': 0.57375, 'repo_name': 'TheCoSMoCompany/biopredyn', 'id': '78a74ad1106a8cdf9e17d2407159d8cae6bc92eb', 'size': '800', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'Prototype/src/libsbml-5.10.0/src/sbml/validator/test/test-data/sbml-mathml-constraints/10207-pass-00-03.xml', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'C', 'bytes': '3535918'}, {'name': 'C++', 'bytes': '26120778'}, {'name': 'CMake', 'bytes': '455400'}, {'name': 'CSS', 'bytes': '49020'}, {'name': 'Gnuplot', 'bytes': '206'}, {'name': 'HTML', 'bytes': '193068'}, {'name': 'Java', 'bytes': '66517'}, {'name': 'JavaScript', 'bytes': '3847'}, {'name': 'Makefile', 'bytes': '30905'}, {'name': 'Perl', 'bytes': '3018'}, {'name': 'Python', 'bytes': '7891301'}, {'name': 'Shell', 'bytes': '247654'}, {'name': 'TeX', 'bytes': '22566'}, {'name': 'XSLT', 'bytes': '55564'}]} |
<script src="chrome://md-settings/search_engines_page/search_engines_browser_proxy.js"></script>
| {'content_hash': '068e6ebf4041854ee5b00bf9aafbd8fd', 'timestamp': '', 'source': 'github', 'line_count': 1, 'max_line_length': 96, 'avg_line_length': 97.0, 'alnum_prop': 0.7731958762886598, 'repo_name': 'ds-hwang/chromium-crosswalk', 'id': '6b977e46ebfb92bddc18d1ba724f41304761c5fe', 'size': '97', 'binary': False, 'copies': '7', 'ref': 'refs/heads/master', 'path': 'chrome/browser/resources/settings/search_engines_page/search_engines_browser_proxy.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []} |
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- prolog causes IE to stop working, WICKET-2718 -->
<!-- < ?xml version="1.0" encoding="UTF-8" ? > -->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html xmlns:wicket="http://wicket.apache.org"><head><meta wicket:id="redirect" http-equiv="refresh" content="0; url=app"/></head></html> | {'content_hash': '0aa63ea64867acd783ec2765d12b98fe', 'timestamp': '', 'source': 'github', 'line_count': 21, 'max_line_length': 136, 'avg_line_length': 56.523809523809526, 'alnum_prop': 0.7245155855096883, 'repo_name': 'apache/wicket', 'id': 'bef0e12f9279f4b33a14d43ebad559a4af481575', 'size': '1187', 'binary': False, 'copies': '16', 'ref': 'refs/heads/master', 'path': 'wicket-core/src/main/java/org/apache/wicket/markup/html/pages/RedirectPage.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '146480'}, {'name': 'Dockerfile', 'bytes': '164'}, {'name': 'HTML', 'bytes': '897918'}, {'name': 'Java', 'bytes': '12309248'}, {'name': 'JavaScript', 'bytes': '510653'}, {'name': 'Less', 'bytes': '28'}, {'name': 'Logos', 'bytes': '18871'}, {'name': 'Python', 'bytes': '1547'}, {'name': 'Shell', 'bytes': '25598'}, {'name': 'XSLT', 'bytes': '2162'}]} |
// Package worker contains the functionality for the background worker process.
package worker
| {'content_hash': 'f6bda186dbc1ef6f5e5232307ae86dc8', 'timestamp': '', 'source': 'github', 'line_count': 4, 'max_line_length': 79, 'avg_line_length': 24.25, 'alnum_prop': 0.8144329896907216, 'repo_name': 'alenbhclynpblc/gophish', 'id': 'dbc961b7eb267a6e7da1d306687181437b53c104', 'size': '1224', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'worker/doc.go', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'API Blueprint', 'bytes': '21447'}, {'name': 'Go', 'bytes': '134386'}, {'name': 'HTML', 'bytes': '269228'}, {'name': 'JavaScript', 'bytes': '92931'}, {'name': 'SQLPL', 'bytes': '1004'}]} |
<!doctype html>
<!--
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
-->
<html>
<head>
<title>iron-collapse-basic</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script src="../../webcomponentsjs/webcomponents-lite.js"></script>
<script src="../../web-component-tester/browser.js"></script>
<script src="../../test-fixture/test-fixture-mocha.js"></script>
<link rel="import" href="../../test-fixture/test-fixture.html">
<link rel="import" href="../iron-collapse.html">
</head>
<body>
<test-fixture id="test">
<template>
<iron-collapse id="collapse" opened>
<div style="height:100px;">
Lorem ipsum
</div>
</iron-collapse>
</template>
</test-fixture>
<script>
suite('basic', function() {
var collapse;
var collapseHeight;
setup(function() {
collapse = fixture('test');
collapseHeight = getComputedStyle(collapse).height;
});
test('opened attribute', function() {
assert.equal(collapse.opened, true);
});
test('animated by default', function() {
assert.isTrue(!collapse.noAnimation, '`noAnimation` is falsy');
});
test('horizontal attribute', function() {
assert.equal(collapse.horizontal, false);
});
test('default opened height', function() {
assert.equal(collapse.style.height, 'auto');
});
test('set opened to false triggers animation', function(done) {
collapse.opened = false;
// Animation got enabled.
assert.notEqual(collapse.style.transitionDuration, '0s');
collapse.addEventListener('transitionend', function() {
// Animation disabled.
assert.equal(collapse.style.transitionDuration, '0s');
done();
});
});
test('enableTransition(false) disables animations', function() {
collapse.enableTransition(false);
assert.isTrue(collapse.noAnimation, '`noAnimation` is true');
// trying to animate the size update
collapse.updateSize('0px', true);
// Animation immediately disabled.
assert.equal(collapse.style.height, '0px');
});
test('set opened to false, then to true', function(done) {
// this listener will be triggered twice (every time `opened` changes)
collapse.addEventListener('transitionend', function() {
if (collapse.opened) {
// Check finalSize after animation is done.
assert.equal(collapse.style.height, 'auto');
done();
} else {
// Check if size is still 0px.
assert.equal(collapse.style.height, '0px');
// Trigger 2nd toggle.
collapse.opened = true;
// Size should be immediately set.
assert.equal(collapse.style.height, collapseHeight);
}
});
// Trigger 1st toggle.
collapse.opened = false;
// Size should be immediately set.
assert.equal(collapse.style.height, '0px');
});
test('opened changes trigger iron-resize', function() {
var spy = sinon.stub();
collapse.addEventListener('iron-resize', spy);
// No animations for faster test.
collapse.noAnimation = true;
collapse.opened = false;
assert.isTrue(spy.calledOnce, 'iron-resize was fired');
});
test('overflow is hidden while animating', function(done) {
collapse.addEventListener('transitionend', function() {
// Should still be hidden.
assert.equal(getComputedStyle(collapse).overflow, 'hidden');
done();
});
assert.equal(getComputedStyle(collapse).overflow, 'visible');
collapse.opened = false;
// Immediately updated style.
assert.equal(getComputedStyle(collapse).overflow, 'hidden');
});
test('toggle horizontal updates size', function() {
collapse.horizontal = false;
assert.equal(collapse.style.width, '');
assert.equal(collapse.style.height, 'auto');
assert.equal(collapse.style.transitionProperty, 'height');
collapse.horizontal = true;
assert.equal(collapse.style.width, 'auto');
assert.equal(collapse.style.height, '');
assert.equal(collapse.style.transitionProperty, 'width');
});
});
</script>
</body>
</html>
| {'content_hash': '2557ffc2b17994eab36365b603315825', 'timestamp': '', 'source': 'github', 'line_count': 147, 'max_line_length': 100, 'avg_line_length': 34.85034013605442, 'alnum_prop': 0.5939878977161819, 'repo_name': 'grodriguezr/hospital', 'id': 'e52459a3af01221cc5beb0c3037cf688dd9a3994', 'size': '5123', 'binary': False, 'copies': '25', 'ref': 'refs/heads/master', 'path': 'bower_components/iron-collapse/test/basic.html', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '151006'}, {'name': 'HTML', 'bytes': '24053'}, {'name': 'JavaScript', 'bytes': '281553'}, {'name': 'Makefile', 'bytes': '300'}]} |
package org.apache.camel.component.rest;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.AdviceWithRouteBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.RouteDefinition;
import org.apache.camel.reifier.RouteReifier;
import org.junit.Test;
public class FromRestAdviceWithTest extends ContextTestSupport {
@Override
public boolean isUseRouteBuilder() {
return false;
}
@Test
public void testAdviceWith() throws Exception {
context.addRoutes(new RouteBuilder() {
@Override
public void configure() throws Exception {
rest("/say/hello")
.get()
.route().routeId("myRoute")
.transform().constant("Bye World")
.to("direct:hello");
from("direct:hello")
.to("mock:hello");
}
});
RouteDefinition route = context.getRouteDefinition("myRoute");
RouteReifier.adviceWith(route, context, new AdviceWithRouteBuilder() {
@Override
public void configure() throws Exception {
replaceFromWith("direct:foo");
}
});
context.start();
getMockEndpoint("mock:hello").expectedMessageCount(1);
String out = template.requestBody("direct:foo", "I was here", String.class);
assertEquals("Bye World", out);
assertMockEndpointsSatisfied();
}
}
| {'content_hash': 'e6f75d1f2fb96a7325061c624e24c1d2', 'timestamp': '', 'source': 'github', 'line_count': 52, 'max_line_length': 84, 'avg_line_length': 29.576923076923077, 'alnum_prop': 0.5975292587776333, 'repo_name': 'davidkarlsen/camel', 'id': '68e3b7cf6ee43e6ade6a4f4dc114444ac35a4d65', 'size': '2340', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'core/camel-core/src/test/java/org/apache/camel/component/rest/FromRestAdviceWithTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Apex', 'bytes': '6521'}, {'name': 'Batchfile', 'bytes': '2353'}, {'name': 'CSS', 'bytes': '17204'}, {'name': 'Elm', 'bytes': '10852'}, {'name': 'FreeMarker', 'bytes': '8015'}, {'name': 'Groovy', 'bytes': '14479'}, {'name': 'HTML', 'bytes': '909437'}, {'name': 'Java', 'bytes': '82303679'}, {'name': 'JavaScript', 'bytes': '102432'}, {'name': 'Makefile', 'bytes': '513'}, {'name': 'Shell', 'bytes': '17240'}, {'name': 'TSQL', 'bytes': '28835'}, {'name': 'Tcl', 'bytes': '4974'}, {'name': 'Thrift', 'bytes': '6979'}, {'name': 'XQuery', 'bytes': '546'}, {'name': 'XSLT', 'bytes': '271473'}]} |
using System;
using Amazon.ImportExport;
using Amazon.ImportExport.Model;
namespace Amazon.ImportExport
{
/// <summary>
/// Interface for accessing AmazonImportExport.
///
/// AWS Import/Export Service <para> AWS Import/Export accelerates transferring large amounts of data between the AWS cloud and portable storage
/// devices that you mail to us. AWS Import/Export transfers data directly onto and off of your storage devices using Amazon's high-speed
/// internal network and bypassing the Internet. For large data sets, AWS Import/Export is often faster than Internet transfer and more cost
/// effective than upgrading your connectivity. </para>
/// </summary>
public interface AmazonImportExport : IDisposable
{
#region CreateJob
/// <summary>
/// <para> This operation initiates the process of scheduling an upload or download of your data. You include in the request a manifest that
/// describes the data transfer specifics. The response to the request includes a job ID, which you can use in other operations, a signature
/// that you use to identify your storage device, and the address where you should ship your storage device. </para>
/// </summary>
///
/// <param name="createJobRequest">Container for the necessary parameters to execute the CreateJob service method on AmazonImportExport.</param>
///
/// <returns>The response from the CreateJob service method, as returned by AmazonImportExport.</returns>
///
/// <exception cref="MalformedManifestException"/>
/// <exception cref="InvalidAddressException"/>
/// <exception cref="BucketPermissionException"/>
/// <exception cref="InvalidParameterException"/>
/// <exception cref="MultipleRegionsException"/>
/// <exception cref="MissingParameterException"/>
/// <exception cref="InvalidFileSystemException"/>
/// <exception cref="MissingCustomsException"/>
/// <exception cref="NoSuchBucketException"/>
/// <exception cref="InvalidAccessKeyIdException"/>
/// <exception cref="InvalidManifestFieldException"/>
/// <exception cref="InvalidCustomsException"/>
/// <exception cref="MissingManifestFieldException"/>
/// <exception cref="CreateJobQuotaExceededException"/>
/// <exception cref="InvalidJobIdException"/>
CreateJobResponse CreateJob(CreateJobRequest createJobRequest);
/// <summary>
/// Initiates the asynchronous execution of the CreateJob operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.CreateJob"/>
/// </summary>
///
/// <param name="createJobRequest">Container for the necessary parameters to execute the CreateJob operation on AmazonImportExport.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback
/// procedure using the AsyncState property.</param>
///
/// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndCreateJob
/// operation.</returns>
IAsyncResult BeginCreateJob(CreateJobRequest createJobRequest, AsyncCallback callback, object state);
/// <summary>
/// Finishes the asynchronous execution of the CreateJob operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.CreateJob"/>
/// </summary>
///
/// <param name="asyncResult">The IAsyncResult returned by the call to BeginCreateJob.</param>
///
/// <returns>Returns a CreateJobResult from AmazonImportExport.</returns>
CreateJobResponse EndCreateJob(IAsyncResult asyncResult);
#endregion
#region CancelJob
/// <summary>
/// <para> This operation cancels a specified job. Only the job owner can cancel it. The operation fails if the job has already started or is
/// complete. </para>
/// </summary>
///
/// <param name="cancelJobRequest">Container for the necessary parameters to execute the CancelJob service method on AmazonImportExport.</param>
///
/// <returns>The response from the CancelJob service method, as returned by AmazonImportExport.</returns>
///
/// <exception cref="CanceledJobIdException"/>
/// <exception cref="ExpiredJobIdException"/>
/// <exception cref="InvalidAccessKeyIdException"/>
/// <exception cref="InvalidJobIdException"/>
/// <exception cref="UnableToCancelJobIdException"/>
CancelJobResponse CancelJob(CancelJobRequest cancelJobRequest);
/// <summary>
/// Initiates the asynchronous execution of the CancelJob operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.CancelJob"/>
/// </summary>
///
/// <param name="cancelJobRequest">Container for the necessary parameters to execute the CancelJob operation on AmazonImportExport.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback
/// procedure using the AsyncState property.</param>
///
/// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndCancelJob
/// operation.</returns>
IAsyncResult BeginCancelJob(CancelJobRequest cancelJobRequest, AsyncCallback callback, object state);
/// <summary>
/// Finishes the asynchronous execution of the CancelJob operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.CancelJob"/>
/// </summary>
///
/// <param name="asyncResult">The IAsyncResult returned by the call to BeginCancelJob.</param>
///
/// <returns>Returns a CancelJobResult from AmazonImportExport.</returns>
CancelJobResponse EndCancelJob(IAsyncResult asyncResult);
#endregion
#region GetStatus
/// <summary>
/// <para> This operation returns information about a job, including where the job is in the processing pipeline, the status of the results, and
/// the signature value associated with the job. You can only return information about jobs you own. </para>
/// </summary>
///
/// <param name="getStatusRequest">Container for the necessary parameters to execute the GetStatus service method on AmazonImportExport.</param>
///
/// <returns>The response from the GetStatus service method, as returned by AmazonImportExport.</returns>
///
/// <exception cref="CanceledJobIdException"/>
/// <exception cref="ExpiredJobIdException"/>
/// <exception cref="InvalidAccessKeyIdException"/>
/// <exception cref="InvalidJobIdException"/>
GetStatusResponse GetStatus(GetStatusRequest getStatusRequest);
/// <summary>
/// Initiates the asynchronous execution of the GetStatus operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.GetStatus"/>
/// </summary>
///
/// <param name="getStatusRequest">Container for the necessary parameters to execute the GetStatus operation on AmazonImportExport.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback
/// procedure using the AsyncState property.</param>
///
/// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndGetStatus
/// operation.</returns>
IAsyncResult BeginGetStatus(GetStatusRequest getStatusRequest, AsyncCallback callback, object state);
/// <summary>
/// Finishes the asynchronous execution of the GetStatus operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.GetStatus"/>
/// </summary>
///
/// <param name="asyncResult">The IAsyncResult returned by the call to BeginGetStatus.</param>
///
/// <returns>Returns a GetStatusResult from AmazonImportExport.</returns>
GetStatusResponse EndGetStatus(IAsyncResult asyncResult);
#endregion
#region ListJobs
/// <summary>
/// <para> This operation returns the jobs associated with the requester. AWS Import/Export lists the jobs in reverse chronological order based
/// on the date of creation. For example if Job Test1 was created 2009Dec30 and Test2 was created 2010Feb05, the ListJobs operation would return
/// Test2 followed by Test1. </para>
/// </summary>
///
/// <param name="listJobsRequest">Container for the necessary parameters to execute the ListJobs service method on AmazonImportExport.</param>
///
/// <returns>The response from the ListJobs service method, as returned by AmazonImportExport.</returns>
///
/// <exception cref="InvalidParameterException"/>
/// <exception cref="InvalidAccessKeyIdException"/>
ListJobsResponse ListJobs(ListJobsRequest listJobsRequest);
/// <summary>
/// Initiates the asynchronous execution of the ListJobs operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.ListJobs"/>
/// </summary>
///
/// <param name="listJobsRequest">Container for the necessary parameters to execute the ListJobs operation on AmazonImportExport.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback
/// procedure using the AsyncState property.</param>
///
/// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndListJobs
/// operation.</returns>
IAsyncResult BeginListJobs(ListJobsRequest listJobsRequest, AsyncCallback callback, object state);
/// <summary>
/// Finishes the asynchronous execution of the ListJobs operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.ListJobs"/>
/// </summary>
///
/// <param name="asyncResult">The IAsyncResult returned by the call to BeginListJobs.</param>
///
/// <returns>Returns a ListJobsResult from AmazonImportExport.</returns>
ListJobsResponse EndListJobs(IAsyncResult asyncResult);
/// <summary>
/// <para> This operation returns the jobs associated with the requester. AWS Import/Export lists the jobs in reverse chronological order based
/// on the date of creation. For example if Job Test1 was created 2009Dec30 and Test2 was created 2010Feb05, the ListJobs operation would return
/// Test2 followed by Test1. </para>
/// </summary>
///
/// <returns>The response from the ListJobs service method, as returned by AmazonImportExport.</returns>
///
/// <exception cref="InvalidParameterException"/>
/// <exception cref="InvalidAccessKeyIdException"/>
ListJobsResponse ListJobs();
#endregion
#region UpdateJob
/// <summary>
/// <para> You use this operation to change the parameters specified in the original manifest file by supplying a new manifest file. The
/// manifest file attached to this request replaces the original manifest file. You can only use the operation after a CreateJob request but
/// before the data transfer starts and you can only use it on jobs you own. </para>
/// </summary>
///
/// <param name="updateJobRequest">Container for the necessary parameters to execute the UpdateJob service method on AmazonImportExport.</param>
///
/// <returns>The response from the UpdateJob service method, as returned by AmazonImportExport.</returns>
///
/// <exception cref="MalformedManifestException"/>
/// <exception cref="BucketPermissionException"/>
/// <exception cref="InvalidAddressException"/>
/// <exception cref="InvalidParameterException"/>
/// <exception cref="UnableToUpdateJobIdException"/>
/// <exception cref="MultipleRegionsException"/>
/// <exception cref="MissingParameterException"/>
/// <exception cref="InvalidFileSystemException"/>
/// <exception cref="CanceledJobIdException"/>
/// <exception cref="MissingCustomsException"/>
/// <exception cref="NoSuchBucketException"/>
/// <exception cref="ExpiredJobIdException"/>
/// <exception cref="InvalidAccessKeyIdException"/>
/// <exception cref="InvalidCustomsException"/>
/// <exception cref="InvalidManifestFieldException"/>
/// <exception cref="MissingManifestFieldException"/>
/// <exception cref="InvalidJobIdException"/>
UpdateJobResponse UpdateJob(UpdateJobRequest updateJobRequest);
/// <summary>
/// Initiates the asynchronous execution of the UpdateJob operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.UpdateJob"/>
/// </summary>
///
/// <param name="updateJobRequest">Container for the necessary parameters to execute the UpdateJob operation on AmazonImportExport.</param>
/// <param name="callback">An AsyncCallback delegate that is invoked when the operation completes.</param>
/// <param name="state">A user-defined state object that is passed to the callback procedure. Retrieve this object from within the callback
/// procedure using the AsyncState property.</param>
///
/// <returns>An IAsyncResult that can be used to poll or wait for results, or both; this value is also needed when invoking EndUpdateJob
/// operation.</returns>
IAsyncResult BeginUpdateJob(UpdateJobRequest updateJobRequest, AsyncCallback callback, object state);
/// <summary>
/// Finishes the asynchronous execution of the UpdateJob operation.
/// <seealso cref="Amazon.ImportExport.AmazonImportExport.UpdateJob"/>
/// </summary>
///
/// <param name="asyncResult">The IAsyncResult returned by the call to BeginUpdateJob.</param>
///
/// <returns>Returns a UpdateJobResult from AmazonImportExport.</returns>
UpdateJobResponse EndUpdateJob(IAsyncResult asyncResult);
#endregion
}
}
| {'content_hash': '5911318bae69956c7373cde7a29b52d9', 'timestamp': '', 'source': 'github', 'line_count': 285, 'max_line_length': 152, 'avg_line_length': 54.112280701754386, 'alnum_prop': 0.66463493710284, 'repo_name': 'emcvipr/dataservices-sdk-dotnet', 'id': '333fd9ee5ae6d01af65f00188493f8ff9ece177e', 'size': '16009', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'AWSSDK/Amazon.ImportExport/AmazonImportExport.cs', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C#', 'bytes': '30500772'}, {'name': 'Shell', 'bytes': '1726'}, {'name': 'XSLT', 'bytes': '337772'}]} |
title: Nominate Student of the Month
nav: educate_nav
rightbar: blank
---
[#sotw-intro]
# Student of the Month winners
- [JC](http://blog.code.org/post/114154520598/sotw29), 4th grade, Wilmington, NC
- [Nadia](http://blog.code.org/post/112525708808/sotw28), 4th grade, Yardley, PA
- [Nishanth](http://blog.code.org/post/111481781993/sotw27), 11th grade, San Ramon, CA
- [Nikki](http://blog.code.org/post/110291421258/sotw26), 10th grade, Charleston, SC
- [Maxeen](http://blog.code.org/post/109514069813/sotw25), 10th grade, Charleston, SC
- [Spencer](http://blog.code.org/post/108836545868/sotw24), 12th grade, Clayton, OH
- [Tyler](http://blog.code.org/post/101932717833/sotw23), Grade 9, Stow, Ohio
- [Luna](http://blog.code.org/post/100268410698/sotw22), 11th Grade, New York, NY
- [Lauren](http://blog.code.org/post/97828511078/sotw21), 10th grade, Beachwood, OH
- [Paulina](http://codeorg.tumblr.com/post/96102625248/sotw20), 9th grade, Mansfield, MA
- [Djassi](http://codeorg.tumblr.com/post/95389174313/sotw19), 11th grade, New York, NY
- [Courtney](http://codeorg.tumblr.com/post/94192118883/sotw18), 11th grade, Harrisburg, PA
- [Nikolas](http://codeorg.tumblr.com/post/92848143393/sotw17), 11th grade, New York, NY
- [Peter](http://codeorg.tumblr.com/post/92145747203/sotw16), 11th grade, Bend, OR
- [Danielle](http://codeorg.tumblr.com/post/91469829263/sotw15), 5th grade, Melbourne, FL
- [Fiona](http://codeorg.tumblr.com/post/89385292873/sotw12), 6th grade, Tacoma, WA
- [Semira](http://codeorg.tumblr.com/post/90098194043/sotw14), 6th grade, Tacoma, WA
- [Tejas](http://codeorg.tumblr.com/post/88676547818/sotw12), 9th grade, Brentwood, TN
- [Gili](http://codeorg.tumblr.com/post/88006093183/sotw11), 11th grade, Albany, NY
- [Michael](http://codeorg.tumblr.com/post/85935883173/sotw9), 12th grade, Miami, FL
- [Megan](http://codeorg.tumblr.com/post/85245168653/sotw8), 6th grade, Rowlett, TX
- [Jared](http://codeorg.tumblr.com/post/84543970908/sotw7), 12th grade, Fennimore, WI
- [Natalie](http://codeorg.tumblr.com/post/83841380493/sotw6), 11th grade, Waunakee, WI
- [Alex](http://codeorg.tumblr.com/post/83005221286/sotw5), 3rd grade, Phoenix, AZ
- [Milena](http://codeorg.tumblr.com/post/82392567045/sotw4), 7th grade, Chicago, IL
- [Mimi](http://codeorg.tumblr.com/post/81593199113/sotw3), 4th grade, Houston, TX
- [Alec](http://codeorg.tumblr.com/post/80891043398/sotw2), 10th grade, Tolland, CT
- [Akila](http://codeorg.tumblr.com/post/80181911110/sotw1), 12th grade, Langley, VA
# Nominate a Student of the Month
## We want to recognize students who are changing the face of computer science.
What does a Code.org **Student of the Month** look like?
- A student who's learning in the face of adversity
- One who's building amazing things with computer science
- Or, one who's helping their peers learn, and is bringing computer science to others.
Every winning student will get a prize from Code.org and be featured online. See [past Code.org Students of the month](http://codeorg.tumblr.com/tagged/sotw).
### Please tell us about a rockstar student you know
Whether they're just learning the basics or they're already a coding whiz, all K-12 nominations are welcome. The world deserves to know about the amazing work they're doing!
[/sotw-intro]
[#thanks]
Thanks for your nomination!
[/thanks]
<%= view :nominate_student_form %>
<br />
<br />
[See Teacher of the Month winners](/totm)
| {'content_hash': '80ab795309a47b58511a10a14d439374', 'timestamp': '', 'source': 'github', 'line_count': 66, 'max_line_length': 158, 'avg_line_length': 51.63636363636363, 'alnum_prop': 0.7417840375586855, 'repo_name': 'dillia23/code-dot-org', 'id': 'cc932abb73ce938d8f94b6d4150a50347e25278c', 'size': '3412', 'binary': False, 'copies': '4', 'ref': 'refs/heads/staging', 'path': 'pegasus/sites.v3/code.org/public/sotm-old.md', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ApacheConf', 'bytes': '93'}, {'name': 'C++', 'bytes': '4844'}, {'name': 'CSS', 'bytes': '2398346'}, {'name': 'Cucumber', 'bytes': '130717'}, {'name': 'Emacs Lisp', 'bytes': '2410'}, {'name': 'HTML', 'bytes': '9371177'}, {'name': 'JavaScript', 'bytes': '83773111'}, {'name': 'PHP', 'bytes': '2303483'}, {'name': 'Perl', 'bytes': '14821'}, {'name': 'Processing', 'bytes': '11068'}, {'name': 'Prolog', 'bytes': '679'}, {'name': 'Python', 'bytes': '124866'}, {'name': 'Racket', 'bytes': '131852'}, {'name': 'Ruby', 'bytes': '2070008'}, {'name': 'Shell', 'bytes': '37544'}, {'name': 'SourcePawn', 'bytes': '74109'}]} |
SELECT
"s_store_name"
, "s_store_id"
, "sum"((CASE WHEN ("d_day_name" = 'Sunday ') THEN "ss_sales_price" ELSE null END)) "sun_sales"
, "sum"((CASE WHEN ("d_day_name" = 'Monday ') THEN "ss_sales_price" ELSE null END)) "mon_sales"
, "sum"((CASE WHEN ("d_day_name" = 'Tuesday ') THEN "ss_sales_price" ELSE null END)) "tue_sales"
, "sum"((CASE WHEN ("d_day_name" = 'Wednesday') THEN "ss_sales_price" ELSE null END)) "wed_sales"
, "sum"((CASE WHEN ("d_day_name" = 'Thursday ') THEN "ss_sales_price" ELSE null END)) "thu_sales"
, "sum"((CASE WHEN ("d_day_name" = 'Friday ') THEN "ss_sales_price" ELSE null END)) "fri_sales"
, "sum"((CASE WHEN ("d_day_name" = 'Saturday ') THEN "ss_sales_price" ELSE null END)) "sat_sales"
FROM
${database}.${schema}.date_dim
, ${database}.${schema}.store_sales
, ${database}.${schema}.store
WHERE ("d_date_sk" = "ss_sold_date_sk")
AND ("s_store_sk" = "ss_store_sk")
AND ("s_gmt_offset" = -5)
AND ("d_year" = 2000)
GROUP BY "s_store_name", "s_store_id"
ORDER BY "s_store_name" ASC, "s_store_id" ASC, "sun_sales" ASC, "mon_sales" ASC, "tue_sales" ASC, "wed_sales" ASC, "thu_sales" ASC, "fri_sales" ASC, "sat_sales" ASC
LIMIT 100
| {'content_hash': 'd4c692dd67d68bfcf73745f96ee9badf', 'timestamp': '', 'source': 'github', 'line_count': 21, 'max_line_length': 164, 'avg_line_length': 55.904761904761905, 'alnum_prop': 0.6226575809199318, 'repo_name': 'wagnermarkd/presto', 'id': '4ab8e55b9ccce456b8e87a370b760e1e88080da8', 'size': '1174', 'binary': False, 'copies': '21', 'ref': 'refs/heads/master', 'path': 'presto-benchto-benchmarks/src/main/resources/sql/presto/tpcds/q43.sql', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ANTLR', 'bytes': '26588'}, {'name': 'CSS', 'bytes': '12676'}, {'name': 'HTML', 'bytes': '55873'}, {'name': 'Java', 'bytes': '26011186'}, {'name': 'JavaScript', 'bytes': '201042'}, {'name': 'Makefile', 'bytes': '6822'}, {'name': 'PLSQL', 'bytes': '7831'}, {'name': 'Python', 'bytes': '6568'}, {'name': 'SQLPL', 'bytes': '8048'}, {'name': 'Shell', 'bytes': '25153'}, {'name': 'Thrift', 'bytes': '10742'}]} |
import io
import unittest
from wc2 import count_lines_and_words
class CountLinesAndWordsTests(unittest.TestCase):
def test_returns_correct_lines_and_words_for_empty_file(self):
file = io.StringIO('')
lines, words = count_lines_and_words(file)
self.assertEqual(lines, 0)
self.assertEqual(words, 0)
def test_returns_correct_lines_and_words_for_file_with_single_line(self):
file = io.StringIO('a b c')
lines, words = count_lines_and_words(file)
self.assertEqual(lines, 1)
self.assertEqual(words, 3)
def test_returns_correct_lines_and_words_for_file_with_empty_line(self):
file = io.StringIO('a\n\nb')
lines, words = count_lines_and_words(file)
self.assertEqual(lines, 3)
self.assertEqual(words, 2)
| {'content_hash': '590050f08fba639335c32b6aab9fff47', 'timestamp': '', 'source': 'github', 'line_count': 29, 'max_line_length': 77, 'avg_line_length': 28.03448275862069, 'alnum_prop': 0.6580565805658056, 'repo_name': 's3rvac/talks', 'id': 'c8391e065df341e2ca07ab31391aad25fc797bcd', 'size': '850', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': '2017-03-07-Introduction-to-Python/demos/wc2_tests.py', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'C', 'bytes': '1007'}, {'name': 'C++', 'bytes': '2904'}, {'name': 'Python', 'bytes': '102073'}, {'name': 'Rust', 'bytes': '2565'}]} |
module OS
def self.mac?
/darwin/i === RUBY_PLATFORM
end
def self.linux?
/linux/i === RUBY_PLATFORM
end
if OS.mac?
GITHUB_REPOSITORY = "homebrew"
ISSUES_URL = "https://git.io/brew-troubleshooting"
PATH_OPEN = "/usr/bin/open"
PATH_PATCH = "/usr/bin/patch"
elsif OS.linux?
GITHUB_REPOSITORY = "linuxbrew"
ISSUES_URL = "https://github.com/Homebrew/#{GITHUB_REPOSITORY}/blob/master/share/doc/homebrew/Troubleshooting.md#troubleshooting"
PATH_OPEN = "xdg-open"
PATH_PATCH = "patch"
else
raise "Unknown operating system"
end
end
| {'content_hash': '8318dbc851328cc44fc52f9896b62aaa', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 133, 'avg_line_length': 25.391304347826086, 'alnum_prop': 0.6643835616438356, 'repo_name': 'sarvex/linuxbrew', 'id': '4631fac3d005e8d22d7dac31f7a5ce21d4162735', 'size': '584', 'binary': False, 'copies': '30', 'ref': 'refs/heads/master', 'path': 'Library/Homebrew/os.rb', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'Groff', 'bytes': '25076'}, {'name': 'JavaScript', 'bytes': '18'}, {'name': 'Perl', 'bytes': '547'}, {'name': 'PostScript', 'bytes': '485'}, {'name': 'Ruby', 'bytes': '4368222'}, {'name': 'Shell', 'bytes': '19062'}]} |
Deploy a CoreOS environment running Kubernetes. This particular guide is made to help those on an OFFLINE system, whether you are testing a POC before the real deal or you are restricted to running totally offline for your applications.
## High Level Design
1. Manage the tftp directory
* /tftpboot/(coreos)(centos)(RHEL)
  * /tftpboot/pxelinux.cfg/(MAC) -> linked to Linux image config file
2. Update per install the link for pxelinux
3. Update the DHCP config to reflect the host needing deployment
4. Set up nodes to deploy CoreOS, creating an etcd cluster.
5. Operate with no access to the public [etcd discovery tool](https://discovery.etcd.io/).
6. Install the CoreOS slaves to become Kubernetes minions.
## Pre-requisites
1. Installed *CentOS 6* for PXE server
2. At least two bare metal nodes to work with
## This Guide's variables
| Node Description | MAC | IP |
| :---------------------------- | :---------------: | :---------: |
| CoreOS/etcd/Kubernetes Master | d0:00:67:13:0d:00 | 10.20.30.40 |
| CoreOS Slave 1 | d0:00:67:13:0d:01 | 10.20.30.41 |
| CoreOS Slave 2 | d0:00:67:13:0d:02 | 10.20.30.42 |
## Setup PXELINUX on CentOS
To set up a CentOS PXELINUX environment there is a complete [guide here](http://docs.fedoraproject.org/en-US/Fedora/7/html/Installation_Guide/ap-pxe-server.html). This section is the abbreviated version.
1. Install packages needed on CentOS
sudo yum install tftp-server dhcp syslinux
2. Edit ```vi /etc/xinetd.d/tftp``` to enable the tftp service and change disable to 'no'
disable = no
3. Copy over the syslinux images we will need.
su -
mkdir -p /tftpboot
cd /tftpboot
cp /usr/share/syslinux/pxelinux.0 /tftpboot
cp /usr/share/syslinux/menu.c32 /tftpboot
cp /usr/share/syslinux/memdisk /tftpboot
cp /usr/share/syslinux/mboot.c32 /tftpboot
cp /usr/share/syslinux/chain.c32 /tftpboot
/sbin/service dhcpd start
/sbin/service xinetd start
/sbin/chkconfig tftp on
4. Set up the default boot menu
mkdir /tftpboot/pxelinux.cfg
touch /tftpboot/pxelinux.cfg/default
5. Edit the menu ```vi /tftpboot/pxelinux.cfg/default```
default menu.c32
prompt 0
timeout 15
ONTIMEOUT local
display boot.msg
MENU TITLE Main Menu
LABEL local
MENU LABEL Boot local hard drive
LOCALBOOT 0
Now you should have a working PXELINUX setup to image CoreOS nodes. You can verify the services by using VirtualBox locally or with bare metal servers.
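For a quick sanity check of the TFTP service before booting anything, one option (assuming the ```tftp``` client package is installed on the PXE host) is to fetch the boot loader back through TFTP itself:

    yum install tftp
    cd /tmp
    tftp 127.0.0.1 -c get pxelinux.0
    ls -l /tmp/pxelinux.0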
## Adding CoreOS to PXE
This section describes how to set up the CoreOS images to live alongside a pre-existing PXELINUX environment.
1. Find or create the TFTP root directory that everything will be based off of.
* For this document we will assume ```/tftpboot/``` is our root directory.
2. Once we know and have our tftp root directory we will create a new directory structure for our CoreOS images.
3. Download the CoreOS PXE files provided by the CoreOS team and verify their signatures (see the note at the end of this section about importing the image signing key first).
MY_TFTPROOT_DIR=/tftpboot
mkdir -p $MY_TFTPROOT_DIR/images/coreos/
cd $MY_TFTPROOT_DIR/images/coreos/
wget http://stable.release.core-os.net/amd64-usr/current/coreos_production_pxe.vmlinuz
wget http://stable.release.core-os.net/amd64-usr/current/coreos_production_pxe.vmlinuz.sig
wget http://stable.release.core-os.net/amd64-usr/current/coreos_production_pxe_image.cpio.gz
wget http://stable.release.core-os.net/amd64-usr/current/coreos_production_pxe_image.cpio.gz.sig
gpg --verify coreos_production_pxe.vmlinuz.sig
gpg --verify coreos_production_pxe_image.cpio.gz.sig
4. Edit the menu ```vi /tftpboot/pxelinux.cfg/default``` again
default menu.c32
prompt 0
timeout 300
ONTIMEOUT local
display boot.msg
MENU TITLE Main Menu
LABEL local
MENU LABEL Boot local hard drive
LOCALBOOT 0
MENU BEGIN CoreOS Menu
LABEL coreos-master
MENU LABEL CoreOS Master
KERNEL images/coreos/coreos_production_pxe.vmlinuz
APPEND initrd=images/coreos/coreos_production_pxe_image.cpio.gz cloud-config-url=http://<xxx.xxx.xxx.xxx>/pxe-cloud-config-single-master.yml
LABEL coreos-slave
MENU LABEL CoreOS Slave
KERNEL images/coreos/coreos_production_pxe.vmlinuz
APPEND initrd=images/coreos/coreos_production_pxe_image.cpio.gz cloud-config-url=http://<xxx.xxx.xxx.xxx>/pxe-cloud-config-slave.yml
MENU END
This configuration file will now boot from local drive but have the option to PXE image CoreOS.
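Note that the ```gpg --verify``` commands above assume the CoreOS Image Signing Key is already in the keyring of the user running them. On an offline host you would typically copy the public key over from a machine that already has it and import it first; the path below is only a placeholder for wherever you copied the key:

    gpg --import /path/to/CoreOS_Image_Signing_Key.asc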
## DHCP configuration
This section covers configuring the DHCP server to hand out our new images. In this case we are assuming that there are other servers that will boot alongside other images.
1. Add the ```filename``` to the _host_ or _subnet_ sections; the path is relative to the TFTP root directory.
        filename "/pxelinux.0";
2. At this point we want to add _host_ entries that map each node to the pxelinux configuration files we will create later in this guide.
subnet 10.20.30.0 netmask 255.255.255.0 {
next-server 10.20.30.242;
option broadcast-address 10.20.30.255;
filename "<other default image>";
...
# http://www.syslinux.org/wiki/index.php/PXELINUX
host core_os_master {
hardware ethernet d0:00:67:13:0d:00;
option routers 10.20.30.1;
fixed-address 10.20.30.40;
option domain-name-servers 10.20.30.242;
filename "/pxelinux.0";
}
host core_os_slave {
hardware ethernet d0:00:67:13:0d:01;
option routers 10.20.30.1;
fixed-address 10.20.30.41;
option domain-name-servers 10.20.30.242;
filename "/pxelinux.0";
}
host core_os_slave2 {
hardware ethernet d0:00:67:13:0d:02;
option routers 10.20.30.1;
fixed-address 10.20.30.42;
option domain-name-servers 10.20.30.242;
filename "/pxelinux.0";
}
...
}
We will be specifying the node configuration later in the guide.
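After editing the DHCP configuration it is worth validating it and restarting the service before rebooting any nodes. Assuming the stock CentOS 6 location of /etc/dhcp/dhcpd.conf:

    dhcpd -t -cf /etc/dhcp/dhcpd.conf
    /sbin/service dhcpd restart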
# Kubernetes
To deploy our configuration we need to create an ```etcd``` master. To do so we want to PXE boot CoreOS with a specific cloud-config.yml. There are two options here:
1. Template the cloud-config file and programmatically create new static configs for different cluster setups.
2. Have a service discovery protocol running in our stack to do auto discovery.
In this demo we just create a single static ```etcd``` server to host our Kubernetes and ```etcd``` masters.
Since we are OFFLINE, most of the helper services CoreOS and Kubernetes normally rely on are unavailable. For our setup we therefore have to download the Kubernetes binaries and serve them from our local environment.
An easy solution is to host a small web server on the DHCP/TFTP host that makes all our binaries available to the local CoreOS PXE machines.
To get this up and running we are going to set up a simple ```apache``` server to serve the binaries needed to bootstrap Kubernetes.
This is on the PXE server from the previous section:
rm /etc/httpd/conf.d/welcome.conf
cd /var/www/html/
wget -O kube-register https://github.com/kelseyhightower/kube-register/releases/download/v0.0.2/kube-register-0.0.2-linux-amd64
wget -O setup-network-environment https://github.com/kelseyhightower/setup-network-environment/releases/download/v1.0.0/setup-network-environment
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kubernetes --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kube-apiserver --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kube-controller-manager --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kube-scheduler --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kubectl --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kubecfg --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kubelet --no-check-certificate
wget https://storage.googleapis.com/kubernetes-release/release/v0.15.0/bin/linux/amd64/kube-proxy --no-check-certificate
wget -O flanneld https://storage.googleapis.com/k8s/flanneld --no-check-certificate
This provides the binaries we need to run Kubernetes. In the future this could be enhanced to pull updates from the Internet when a connection is available.
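The commands above assume ```httpd``` is already installed on the PXE host. If it is not, the standard CentOS 6 packages and service tools will install it and keep it serving /var/www/html across reboots:

    yum install httpd
    /sbin/service httpd start
    /sbin/chkconfig httpd on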
Now for the good stuff!
## Cloud Configs
The following config files are tailored for the OFFLINE version of a Kubernetes deployment.
These are based on the work found here: [master.yml](http://docs.k8s.io/getting-started-guides/coreos/cloud-configs/master.yaml), [node.yml](http://docs.k8s.io/getting-started-guides/coreos/cloud-configs/node.yaml)
To make the setup work, you need to replace a few placeholders:
- Replace `<PXE_SERVER_IP>` with your PXE server ip address (e.g. 10.20.30.242)
- Replace `<MASTER_SERVER_IP>` with the kubernetes master ip address (e.g. 10.20.30.40)
- If you run a private docker registry, replace `rdocker.example.com` with your docker registry dns name.
- If you use a proxy, replace `rproxy.example.com` with your proxy server (and port)
- Add your own SSH public key(s) to the cloud config at the end
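As a convenience, once the cloud-config files from the next sections exist you can substitute the placeholders with a couple of ```sed``` one-liners. The example below uses this guide's example addresses (10.20.30.242 for the PXE server, 10.20.30.40 for the master); adjust them to your environment:

    cd /var/www/html/coreos
    sed -i 's/<PXE_SERVER_IP>/10.20.30.242/g' pxe-cloud-config-master.yml pxe-cloud-config-slave.yml
    sed -i 's/<MASTER_SERVER_IP>/10.20.30.40/g' pxe-cloud-config-slave.yml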
### master.yml
On the PXE server create the file and fill in the variables: ```vi /var/www/html/coreos/pxe-cloud-config-master.yml```.
#cloud-config
---
write_files:
- path: /opt/bin/waiter.sh
owner: root
content: |
#! /usr/bin/bash
until curl http://127.0.0.1:4001/v2/machines; do sleep 2; done
- path: /opt/bin/kubernetes-download.sh
owner: root
permissions: 0755
content: |
#! /usr/bin/bash
/usr/bin/wget -N -P "/opt/bin" "http://<PXE_SERVER_IP>/kubectl"
/usr/bin/wget -N -P "/opt/bin" "http://<PXE_SERVER_IP>/kubernetes"
/usr/bin/wget -N -P "/opt/bin" "http://<PXE_SERVER_IP>/kubecfg"
chmod +x /opt/bin/*
- path: /etc/profile.d/opt-path.sh
owner: root
permissions: 0755
content: |
#! /usr/bin/bash
        PATH=$PATH:/opt/bin
coreos:
units:
- name: 10-eno1.network
runtime: true
content: |
[Match]
Name=eno1
[Network]
DHCP=yes
- name: 20-nodhcp.network
runtime: true
content: |
[Match]
Name=en*
[Network]
DHCP=none
- name: get-kube-tools.service
runtime: true
command: start
content: |
[Service]
ExecStartPre=-/usr/bin/mkdir -p /opt/bin
ExecStart=/opt/bin/kubernetes-download.sh
RemainAfterExit=yes
Type=oneshot
- name: setup-network-environment.service
command: start
content: |
[Unit]
Description=Setup Network Environment
Documentation=https://github.com/kelseyhightower/setup-network-environment
Requires=network-online.target
After=network-online.target
[Service]
ExecStartPre=-/usr/bin/mkdir -p /opt/bin
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/setup-network-environment
ExecStartPre=/usr/bin/chmod +x /opt/bin/setup-network-environment
ExecStart=/opt/bin/setup-network-environment
RemainAfterExit=yes
Type=oneshot
- name: etcd.service
command: start
content: |
[Unit]
Description=etcd
Requires=setup-network-environment.service
After=setup-network-environment.service
[Service]
EnvironmentFile=/etc/network-environment
User=etcd
PermissionsStartOnly=true
ExecStart=/usr/bin/etcd \
--name ${DEFAULT_IPV4} \
--addr ${DEFAULT_IPV4}:4001 \
--bind-addr 0.0.0.0 \
--cluster-active-size 1 \
--data-dir /var/lib/etcd \
--http-read-timeout 86400 \
--peer-addr ${DEFAULT_IPV4}:7001 \
--snapshot true
Restart=always
RestartSec=10s
- name: fleet.socket
command: start
content: |
[Socket]
ListenStream=/var/run/fleet.sock
- name: fleet.service
command: start
content: |
[Unit]
Description=fleet daemon
Wants=etcd.service
After=etcd.service
Wants=fleet.socket
After=fleet.socket
[Service]
Environment="FLEET_ETCD_SERVERS=http://127.0.0.1:4001"
Environment="FLEET_METADATA=role=master"
ExecStart=/usr/bin/fleetd
Restart=always
RestartSec=10s
- name: etcd-waiter.service
command: start
content: |
[Unit]
Description=etcd waiter
Wants=network-online.target
Wants=etcd.service
After=etcd.service
After=network-online.target
Before=flannel.service
Before=setup-network-environment.service
[Service]
ExecStartPre=/usr/bin/chmod +x /opt/bin/waiter.sh
ExecStart=/usr/bin/bash /opt/bin/waiter.sh
RemainAfterExit=true
Type=oneshot
- name: flannel.service
command: start
content: |
[Unit]
Wants=etcd-waiter.service
After=etcd-waiter.service
Requires=etcd.service
After=etcd.service
After=network-online.target
Wants=network-online.target
Description=flannel is an etcd backed overlay network for containers
[Service]
Type=notify
ExecStartPre=-/usr/bin/mkdir -p /opt/bin
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/flanneld
ExecStartPre=/usr/bin/chmod +x /opt/bin/flanneld
ExecStartPre=-/usr/bin/etcdctl mk /coreos.com/network/config '{"Network":"10.100.0.0/16", "Backend": {"Type": "vxlan"}}'
ExecStart=/opt/bin/flanneld
- name: kube-apiserver.service
command: start
content: |
[Unit]
Description=Kubernetes API Server
Documentation=https://github.com/GoogleCloudPlatform/kubernetes
Requires=etcd.service
After=etcd.service
[Service]
ExecStartPre=-/usr/bin/mkdir -p /opt/bin
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/kube-apiserver
ExecStartPre=/usr/bin/chmod +x /opt/bin/kube-apiserver
ExecStart=/opt/bin/kube-apiserver \
--address=0.0.0.0 \
--port=8080 \
--portal_net=10.100.0.0/16 \
--etcd_servers=http://127.0.0.1:4001 \
--logtostderr=true
Restart=always
RestartSec=10
- name: kube-controller-manager.service
command: start
content: |
[Unit]
Description=Kubernetes Controller Manager
Documentation=https://github.com/GoogleCloudPlatform/kubernetes
Requires=kube-apiserver.service
After=kube-apiserver.service
[Service]
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/kube-controller-manager
ExecStartPre=/usr/bin/chmod +x /opt/bin/kube-controller-manager
ExecStart=/opt/bin/kube-controller-manager \
--master=127.0.0.1:8080 \
--logtostderr=true
Restart=always
RestartSec=10
- name: kube-scheduler.service
command: start
content: |
[Unit]
Description=Kubernetes Scheduler
Documentation=https://github.com/GoogleCloudPlatform/kubernetes
Requires=kube-apiserver.service
After=kube-apiserver.service
[Service]
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/kube-scheduler
ExecStartPre=/usr/bin/chmod +x /opt/bin/kube-scheduler
ExecStart=/opt/bin/kube-scheduler --master=127.0.0.1:8080
Restart=always
RestartSec=10
- name: kube-register.service
command: start
content: |
[Unit]
Description=Kubernetes Registration Service
Documentation=https://github.com/kelseyhightower/kube-register
Requires=kube-apiserver.service
After=kube-apiserver.service
Requires=fleet.service
After=fleet.service
[Service]
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/kube-register
ExecStartPre=/usr/bin/chmod +x /opt/bin/kube-register
ExecStart=/opt/bin/kube-register \
--metadata=role=node \
--fleet-endpoint=unix:///var/run/fleet.sock \
--healthz-port=10248 \
--api-endpoint=http://127.0.0.1:8080
Restart=always
RestartSec=10
update:
group: stable
reboot-strategy: off
ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAAD...
### node.yml
On the PXE server create the file and fill in the variables: ```vi /var/www/html/coreos/pxe-cloud-config-slave.yml```.
#cloud-config
---
write_files:
- path: /etc/default/docker
content: |
DOCKER_EXTRA_OPTS='--insecure-registry="rdocker.example.com:5000"'
coreos:
units:
- name: 10-eno1.network
runtime: true
content: |
[Match]
Name=eno1
[Network]
DHCP=yes
- name: 20-nodhcp.network
runtime: true
content: |
[Match]
Name=en*
[Network]
DHCP=none
- name: etcd.service
mask: true
- name: docker.service
drop-ins:
- name: 50-insecure-registry.conf
content: |
[Service]
Environment="HTTP_PROXY=http://rproxy.example.com:3128/" "NO_PROXY=localhost,127.0.0.0/8,rdocker.example.com"
- name: fleet.service
command: start
content: |
[Unit]
Description=fleet daemon
Wants=fleet.socket
After=fleet.socket
[Service]
Environment="FLEET_ETCD_SERVERS=http://<MASTER_SERVER_IP>:4001"
Environment="FLEET_METADATA=role=node"
ExecStart=/usr/bin/fleetd
Restart=always
RestartSec=10s
- name: flannel.service
command: start
content: |
[Unit]
After=network-online.target
Wants=network-online.target
Description=flannel is an etcd backed overlay network for containers
[Service]
Type=notify
ExecStartPre=-/usr/bin/mkdir -p /opt/bin
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/flanneld
ExecStartPre=/usr/bin/chmod +x /opt/bin/flanneld
ExecStart=/opt/bin/flanneld -etcd-endpoints http://<MASTER_SERVER_IP>:4001
- name: docker.service
command: start
content: |
[Unit]
After=flannel.service
Wants=flannel.service
Description=Docker Application Container Engine
Documentation=http://docs.docker.io
[Service]
EnvironmentFile=-/etc/default/docker
EnvironmentFile=/run/flannel/subnet.env
ExecStartPre=/bin/mount --make-rprivate /
ExecStart=/usr/bin/docker -d --bip=${FLANNEL_SUBNET} --mtu=${FLANNEL_MTU} -s=overlay -H fd:// ${DOCKER_EXTRA_OPTS}
[Install]
WantedBy=multi-user.target
- name: setup-network-environment.service
command: start
content: |
[Unit]
Description=Setup Network Environment
Documentation=https://github.com/kelseyhightower/setup-network-environment
Requires=network-online.target
After=network-online.target
[Service]
ExecStartPre=-/usr/bin/mkdir -p /opt/bin
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/setup-network-environment
ExecStartPre=/usr/bin/chmod +x /opt/bin/setup-network-environment
ExecStart=/opt/bin/setup-network-environment
RemainAfterExit=yes
Type=oneshot
- name: kube-proxy.service
command: start
content: |
[Unit]
Description=Kubernetes Proxy
Documentation=https://github.com/GoogleCloudPlatform/kubernetes
Requires=setup-network-environment.service
After=setup-network-environment.service
[Service]
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/kube-proxy
ExecStartPre=/usr/bin/chmod +x /opt/bin/kube-proxy
ExecStart=/opt/bin/kube-proxy \
--etcd_servers=http://<MASTER_SERVER_IP>:4001 \
--logtostderr=true
Restart=always
RestartSec=10
- name: kube-kubelet.service
command: start
content: |
[Unit]
Description=Kubernetes Kubelet
Documentation=https://github.com/GoogleCloudPlatform/kubernetes
Requires=setup-network-environment.service
After=setup-network-environment.service
[Service]
EnvironmentFile=/etc/network-environment
ExecStartPre=/usr/bin/wget -N -P /opt/bin http://<PXE_SERVER_IP>/kubelet
ExecStartPre=/usr/bin/chmod +x /opt/bin/kubelet
ExecStart=/opt/bin/kubelet \
--address=0.0.0.0 \
--port=10250 \
--hostname_override=${DEFAULT_IPV4} \
--api_servers=<MASTER_SERVER_IP>:8080 \
--healthz_bind_address=0.0.0.0 \
--healthz_port=10248 \
--logtostderr=true
Restart=always
RestartSec=10
update:
group: stable
reboot-strategy: off
ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAAD...
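Cloud-config files are easy to break with stray indentation, so it can save a reboot cycle to validate them before serving them. If you have access to any running CoreOS machine you can copy the two files over and run a quick check (file names match the ones created above):

    coreos-cloudinit -validate -from-file pxe-cloud-config-master.yml
    coreos-cloudinit -validate -from-file pxe-cloud-config-slave.yml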
## New pxelinux.cfg file
Create a pxelinux target file for a _slave_ node: ```vi /tftpboot/pxelinux.cfg/coreos-node-slave```
default coreos
prompt 1
timeout 15
display boot.msg
label coreos
menu default
kernel images/coreos/coreos_production_pxe.vmlinuz
append initrd=images/coreos/coreos_production_pxe_image.cpio.gz cloud-config-url=http://<pxe-host-ip>/coreos/pxe-cloud-config-slave.yml console=tty0 console=ttyS0 coreos.autologin=tty1 coreos.autologin=ttyS0
And one for the _master_ node: ```vi /tftpboot/pxelinux.cfg/coreos-node-master```
default coreos
prompt 1
timeout 15
display boot.msg
label coreos
menu default
kernel images/coreos/coreos_production_pxe.vmlinuz
append initrd=images/coreos/coreos_production_pxe_image.cpio.gz cloud-config-url=http://<pxe-host-ip>/coreos/pxe-cloud-config-master.yml console=tty0 console=ttyS0 coreos.autologin=tty1 coreos.autologin=ttyS0
## Specify the pxelinux targets
Now that we have our new targets set up for master and slave, we want to point the specific hosts at those targets. We will do this by using the pxelinux mechanism of mapping a specific MAC address to a specific pxelinux.cfg file.
Refer to the MAC address table in the beginning of this guide. Documentation for more details can be found [here](http://www.syslinux.org/wiki/index.php/PXELINUX).
cd /tftpboot/pxelinux.cfg
ln -s coreos-node-master 01-d0-00-67-13-0d-00
ln -s coreos-node-slave 01-d0-00-67-13-0d-01
ln -s coreos-node-slave 01-d0-00-67-13-0d-02
Reboot these servers to get the images PXEd and ready for running containers!
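Once the machines come back up you can confirm from the master that the slaves registered themselves (kube-register adds them as they join fleet); this can take a minute or two after boot:

    /opt/bin/kubectl get minions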
## Creating a test pod
Now that CoreOS with Kubernetes installed is up and running, let's spin up some Kubernetes pods to demonstrate the system.
You can do a full walkthrough using the [Kubernetes docs](https://github.com/GoogleCloudPlatform/kubernetes/tree/master/examples/walkthrough), or use the following example for a quick test.
On the Kubernetes master node let's create ```nginx.yml```:
apiVersion: v1beta1
kind: Pod
id: www
desiredState:
manifest:
version: v1beta1
id: www
containers:
- name: nginx
image: nginx
Now for the service: ```nginx-service.yml```
kind: Service
apiVersion: v1beta1
# must be a DNS compatible name
id: nginx-example
# the port that this service should serve on
port: 80
# just like the selector in the replication controller,
# but this time it identifies the set of pods to load balance
# traffic to.
selector:
name: www
# the container on each pod to connect to, can be a name
# (e.g. 'www') or a number (e.g. 80)
containerPort: 80
Now add the pod to Kubernetes:
kubectl create -f nginx.yml
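The service defined in ```nginx-service.yml``` is added the same way (assuming you saved it under that name next to ```nginx.yml```):

    kubectl create -f nginx-service.yml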
This might take a while to download depending on the environment.
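Keep in mind that in a truly offline environment the node cannot pull ```nginx``` from Docker Hub at all. One way around this, assuming you run the private registry referenced in node.yml (```rdocker.example.com:5000```), is to push the image there from a connected machine and point the pod at it:

    docker pull nginx
    docker tag nginx rdocker.example.com:5000/nginx
    docker push rdocker.example.com:5000/nginx

The pod's ```image:``` field would then reference ```rdocker.example.com:5000/nginx``` instead of ```nginx```.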
## Helpful commands for debugging
List all keys in etcd:
etcdctl ls --recursive
List fleet machines:
fleetctl list-machines
Check system status of services on the master node:
systemctl status kube-apiserver
systemctl status kube-controller-manager
systemctl status kube-scheduler
systemctl status kube-register
Check system status of services on a minion node:
systemctl status kube-kubelet
systemctl status docker.service
List Kubernetes pods and minions:
kubectl get pods
kubectl get minions
Kill all pods:
for i in `kubectl get pods | awk '{print $1}'`; do kubectl stop pod $i; done
| {'content_hash': '7c9ee5aee7b5cb922d7144ba372145fb', 'timestamp': '', 'source': 'github', 'line_count': 678, 'max_line_length': 234, 'avg_line_length': 39.75811209439528, 'alnum_prop': 0.6224217242914379, 'repo_name': 'tobegit3hub/kubernetes', 'id': 'fd323a42380a2d3be5ae5bb9b49da472a4640160', 'size': '27002', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'docs/getting-started-guides/coreos/bare_metal_offline.md', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '60606'}, {'name': 'Go', 'bytes': '9971791'}, {'name': 'HTML', 'bytes': '51569'}, {'name': 'Java', 'bytes': '3851'}, {'name': 'JavaScript', 'bytes': '177353'}, {'name': 'Makefile', 'bytes': '11664'}, {'name': 'Nginx', 'bytes': '1013'}, {'name': 'PHP', 'bytes': '736'}, {'name': 'Python', 'bytes': '60519'}, {'name': 'Ruby', 'bytes': '2778'}, {'name': 'SaltStack', 'bytes': '23791'}, {'name': 'Shell', 'bytes': '638927'}]} |
REPLACE INTO `cw_addons` (`addon`, `descr`, `active`, `status`, `parent`, `version`, `orderby`) VALUES ('mslive_login', 'Login via Microsoft Live account', 1, 1, '', '0.1', 0);
-- configuration options
REPLACE INTO `cw_config_categories` (`config_category_id` ,`category` ,`is_local`) VALUES (NULL , 'mslive_login', '0');
SET @config_category_id = LAST_INSERT_ID();
REPLACE INTO cw_config SET name='mslive_login_consumer_key', comment='MS Live API consumer key', value='', config_category_id=@config_category_id, orderby='100', type='text', defvalue='', variants='';
REPLACE INTO cw_config SET name='mslive_login_consumer_secret', comment='MS Live API consumer secret key', value='', config_category_id=@config_category_id, orderby='200', type='text', defvalue='', variants='';
-- Addon name/description
REPLACE INTO cw_languages SET code='EN', name='addon_descr_mslive_login', value='Login via Microsoft Live account', topic='Addons';
REPLACE INTO cw_languages SET code='EN', name='addon_name_mslive_login', value='Microsoft Live Login', topic='Addons';
| {'content_hash': 'b7344616ea24091b8dc80cba988294d8', 'timestamp': '', 'source': 'github', 'line_count': 12, 'max_line_length': 210, 'avg_line_length': 88.16666666666667, 'alnum_prop': 0.7126654064272212, 'repo_name': 'Cartworks/Platform', 'id': '8d9a87a15558a3408ea7b8f17c30b0061051ef14', 'size': '1067', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'core/addons/mslive_login/install.sql', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '4105'}, {'name': 'CSS', 'bytes': '1030520'}, {'name': 'HTML', 'bytes': '3347961'}, {'name': 'JavaScript', 'bytes': '2328159'}, {'name': 'PHP', 'bytes': '11383291'}, {'name': 'PLpgSQL', 'bytes': '41336'}, {'name': 'PostScript', 'bytes': '466581'}, {'name': 'SQLPL', 'bytes': '816'}, {'name': 'Smarty', 'bytes': '2561096'}, {'name': 'XSLT', 'bytes': '11238'}]} |
package org.drools.workbench.screens.guided.dtable.client.handlers;
import javax.inject.Inject;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.uibinder.client.UiHandler;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.Widget;
import org.drools.workbench.models.guided.dtable.shared.model.GuidedDecisionTable52;
import org.drools.workbench.screens.guided.dtable.client.resources.HitPolicyInternationalizer;
import org.gwtbootstrap3.client.ui.CheckBox;
import org.gwtbootstrap3.client.ui.Heading;
import org.gwtbootstrap3.client.ui.html.Paragraph;
/**
* A control providing options for creating a Guided Decision Table asset
*/
public class GuidedDecisionTableOptions extends Composite {
interface GuidedDecisionTableOptionsBinder
extends
UiBinder<Widget, GuidedDecisionTableOptions> {
}
private static GuidedDecisionTableOptionsBinder uiBinder = GWT.create(GuidedDecisionTableOptionsBinder.class);
@UiField
CheckBox chkUseWizard;
@UiField(provided = true)
HitPolicySelector hitPolicySelector;
@UiField
Heading hitPolicyDescriptionHeading;
@UiField
Paragraph hitPolicyDescriptionText;
private boolean isUsingWizard = false;
private GuidedDecisionTable52.TableFormat tableFormat = GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY;
private GuidedDecisionTable52.HitPolicy hitPolicy = GuidedDecisionTable52.HitPolicy.NONE;
@Inject
public GuidedDecisionTableOptions(final HitPolicySelector hitPolicySelector) {
this.hitPolicySelector = hitPolicySelector;
initWidget(uiBinder.createAndBindUi(this));
hitPolicySelector.addValueChangeHandler(result -> {
GuidedDecisionTableOptions.this.hitPolicy = result;
setHitPolicyDescription(result);
});
setHitPolicyDescription(GuidedDecisionTable52.HitPolicy.NONE);
}
public boolean isUsingWizard() {
return this.isUsingWizard;
}
public GuidedDecisionTable52.TableFormat getTableFormat() {
return this.tableFormat;
}
public GuidedDecisionTable52.HitPolicy getHitPolicy() {
return hitPolicy;
}
@UiHandler(value = "chkUseWizard")
void chkUseWizardClick(ClickEvent event) {
this.isUsingWizard = chkUseWizard.getValue();
}
@UiHandler(value = "optExtendedEntry")
void optExtendedEntryClick(ClickEvent event) {
tableFormat = GuidedDecisionTable52.TableFormat.EXTENDED_ENTRY;
}
@UiHandler(value = "optLimitedEntry")
void optLimitedEntryClick(ClickEvent event) {
tableFormat = GuidedDecisionTable52.TableFormat.LIMITED_ENTRY;
}
private void setHitPolicyDescription(GuidedDecisionTable52.HitPolicy hitPolicy) {
hitPolicyDescriptionHeading.setText(HitPolicyInternationalizer.internationalize(hitPolicy));
hitPolicyDescriptionText.setText(HitPolicyInternationalizer.internationalizeDescription(hitPolicy));
}
}
| {'content_hash': '34c642bdd6593fb504d848e93a49ef04', 'timestamp': '', 'source': 'github', 'line_count': 93, 'max_line_length': 114, 'avg_line_length': 33.774193548387096, 'alnum_prop': 0.7653613498885705, 'repo_name': 'jomarko/drools-wb', 'id': 'f8c829cb74c7a49efc23f7ac3638cf866f45f702', 'size': '3760', 'binary': False, 'copies': '8', 'ref': 'refs/heads/master', 'path': 'drools-wb-screens/drools-wb-guided-dtable-editor/drools-wb-guided-dtable-editor-client/src/main/java/org/drools/workbench/screens/guided/dtable/client/handlers/GuidedDecisionTableOptions.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '42765'}, {'name': 'HTML', 'bytes': '96669'}, {'name': 'Java', 'bytes': '10784877'}, {'name': 'JavaScript', 'bytes': '62056'}]} |
package net.metadata.dataspace.oaipmh.crosswalk;
import ORG.oclc.oai.server.crosswalk.Crosswalk;
import ORG.oclc.oai.server.verb.CannotDisseminateFormatException;
import net.metadata.dataspace.atom.util.AdapterOutputHelper;
import net.metadata.dataspace.atom.writer.XSLTTransformerWriter;
import net.metadata.dataspace.data.model.Version;
import net.metadata.dataspace.data.model.record.Activity;
import net.metadata.dataspace.data.model.record.Agent;
import net.metadata.dataspace.data.model.record.Collection;
import net.metadata.dataspace.data.model.record.Service;
import net.metadata.dataspace.data.model.version.ActivityVersion;
import net.metadata.dataspace.data.model.version.AgentVersion;
import net.metadata.dataspace.data.model.version.CollectionVersion;
import net.metadata.dataspace.data.model.version.ServiceVersion;
import org.apache.abdera.model.Entry;
import org.apache.abdera.util.AbstractWriterOptions;
import org.apache.log4j.Logger;
import java.util.Properties;
/**
* Author: alabri
* Date: 04/02/2011
* Time: 11:45:55 AM
*/
public class RIFCSCrosswalk extends Crosswalk {
private AdapterOutputHelper adapterOutputHelper = new AdapterOutputHelper();
private Logger logger = Logger.getLogger(getClass());
public RIFCSCrosswalk(Properties props) {
super("http://services.ands.org.au/documentation/rifcs/schema/registryObjects.xsd");
}
@Override
public String createMetadata(Object nativeItem) throws CannotDisseminateFormatException {
try {
logger.debug("Assembling a record");
String xslFilePath = "";
if (nativeItem instanceof ActivityVersion) {
xslFilePath = "/files/xslt/rifcs/atom2rifcs-" + Activity.class.getSimpleName().toLowerCase() + ".xsl";
} else if (nativeItem instanceof CollectionVersion) {
xslFilePath = "/files/xslt/rifcs/atom2rifcs-" + Collection.class.getSimpleName().toLowerCase() + ".xsl";
} else if (nativeItem instanceof AgentVersion) {
xslFilePath = "/files/xslt/rifcs/atom2rifcs-" + Agent.class.getSimpleName().toLowerCase() + ".xsl";
} else if (nativeItem instanceof ServiceVersion) {
xslFilePath = "/files/xslt/rifcs/atom2rifcs-" + Service.class.getSimpleName().toLowerCase() + ".xsl";
}
XSLTTransformerWriter writer = new XSLTTransformerWriter(xslFilePath);
Entry entry = adapterOutputHelper.getEntryFromEntity((Version) nativeItem, true);
AbstractWriterOptions writerOptions = new AbstractWriterOptions() {
};
writerOptions.setCharset("UTF8");
Object object = writer.write(entry, writerOptions);
String xmlContent = object.toString();
// need to strip the xml declaration - <?xml version="1.0"
// encoding="UTF-16"?>
if (xmlContent.startsWith("<?xml ")) {
xmlContent = xmlContent.substring(xmlContent.indexOf("?>") + 2);
}
return xmlContent;
} catch (Exception e) {
throw new CannotDisseminateFormatException(e.toString());
}
}
@Override
public boolean isAvailableFor(Object o) {
return true;
}
}
| {'content_hash': 'aadc483eaf0ebd8f04668f5fbb9ad317', 'timestamp': '', 'source': 'github', 'line_count': 72, 'max_line_length': 120, 'avg_line_length': 45.166666666666664, 'alnum_prop': 0.6958794587945879, 'repo_name': 'uq-eresearch/dataspace', 'id': '54bcb0767407c274b8271a781c6ef724b755602d', 'size': '3252', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'data-registry-webapp/src/main/java/net/metadata/dataspace/oaipmh/crosswalk/RIFCSCrosswalk.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Java', 'bytes': '526609'}, {'name': 'JavaScript', 'bytes': '327593'}, {'name': 'Ruby', 'bytes': '1009'}, {'name': 'Shell', 'bytes': '637'}, {'name': 'XSLT', 'bytes': '207459'}]} |
// THE SOFTWARE.
using System;
using Gdk;
namespace Xwt.GtkBackend
{
public static class Gtk2Extensions
{
public static void SetHasWindow (this Gtk.Widget widget, bool value)
{
if (value)
widget.WidgetFlags &= ~Gtk.WidgetFlags.NoWindow;
else
widget.WidgetFlags |= Gtk.WidgetFlags.NoWindow;
}
public static bool GetHasWindow (this Gtk.Widget widget)
{
return !widget.IsNoWindow;
}
public static void SetAppPaintable (this Gtk.Widget widget, bool value)
{
if (value)
widget.WidgetFlags |= Gtk.WidgetFlags.AppPaintable;
else
widget.WidgetFlags &= ~Gtk.WidgetFlags.AppPaintable;
}
public static void SetStateActive(this Gtk.Widget widget)
{
widget.State = Gtk.StateType.Active;
}
public static void SetStateNormal(this Gtk.Widget widget)
{
widget.State = Gtk.StateType.Normal;
}
public static void AddSignalHandler (this Gtk.Widget widget, string name, Delegate handler, Type args_type)
{
var signal = GLib.Signal.Lookup (widget, name, args_type);
signal.AddDelegate (handler);
}
public static void RemoveSignalHandler (this Gtk.Widget widget, string name, Delegate handler)
{
var signal = GLib.Signal.Lookup (widget, name);
signal.RemoveDelegate (handler);
}
public static Gdk.Pixbuf ToPixbuf (this Gdk.Window window, int src_x, int src_y, int width, int height)
{
return Gdk.Pixbuf.FromDrawable (window, Gdk.Colormap.System, src_x, src_y, 0, 0, width, height);
}
public static Gtk.CellRenderer[] GetCellRenderers (this Gtk.TreeViewColumn column)
{
return column.CellRenderers;
}
public static Gdk.DragAction GetSelectedAction (this Gdk.DragContext context)
{
return context.Action;
}
public static Gdk.Atom[] ListTargets (this Gdk.DragContext context)
{
return context.Targets;
}
public static void AddContent (this Gtk.Dialog dialog, Gtk.Widget widget, bool expand = true, bool fill = true, uint padding = 0)
{
dialog.VBox.PackStart (widget, expand, fill, padding);
}
public static void AddContent (this Gtk.MessageDialog dialog, Gtk.Widget widget, bool expand = true, bool fill = true, uint padding = 0)
{
var messageArea = dialog.GetMessageArea () ?? dialog.VBox;
messageArea.PackStart (widget, expand, fill, padding);
}
public static void SetContentSpacing (this Gtk.Dialog dialog, int spacing)
{
dialog.VBox.Spacing = spacing;
}
public static void SetTextColumn (this Gtk.ComboBox comboBox, int column)
{
((Gtk.ComboBoxEntry)comboBox).TextColumn = column;
}
public static void FixContainerLeak (this Gtk.Container c)
{
GtkWorkarounds.FixContainerLeak (c);
}
public static Xwt.Drawing.Color GetBackgroundColor (this Gtk.Widget widget)
{
return widget.GetBackgroundColor (Gtk.StateType.Normal);
}
public static Xwt.Drawing.Color GetBackgroundColor (this Gtk.Widget widget, Gtk.StateType state)
{
return widget.Style.Background (state).ToXwtValue ();
}
public static void SetBackgroundColor (this Gtk.Widget widget, Xwt.Drawing.Color color)
{
widget.SetBackgroundColor (Gtk.StateType.Normal, color);
}
public static void SetBackgroundColor (this Gtk.Widget widget, Gtk.StateType state, Xwt.Drawing.Color color)
{
widget.ModifyBg (state, color.ToGtkValue ());
}
public static void SetChildBackgroundColor (this Gtk.Container container, Xwt.Drawing.Color color)
{
foreach (var widget in container.Children)
widget.ModifyBg (Gtk.StateType.Normal, color.ToGtkValue ());
}
public static Xwt.Drawing.Color GetForegroundColor (this Gtk.Widget widget)
{
return widget.GetForegroundColor (Gtk.StateType.Normal);
}
public static Xwt.Drawing.Color GetForegroundColor (this Gtk.Widget widget, Gtk.StateType state)
{
return widget.Style.Foreground (state).ToXwtValue ();
}
public static void SetForegroundColor (this Gtk.Widget widget, Xwt.Drawing.Color color)
{
widget.SetForegroundColor (Gtk.StateType.Normal, color);
}
public static void SetForegroundColor (this Gtk.Widget widget, Gtk.StateType state, Xwt.Drawing.Color color)
{
widget.ModifyFg (state, color.ToGtkValue ());
}
public static Xwt.Drawing.Color GetTextColor (this Gtk.Widget widget)
{
return widget.GetTextColor (Gtk.StateType.Normal);
}
public static Xwt.Drawing.Color GetTextColor (this Gtk.Widget widget, Gtk.StateType state)
{
return widget.Style.Text (state).ToXwtValue ();
}
public static void SetTextColor (this Gtk.Widget widget, Xwt.Drawing.Color color)
{
widget.SetTextColor (Gtk.StateType.Normal, color);
}
public static void SetTextColor (this Gtk.Widget widget, Gtk.StateType state, Xwt.Drawing.Color color)
{
widget.ModifyText (state, color.ToGtkValue ());
}
public static string GetText (this Gtk.TextInsertedArgs args)
{
return args.Text;
}
public static void RenderPlaceholderText (this Gtk.Entry entry, Gtk.ExposeEventArgs args, string placeHolderText, ref Pango.Layout layout)
{
// The Entry's GdkWindow is the top level window onto which
// the frame is drawn; the actual text entry is drawn into a
// separate window, so we can ensure that for themes that don't
// respect HasFrame, we never ever allow the base frame drawing
// to happen
if (args.Event.Window == entry.GdkWindow)
return;
if (entry.Text.Length > 0)
return;
RenderPlaceholderText_internal (entry, args, placeHolderText, ref layout, entry.Xalign, 0.5f, 1, 0);
}
static void RenderPlaceholderText_internal (Gtk.Widget widget, Gtk.ExposeEventArgs args, string placeHolderText, ref Pango.Layout layout, float xalign, float yalign, int xpad, int ypad)
{
if (layout == null) {
layout = new Pango.Layout (widget.PangoContext);
layout.FontDescription = widget.PangoContext.FontDescription.Copy ();
}
int wh, ww;
args.Event.Window.GetSize (out ww, out wh);
int width, height;
layout.SetText (placeHolderText);
layout.GetPixelSize (out width, out height);
int x = xpad + (int)((ww - width) * xalign);
int y = ypad + (int)((wh - height) * yalign);
using (var gc = new Gdk.GC (args.Event.Window)) {
gc.Copy (widget.Style.TextGC (Gtk.StateType.Normal));
Xwt.Drawing.Color color_a = widget.Style.Base (Gtk.StateType.Normal).ToXwtValue ();
Xwt.Drawing.Color color_b = widget.Style.Text (Gtk.StateType.Normal).ToXwtValue ();
gc.RgbFgColor = color_b.BlendWith (color_a, 0.5).ToGtkValue ();
args.Event.Window.DrawLayout (gc, x, y, layout);
}
}
public static double GetSliderPosition (this Gtk.Scale scale)
{
Gtk.Orientation orientation;
if (scale is Gtk.HScale)
orientation = Gtk.Orientation.Horizontal;
else if (scale is Gtk.VScale)
orientation = Gtk.Orientation.Vertical;
else
throw new InvalidOperationException ("Can not obtain slider position from " + scale.GetType ());
var padding = (int)scale.StyleGetProperty ("focus-padding");
var slwidth = Convert.ToDouble (scale.StyleGetProperty ("slider-width"));
int orientationSize;
if (orientation == Gtk.Orientation.Horizontal)
orientationSize = scale.Allocation.Width - (2 * padding);
else
orientationSize = scale.Allocation.Height - (2 * padding);
double prct = 0;
if (scale.Adjustment.Lower >= 0) {
prct = (scale.Value / (scale.Adjustment.Upper - scale.Adjustment.Lower));
} else if (scale.Adjustment.Upper <= 0) {
prct = (Math.Abs (scale.Value) / Math.Abs (scale.Adjustment.Lower - scale.Adjustment.Upper));
} else if (scale.Adjustment.Lower < 0) {
if (scale.Value >= 0)
prct = 0.5 + ((scale.Value / 2) / scale.Adjustment.Upper);
else
prct = 0.5 - Math.Abs ((scale.Value / 2) / scale.Adjustment.Lower);
}
if (orientation == Gtk.Orientation.Vertical || scale.Inverted)
prct = 1 - prct;
return (int)(((orientationSize - (slwidth)) * prct) + (slwidth / 2));
}
}
}
| {'content_hash': 'e1ae66086109ffe4e603298ffd7b7ddd', 'timestamp': '', 'source': 'github', 'line_count': 250, 'max_line_length': 187, 'avg_line_length': 31.676, 'alnum_prop': 0.7098118449299154, 'repo_name': 'TheBrainTech/xwt', 'id': '8abab839bb41a4526c14b0569ce0cdfbe4b057b7', 'size': '9101', 'binary': False, 'copies': '8', 'ref': 'refs/heads/master', 'path': 'Xwt.Gtk/Xwt.GtkBackend/Gtk2Extensions.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '3855456'}, {'name': 'Makefile', 'bytes': '245'}]} |
'use strict';
var EventPluginHub = require('EventPluginHub');
var ReactTreeTraversal = require('ReactTreeTraversal');
var accumulateInto = require('accumulateInto');
var forEachAccumulated = require('forEachAccumulated');
type PropagationPhases = 'bubbled' | 'captured';
var getListener = EventPluginHub.getListener;
if (__DEV__) {
var warning = require('fbjs/lib/warning');
}
/**
* Some event types have a notion of different registration names for different
* "phases" of propagation. This finds listeners by a given phase.
*/
function listenerAtPhase(inst, event, propagationPhase: PropagationPhases) {
var registrationName =
event.dispatchConfig.phasedRegistrationNames[propagationPhase];
return getListener(inst, registrationName);
}
/**
* Tags a `SyntheticEvent` with dispatched listeners. Creating this function
* here, allows us to not have to bind or create functions for each event.
* Mutating the event's members allows us to not have to create a wrapping
* "dispatch" object that pairs the event with the listener.
*/
function accumulateDirectionalDispatches(inst, phase, event) {
if (__DEV__) {
warning(inst, 'Dispatching inst must not be null');
}
var listener = listenerAtPhase(inst, event, phase);
if (listener) {
event._dispatchListeners = accumulateInto(
event._dispatchListeners,
listener,
);
event._dispatchInstances = accumulateInto(event._dispatchInstances, inst);
}
}
/**
* Collect dispatches (must be entirely collected before dispatching - see unit
* tests). Lazily allocate the array to conserve memory. We must loop through
* each event and perform the traversal for each one. We cannot perform a
* single traversal for the entire collection of events because each event may
* have a different target.
*/
function accumulateTwoPhaseDispatchesSingle(event) {
if (event && event.dispatchConfig.phasedRegistrationNames) {
ReactTreeTraversal.traverseTwoPhase(
event._targetInst,
accumulateDirectionalDispatches,
event,
);
}
}
/**
* Same as `accumulateTwoPhaseDispatchesSingle`, but skips over the targetID.
*/
function accumulateTwoPhaseDispatchesSingleSkipTarget(event) {
if (event && event.dispatchConfig.phasedRegistrationNames) {
var targetInst = event._targetInst;
var parentInst = targetInst
? ReactTreeTraversal.getParentInstance(targetInst)
: null;
ReactTreeTraversal.traverseTwoPhase(
parentInst,
accumulateDirectionalDispatches,
event,
);
}
}
/**
* Accumulates without regard to direction, does not look for phased
* registration names. Same as `accumulateDirectDispatchesSingle` but without
* requiring that the `dispatchMarker` be the same as the dispatched ID.
*/
function accumulateDispatches(inst, ignoredDirection, event) {
if (inst && event && event.dispatchConfig.registrationName) {
var registrationName = event.dispatchConfig.registrationName;
var listener = getListener(inst, registrationName);
if (listener) {
event._dispatchListeners = accumulateInto(
event._dispatchListeners,
listener,
);
event._dispatchInstances = accumulateInto(event._dispatchInstances, inst);
}
}
}
/**
* Accumulates dispatches on an `SyntheticEvent`, but only for the
* `dispatchMarker`.
* @param {SyntheticEvent} event
*/
function accumulateDirectDispatchesSingle(event) {
if (event && event.dispatchConfig.registrationName) {
accumulateDispatches(event._targetInst, null, event);
}
}
function accumulateTwoPhaseDispatches(events) {
forEachAccumulated(events, accumulateTwoPhaseDispatchesSingle);
}
function accumulateTwoPhaseDispatchesSkipTarget(events) {
forEachAccumulated(events, accumulateTwoPhaseDispatchesSingleSkipTarget);
}
function accumulateEnterLeaveDispatches(leave, enter, from, to) {
ReactTreeTraversal.traverseEnterLeave(
from,
to,
accumulateDispatches,
leave,
enter,
);
}
function accumulateDirectDispatches(events) {
forEachAccumulated(events, accumulateDirectDispatchesSingle);
}
/**
* A small set of propagation patterns, each of which will accept a small amount
* of information, and generate a set of "dispatch ready event objects" - which
* are sets of events that have already been annotated with a set of dispatched
* listener functions/ids. The API is designed this way to discourage these
* propagation strategies from actually executing the dispatches, since we
* always want to collect the entire set of dispatches before executing even a
* single one.
*
* @constructor EventPropagators
*/
var EventPropagators = {
accumulateTwoPhaseDispatches: accumulateTwoPhaseDispatches,
accumulateTwoPhaseDispatchesSkipTarget: accumulateTwoPhaseDispatchesSkipTarget,
accumulateDirectDispatches: accumulateDirectDispatches,
accumulateEnterLeaveDispatches: accumulateEnterLeaveDispatches,
};
module.exports = EventPropagators;
| {'content_hash': '2148f9ed074ec3c944ff7a1c3682ed64', 'timestamp': '', 'source': 'github', 'line_count': 153, 'max_line_length': 81, 'avg_line_length': 32.3202614379085, 'alnum_prop': 0.7573306370070778, 'repo_name': 'pyitphyoaung/react', 'id': '9788e09f73d23ac10bd1fba90cf0ca935049dc2c', 'size': '5166', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'packages/events/EventPropagators.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '5219'}, {'name': 'C++', 'bytes': '44242'}, {'name': 'CSS', 'bytes': '4644'}, {'name': 'CoffeeScript', 'bytes': '12151'}, {'name': 'HTML', 'bytes': '22782'}, {'name': 'JavaScript', 'bytes': '1119727'}, {'name': 'Makefile', 'bytes': '189'}, {'name': 'Python', 'bytes': '259'}, {'name': 'Shell', 'bytes': '4208'}, {'name': 'TypeScript', 'bytes': '15498'}]} |
package org.waveprotocol.box.server.frontend;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.argThat;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import junit.framework.TestCase;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Matchers;
import org.mockito.Mockito;
import org.waveprotocol.box.common.DeltaSequence;
import org.waveprotocol.box.common.ExceptionalIterator;
import org.waveprotocol.box.common.comms.WaveClientRpc.WaveletVersion;
import org.waveprotocol.box.server.common.CoreWaveletOperationSerializer;
import org.waveprotocol.box.server.frontend.ClientFrontend.OpenListener;
import org.waveprotocol.box.server.util.WaveletDataUtil;
import org.waveprotocol.box.server.waveserver.WaveServerException;
import org.waveprotocol.box.server.waveserver.WaveletProvider;
import org.waveprotocol.box.server.waveserver.WaveletProvider.SubmitRequestListener;
import org.waveprotocol.wave.federation.Proto.ProtocolWaveletDelta;
import org.waveprotocol.wave.model.id.IdConstants;
import org.waveprotocol.wave.model.id.IdFilter;
import org.waveprotocol.wave.model.id.IdFilters;
import org.waveprotocol.wave.model.id.IdURIEncoderDecoder;
import org.waveprotocol.wave.model.id.WaveId;
import org.waveprotocol.wave.model.id.WaveletId;
import org.waveprotocol.wave.model.id.WaveletName;
import org.waveprotocol.wave.model.operation.OperationException;
import org.waveprotocol.wave.model.operation.wave.TransformedWaveletDelta;
import org.waveprotocol.wave.model.operation.wave.WaveletOperation;
import org.waveprotocol.wave.model.testing.DeltaTestUtil;
import org.waveprotocol.wave.model.version.HashedVersion;
import org.waveprotocol.wave.model.version.HashedVersionFactory;
import org.waveprotocol.wave.model.version.HashedVersionFactoryImpl;
import org.waveprotocol.wave.model.wave.ParticipantId;
import org.waveprotocol.wave.model.wave.data.ReadableWaveletData;
import org.waveprotocol.wave.model.wave.data.WaveletData;
import org.waveprotocol.wave.util.escapers.jvm.JavaUrlCodec;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* Tests for {@link ClientFrontendImpl}.
*/
public class ClientFrontendImplTest extends TestCase {
private static final IdURIEncoderDecoder URI_CODEC =
new IdURIEncoderDecoder(new JavaUrlCodec());
private static final HashedVersionFactory HASH_FACTORY =
new HashedVersionFactoryImpl(URI_CODEC);
private static final WaveId WAVE_ID = WaveId.of("example.com", "waveId");
private static final WaveId INDEX_WAVE_ID = WaveId.of("indexdomain", "indexwave");
private static final WaveletId W1 =
WaveletId.of("example.com", IdConstants.CONVERSATION_ROOT_WAVELET);
private static final WaveletId W2 = WaveletId.of("example.com", "conv+2");
private static final WaveletName WN1 = WaveletName.of(WAVE_ID, W1);
private static final WaveletName WN2 = WaveletName.of(WAVE_ID, W2);
private static final ParticipantId USER = new ParticipantId("[email protected]");
private static final DeltaTestUtil UTIL = new DeltaTestUtil(USER);
private static final HashedVersion V0 = HASH_FACTORY.createVersionZero(WN1);
private static final HashedVersion V1 = HashedVersion.unsigned(1L);
private static final HashedVersion V2 = HashedVersion.unsigned(2L);
private static final TransformedWaveletDelta DELTA = TransformedWaveletDelta.cloneOperations(
USER, V1, 0, ImmutableList.of(UTIL.addParticipant(USER)));
private static final DeltaSequence DELTAS = DeltaSequence.of(DELTA);
private static final ProtocolWaveletDelta SERIALIZED_DELTA =
CoreWaveletOperationSerializer.serialize(DELTA);
private static final Collection<WaveletVersion> NO_KNOWN_WAVELETS =
Collections.<WaveletVersion>emptySet();
private ClientFrontendImpl clientFrontend;
private WaveletProvider waveletProvider;
@Override
protected void setUp() throws Exception {
super.setUp();
waveletProvider = mock(WaveletProvider.class);
when(waveletProvider.getWaveletIds(any(WaveId.class))).thenReturn(ImmutableSet.<WaveletId>of());
WaveletInfo waveletInfo = WaveletInfo.create(HASH_FACTORY, waveletProvider);
clientFrontend = new ClientFrontendImpl(waveletProvider, waveletInfo);
}
public void testCannotOpenWavesWhenNotLoggedIn() throws Exception {
OpenListener listener = mock(OpenListener.class);
clientFrontend.openRequest(null, WAVE_ID, IdFilters.ALL_IDS, NO_KNOWN_WAVELETS, listener);
verify(listener).onFailure("Not logged in");
CommittedWaveletSnapshot snapshot = provideWavelet(WN1);
clientFrontend.waveletUpdate(snapshot.snapshot, DELTAS);
Mockito.verifyNoMoreInteractions(listener);
}
public void testOpenEmptyWaveReceivesChannelIdAndMarker() {
OpenListener listener = openWave(IdFilters.ALL_IDS);
verifyChannelId(listener);
verifyMarker(listener, WAVE_ID);
}
public void testTwoSubscriptionsReceiveDifferentChannelIds() {
OpenListener listener1 = openWave(IdFilters.ALL_IDS);
String ch1 = verifyChannelId(listener1);
OpenListener listener2 = openWave(IdFilters.ALL_IDS);
String ch2 = verifyChannelId(listener2);
assertFalse(ch1.equals(ch2));
}
public void testOpenWaveRecievesSnapshotsThenMarker() throws Exception {
CommittedWaveletSnapshot snapshot1 = provideWavelet(WN1);
CommittedWaveletSnapshot snapshot2 = provideWavelet(WN2);
when(waveletProvider.getWaveletIds(WAVE_ID)).thenReturn(ImmutableSet.of(W1, W2));
when(waveletProvider.checkAccessPermission(WN1, USER)).thenReturn(true);
when(waveletProvider.checkAccessPermission(WN2, USER)).thenReturn(true);
OpenListener listener = openWave(IdFilters.ALL_IDS);
verify(listener).onUpdate(eq(WN1), eq(snapshot1), eq(DeltaSequence.empty()),
eq(V0), isNullMarker(), any(String.class));
verify(listener).onUpdate(eq(WN2), eq(snapshot2), eq(DeltaSequence.empty()),
eq(V0), isNullMarker(), any(String.class));
verifyMarker(listener, WAVE_ID);
}
/**
* Tests that a snapshot not matching the subscription filter is not received.
* @throws WaveServerException
*/
@SuppressWarnings("unchecked") // Mock container
public void testUnsubscribedSnapshotNotRecieved() throws Exception {
OpenListener listener = openWave(IdFilter.ofPrefixes("non-existing"));
verifyChannelId(listener);
verifyMarker(listener, WAVE_ID);
ReadableWaveletData wavelet = provideWavelet(WN1).snapshot;
clientFrontend.waveletUpdate(wavelet, DELTAS);
verify(listener, Mockito.never()).onUpdate(eq(WN1),
any(CommittedWaveletSnapshot.class), Matchers.anyList(),
any(HashedVersion.class), isNullMarker(), anyString());
}
/**
* Tests that we get deltas.
*/
public void testReceivedDeltasSentToClient() throws Exception {
CommittedWaveletSnapshot snapshot = provideWavelet(WN1);
when(waveletProvider.getWaveletIds(WAVE_ID)).thenReturn(ImmutableSet.of(W1));
when(waveletProvider.checkAccessPermission(WN1, USER)).thenReturn(true);
OpenListener listener = openWave(IdFilters.ALL_IDS);
verify(listener).onUpdate(eq(WN1), eq(snapshot), eq(DeltaSequence.empty()),
eq(V0), isNullMarker(), any(String.class));
verifyMarker(listener, WAVE_ID);
TransformedWaveletDelta delta = TransformedWaveletDelta.cloneOperations(USER, V2, 1234567890L,
Arrays.asList(UTIL.noOp()));
DeltaSequence deltas = DeltaSequence.of(delta);
clientFrontend.waveletUpdate(snapshot.snapshot, deltas);
verify(listener).onUpdate(eq(WN1), isNullSnapshot(), eq(deltas),
isNullVersion(), isNullMarker(), anyString());
}
/**
* Tests that submit requests are forwarded to the wavelet provider.
*/
public void testSubmitForwardedToWaveletProvider() {
OpenListener openListener = openWave(IdFilters.ALL_IDS);
String channelId = verifyChannelId(openListener);
SubmitRequestListener submitListener = mock(SubmitRequestListener.class);
clientFrontend.submitRequest(USER, WN1, SERIALIZED_DELTA, channelId, submitListener);
verify(waveletProvider).submitRequest(eq(WN1), eq(SERIALIZED_DELTA),
any(SubmitRequestListener.class));
verifyZeroInteractions(submitListener);
}
public void testCannotSubmitAsDifferentUser() {
ParticipantId otherParticipant = new ParticipantId("[email protected]");
OpenListener openListener = openWave(IdFilters.ALL_IDS);
String channelId = verifyChannelId(openListener);
SubmitRequestListener submitListener = mock(SubmitRequestListener.class);
clientFrontend.submitRequest(otherParticipant, WN1, SERIALIZED_DELTA, channelId,
submitListener);
verify(submitListener).onFailure(anyString());
verify(submitListener, never()).onSuccess(anyInt(), (HashedVersion) any(), anyLong());
}
/**
* Tests that if we open the index wave, we don't get updates from the
* original wave if they contain no interesting operations (add/remove
* participant or text).
*/
public void testUninterestingDeltasDontUpdateIndex() throws WaveServerException {
provideWaves(Collections.<WaveId> emptySet());
OpenListener listener = openWave(INDEX_WAVE_ID, IdFilters.ALL_IDS);
verifyChannelId(listener);
verifyMarker(listener, INDEX_WAVE_ID);
HashedVersion v1 = HashedVersion.unsigned(1L);
TransformedWaveletDelta delta = makeDelta(USER, v1, 0L, UTIL.noOp());
DeltaSequence deltas = DeltaSequence.of(delta);
WaveletData wavelet = WaveletDataUtil.createEmptyWavelet(WN1, USER, V0, 1234567890L);
clientFrontend.waveletUpdate(wavelet, deltas);
WaveletName dummyWaveletName = ClientFrontendImpl.createDummyWaveletName(INDEX_WAVE_ID);
verify(listener, Mockito.never()).onUpdate(eq(dummyWaveletName),
any(CommittedWaveletSnapshot.class),
isDeltasStartingAt(0),
any(HashedVersion.class), isNullMarker(), anyString());
}
/**
* Opens a wave and returns a mock listener.
*/
private ClientFrontend.OpenListener openWave(WaveId waveId, IdFilter filter) {
OpenListener openListener = mock(OpenListener.class);
clientFrontend.openRequest(USER, waveId, filter, NO_KNOWN_WAVELETS, openListener);
return openListener;
}
private ClientFrontend.OpenListener openWave(IdFilter filter) {
return openWave(WAVE_ID, filter);
}
private TransformedWaveletDelta makeDelta(ParticipantId author, HashedVersion endVersion,
long timestamp, WaveletOperation... operations) {
return TransformedWaveletDelta.cloneOperations(author, endVersion, timestamp,
Arrays.asList(operations));
}
/**
* Initialises the wavelet provider to provide a collection of waves.
*/
private void provideWaves(Collection<WaveId> waves) throws WaveServerException {
when(waveletProvider.getWaveIds()).thenReturn(
ExceptionalIterator.FromIterator.<WaveId, WaveServerException> create(
waves.iterator()));
}
/**
* Prepares the wavelet provider to provide a new wavelet.
*
* @param name new wavelet name
* @return the new wavelet snapshot
*/
private CommittedWaveletSnapshot provideWavelet(WaveletName name) throws WaveServerException,
OperationException {
WaveletData wavelet = WaveletDataUtil.createEmptyWavelet(name, USER, V0, 1234567890L);
DELTA.get(0).apply(wavelet);
CommittedWaveletSnapshot snapshot = new CommittedWaveletSnapshot(wavelet, V0);
when(waveletProvider.getSnapshot(name)).thenReturn(snapshot);
when(waveletProvider.getWaveletIds(name.waveId)).thenReturn(ImmutableSet.of(name.waveletId));
return snapshot;
}
/**
* Verifies that the listener received a channel id.
*
* @return the channel id received
*/
private static String verifyChannelId(OpenListener listener) {
ArgumentCaptor<String> channelIdCaptor = ArgumentCaptor.forClass(String.class);
verify(listener).onUpdate(any(WaveletName.class), isNullSnapshot(), eq(DeltaSequence.empty()),
isNullVersion(), isNullMarker(), channelIdCaptor.capture());
return channelIdCaptor.getValue();
}
/**
* Verifies that the listener received a marker.
*/
private static void verifyMarker(OpenListener listener, WaveId waveId) {
ArgumentCaptor<WaveletName> waveletNameCaptor = ArgumentCaptor.forClass(WaveletName.class);
verify(listener).onUpdate(waveletNameCaptor.capture(), isNullSnapshot(),
eq(DeltaSequence.empty()), isNullVersion(), eq(true), (String) Mockito.isNull());
assertEquals(waveId, waveletNameCaptor.getValue().waveId);
}
private static CommittedWaveletSnapshot isNullSnapshot() {
return (CommittedWaveletSnapshot) Mockito.isNull();
}
private static HashedVersion isNullVersion() {
return (HashedVersion) Mockito.isNull();
}
private static Boolean isNullMarker() {
return (Boolean) Mockito.isNull();
}
private static List<TransformedWaveletDelta> isDeltasStartingAt(final long version) {
return argThat(new ArgumentMatcher<List<TransformedWaveletDelta>>() {
public boolean matches(List<TransformedWaveletDelta> sequence) {
if (sequence != null) {
DeltaSequence s = (DeltaSequence) sequence;
return (s.size() > 0) && (s.getStartVersion() == version);
}
return false;
}
});
}
}
| {'content_hash': '6b253ddfa82434fe177a445a0ec6e1a5', 'timestamp': '', 'source': 'github', 'line_count': 325, 'max_line_length': 100, 'avg_line_length': 42.36615384615384, 'alnum_prop': 0.765342435906747, 'repo_name': 'vega113/incubator-wave', 'id': '54556e8e74d0b3742a17795b94b412b933cc2e2b', 'size': '14577', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'wave/src/test/java/org/waveprotocol/box/server/frontend/ClientFrontendImplTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '4189'}, {'name': 'CSS', 'bytes': '125991'}, {'name': 'HTML', 'bytes': '75893'}, {'name': 'Java', 'bytes': '12413414'}, {'name': 'JavaScript', 'bytes': '126626'}, {'name': 'Protocol Buffer', 'bytes': '62449'}, {'name': 'Shell', 'bytes': '20077'}, {'name': 'Smalltalk', 'bytes': '42272'}]} |
package com.huqi_weather.android.db;
import org.litepal.crud.DataSupport;
/**
* Created by HuQi on 2017/7/8.
*/
public class City extends DataSupport {
private int id;
private String cityName;
private int cityCode;
private int provinceId;
public int getId(){
return id;
}
public void setId(int id){
this.id=id;
}
public String getCityName(){
return cityName;
}
public void setCityName(String cityName){
this.cityName=cityName;
}
public int getCityCode(){
return cityCode;
}
public void setCityCode(int cityCode){
this.cityCode=cityCode;
}
public int getProvinceId(){
return provinceId;
}
public void setProvinceId(int provinceId){
this.provinceId=provinceId;
}
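// Illustrative usage sketch only (the lowercase column name and the query style are
// assumptions based on LitePal's DataSupport API, not part of this class):
// List<City> cities = DataSupport.where("provinceid = ?", String.valueOf(provinceId)).find(City.class);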
}
| {'content_hash': 'a06e574fb9b15862a9593ef9c31a80e9', 'timestamp': '', 'source': 'github', 'line_count': 39, 'max_line_length': 46, 'avg_line_length': 20.846153846153847, 'alnum_prop': 0.6297662976629766, 'repo_name': 'REUS1/huqi_weather', 'id': 'c3e87ddcced5abe2d8bb47231d6cf3ffb285880e', 'size': '813', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app/src/main/java/com/huqi_weather/android/db/City.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Java', 'bytes': '33159'}]} |
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports["default"] = void 0;
var LogRole = {
relatedConcepts: [{
module: 'ARIA',
concept: {
name: 'log'
}
}],
type: 'structure'
};
var _default = LogRole;
exports["default"] = _default; | {'content_hash': '1c793fa20434d49f074edd3b24e86589', 'timestamp': '', 'source': 'github', 'line_count': 17, 'max_line_length': 46, 'avg_line_length': 17.0, 'alnum_prop': 0.5951557093425606, 'repo_name': 'jpoeng/jpoeng.github.io', 'id': 'e193e4f0a0a1970cc8fea5bed46011ab9fe1201a', 'size': '289', 'binary': False, 'copies': '12', 'ref': 'refs/heads/master', 'path': 'node_modules/axobject-query/lib/etc/objects/LogRole.js', 'mode': '33261', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '5765'}, {'name': 'HTML', 'bytes': '53947'}, {'name': 'JavaScript', 'bytes': '1885'}, {'name': 'PHP', 'bytes': '9773'}]} |
{% extends "base.html" %}
{% load i18n %}
{% load humanize %}
{% load btc_formats %}
{% load static %}
{% block title %}
Subscribe to a {{ coin_symbol|coin_symbol_to_display_name }} Address
{% endblock title %}
{% block page_header %}
<h1>
<i class="fa fa-send"></i>
Subscribe to an Address
</h1>
{% endblock page_header %}
{% block content %}
<div class="section">
<div class="container">
<form role="form" method="post" action="{% url 'subscribe_address' coin_symbol %}">
{% load crispy_forms_tags %}
{{ form|crispy }}
{% csrf_token %}
<p class="text-center">
<button type="submit" class="btn btn-primary btn-lg">{% trans "Subscribe" %}</button>
</p>
</form>
{% if not user.is_authenticated %}
<p>
If you have an account, please
<a href="{% url 'user_login' %}">login</a>.
</p>
{% endif %}
</div>
</div>
{% endblock content %}
| {'content_hash': '9a14f608bf36843c27333fcf8117dbe6', 'timestamp': '', 'source': 'github', 'line_count': 42, 'max_line_length': 101, 'avg_line_length': 23.833333333333332, 'alnum_prop': 0.5234765234765235, 'repo_name': 'blockcypher/php-wallet-sample', 'id': '5aba40bb6c9085f9f6f51cf4fe7b240bc6559a23', 'size': '1001', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/BlockCypher/AppCommon/Infrastructure/LayoutBundle/Resources/django_templates/subscribe_address.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '8971'}, {'name': 'HTML', 'bytes': '218778'}, {'name': 'JavaScript', 'bytes': '6326'}, {'name': 'PHP', 'bytes': '400629'}]} |
require 'fileutils'
which_config = "ci/database.#{ENV['DB'] || 'sqlite'}.yml"
puts "Copying database configuration for CI: #{which_config}"
FileUtils.cp which_config, 'spec/dummy/config/database.yml'
| {'content_hash': 'f23ea7803181c1e6aee825511c9984c5', 'timestamp': '', 'source': 'github', 'line_count': 6, 'max_line_length': 61, 'avg_line_length': 33.666666666666664, 'alnum_prop': 0.7277227722772277, 'repo_name': 'oktober/calagator', 'id': '9f75be708aad7bfdc405be6d9561b89aef6329da', 'size': '202', 'binary': False, 'copies': '14', 'ref': 'refs/heads/master', 'path': 'ci/copy_database_config.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '30170'}, {'name': 'HTML', 'bytes': '190008'}, {'name': 'JavaScript', 'bytes': '3165'}, {'name': 'Ruby', 'bytes': '352043'}, {'name': 'Shell', 'bytes': '2132'}, {'name': 'XSLT', 'bytes': '22369'}]} |
//
// SourceRefreshOperation.m
// Installer
//
// Created by Slava Karpenko on 3/13/09.
// Copyright 2009 Ripdev. All rights reserved.
//
#import <unistd.h>
#import "SourceRefreshOperation.h"
#import "OperationQueue.h"
#import "NSFileManager+RipdevExtensions.h"
#import "DPKGParser.h"
#define SLEEP_AMT 1
static CFDataRef SourceRefreshOperation_MessagePortCallBack(CFMessagePortRef local, SInt32 msgid, CFDataRef data, void *info);
static void SourceRefreshOperation_ObserverCallBack(CFRunLoopObserverRef observer, CFRunLoopActivity activity, void *info);
@implementation SourceRefreshOperation
- (SourceRefreshOperation*)initWithDelegate:(id)del
{
if (self = [super init])
{
delegate = del;
mQueue = [[NSMutableArray alloc] initWithCapacity:0];
}
return self;
}
- (void)dealloc
{
if (mMessagePort)
{
CFMessagePortInvalidate(mMessagePort);
CFRelease(mMessagePort);
}
[mQueue release];
[super dealloc];
}
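// -main runs a private run loop until the operation is cancelled: a local CFMessagePort
// (kSourceRefreshOperationPortName) forwards incoming source IDs to -handlePortMessage:withData:,
// which queues them in mQueue, and a run loop observer drains that queue via -checkForWork,
// reparsing a source's cached index file only when its modification date differs from the
// stored lastupdate.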
- (void)main
{
mDB = [Database database];
[mDB executeUpdate:@"PRAGMA temp_store = MEMORY"];
CFMessagePortContext ctx = { 0 };
ctx.info = self;
ctx.retain = &CFRetain;
ctx.release = &CFRelease;
CFRunLoopSourceRef src = NULL;
mMessagePort = CFMessagePortCreateLocal(kCFAllocatorDefault, kSourceRefreshOperationPortName, SourceRefreshOperation_MessagePortCallBack, &ctx, NO);
if (mMessagePort)
{
src = CFMessagePortCreateRunLoopSource(kCFAllocatorDefault, mMessagePort, 0);
if (src)
{
CFRunLoopAddSource(CFRunLoopGetCurrent(), src, kCFRunLoopDefaultMode);
}
}
CFRunLoopObserverContext octx = { 0 };
octx.info = self;
octx.retain = &CFRetain;
octx.release = &CFRelease;
CFRunLoopObserverRef obs = CFRunLoopObserverCreate(kCFAllocatorDefault, kCFRunLoopBeforeSources, TRUE, 0, SourceRefreshOperation_ObserverCallBack, &octx);
if (obs)
{
CFRunLoopAddObserver(CFRunLoopGetCurrent(), obs, kCFRunLoopDefaultMode);
}
do
{
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
CFRunLoopRunInMode(kCFRunLoopDefaultMode, .5, FALSE);
[pool release];
} while (![self isCancelled]);
if (src)
{
CFRunLoopSourceInvalidate(src);
CFRunLoopRemoveSource(CFRunLoopGetCurrent(), src, kCFRunLoopDefaultMode);
CFRelease(src);
}
if (obs)
{
CFRunLoopObserverInvalidate(obs);
CFRunLoopRemoveObserver(CFRunLoopGetCurrent(), obs, kCFRunLoopDefaultMode);
CFRelease(obs);
}
/*
NSMutableArray* sources = [NSMutableArray arrayWithCapacity:0];
ResultSet* rs = [mDB executeQuery:@"SELECT RowID, lastupdate FROM sources"];
while ([rs next])
{
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
sqlite_int64 sourceID = [rs intForColumn:@"rowid"];
NSDate* lastupdate = [rs dateForColumn:@"lastupdate"];
NSString* destPath = [kIcyIndexesPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.idx", (int)sourceID]];
if (![[NSFileManager defaultManager] fileExistsAtPath:destPath])
{
[pool release];
continue;
}
// check lastupdate
NSDictionary* attrs = [[NSFileManager defaultManager] fileAttributesAtPath:destPath traverseLink:NO];
NSDate* lastmod = [attrs fileModificationDate];
if (![lastupdate isEqualToDate:lastmod])
{
[sources addObject:[NSDictionary dictionaryWithObjectsAndKeys:destPath, @"path", [NSNumber numberWithLongLong:sourceID], @"id", lastmod, @"date", nil]];
}
[pool release];
}
[rs close];
for (NSDictionary* source in sources)
{
[mDB beginTransaction];
[self parsePackagesFile:[source objectForKey:@"path"] forSourceID:[[source objectForKey:@"id"] longLongValue]];
[mDB executeUpdate:@"UPDATE sources SET lastupdate = ? WHERE RowID = ?", [source objectForKey:@"date"], [source objectForKey:@"id"]];
[mDB commit];
}
*/
NSLog(@"Source refreshes thread is done.");
if (delegate && [delegate respondsToSelector:@selector(sourceRefreshFinished)])
[delegate performSelectorOnMainThread:@selector(sourceRefreshFinished) withObject:nil waitUntilDone:YES];
}
- (void)checkForWork
{
if (![mQueue count])
return;
NSNumber* sourceID = [[mQueue objectAtIndex:0] retain];
[mQueue removeObjectAtIndex:0];
ResultSet* rs = [mDB executeQuery:@"SELECT RowID, lastupdate FROM sources WHERE RowID = ?", sourceID];
if ([rs next])
{
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
sqlite_int64 sid = [rs intForColumn:@"rowid"];
NSDate* lastupdate = [[rs dateForColumn:@"lastupdate"] retain];
NSString* destPath = [kIcyIndexesPath stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.idx", (int)sid]];
[rs close];
if ([[NSFileManager defaultManager] fileExistsAtPath:destPath])
{
NSDictionary* attrs = [[NSFileManager defaultManager] fileAttributesAtPath:destPath traverseLink:NO];
NSDate* lastmod = [attrs fileModificationDate];
if (![lastupdate isEqualToDate:lastmod])
{
[mDB beginTransaction];
[self parsePackagesFile:destPath forSourceID:sid];
[mDB executeUpdate:@"UPDATE sources SET lastupdate = ? WHERE RowID = ?", lastmod, sourceID];
[mDB commit];
}
}
[lastupdate release];
[pool release];
}
[self performSelectorOnMainThread:@selector(notifyDelegateWithSourceID:) withObject:sourceID waitUntilDone:YES];
[sourceID release];
}
- (void)handlePortMessage:(SInt32)msgID withData:(NSData*)data
{
NSNumber* sourceID = [NSNumber numberWithUnsignedInt:(unsigned int)msgID];
if (![mQueue containsObject:sourceID])
{
[mQueue addObject:sourceID];
}
}
- (void)parsePackagesFile:(NSString*)path forSourceID:(sqlite_int64)sourceID
{
NSAutoreleasePool* superPool = [[NSAutoreleasePool alloc] init];
DPKGParser* pa = [[DPKGParser alloc] init];
NSArray* packages = [pa parseDatabaseAtPath:path ignoreStatus:YES];
[pa release];
[mDB executeUpdate:@"DELETE FROM packages WHERE source = ?", [NSNumber numberWithLongLong:sourceID]];
if (!packages)
{
[self performSelectorOnMainThread:@selector(notifyDelegateWithSourceID:) withObject:[NSNumber numberWithLongLong:sourceID] waitUntilDone:YES];
[superPool release];
return;
}
NSBundle* bundle = [NSBundle mainBundle];
NSDate* dbModDate = [[[NSFileManager defaultManager] fileAttributesAtPath:path traverseLink:YES] fileModificationDate];
sqlite3_stmt* deleteFromPackagesStmt = NULL;
sqlite3_stmt* insertIntoPackagesStmt = NULL;
sqlite3_stmt* insertIntoMetaStmt = NULL;
sqlite3_stmt* insertIntoMemoriesStmt = NULL;
sqlite3* db = [mDB db];
NSCharacterSet* whitespaceNewlineSet = [NSCharacterSet whitespaceAndNewlineCharacterSet];
// Prepare our statements in advance
int rc = sqlite3_prepare_v2(db, "DELETE FROM packages WHERE identifier = ?", -1, &deleteFromPackagesStmt, NULL);
if (rc != SQLITE_OK)
NSLog(@"Error in preparing statement deleteFromPackagesStmt: %d", rc);
rc = sqlite3_prepare_v2(db, "INSERT INTO packages (source,identifier,category,name,version) VALUES(?,?,?,?,?)", -1, &insertIntoPackagesStmt, NULL);
if (rc != SQLITE_OK)
NSLog(@"Error in preparing statement insertIntoPackagesStmt: %d", rc);
rc = sqlite3_prepare_v2(db, "INSERT INTO meta (identifier,tag,data) VALUES(?,?,?)", -1, &insertIntoMetaStmt, NULL);
if (rc != SQLITE_OK)
NSLog(@"Error in preparing statement insertIntoMetaStmt: %d", rc);
rc = sqlite3_prepare_v2(db, "INSERT INTO memories (identifier,package,name,version,created) VALUES(?,?,?,?,?)", -1, &insertIntoMemoriesStmt, NULL);
if (rc != SQLITE_OK)
NSLog(@"Error in preparing statement insertIntoMemoriesStmt: %d", rc);
for (NSDictionary* p in packages)
{
if (![p objectForKey:@"package"])
continue;
/* if ([p objectForKey:@"tag"] &&
[[p objectForKey:@"tag"] rangeOfString:@"cydia::commercial"].length)
continue;
*/
NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
const char* currentIdentifier = [[p objectForKey:@"package"] UTF8String];
// DELETE FROM packages WHERE identifier = ?
sqlite3_bind_text(deleteFromPackagesStmt, 1, currentIdentifier, -1, SQLITE_STATIC);
do
{
rc = sqlite3_step(deleteFromPackagesStmt);
if (rc == SQLITE_BUSY)
usleep(SLEEP_AMT);
} while (rc == SQLITE_BUSY);
if (rc != SQLITE_OK && rc != SQLITE_DONE)
NSLog(@"sqlite3_step(DELETE FROM packages WHERE identifier = %@) = %d", [p objectForKey:@"package"], rc);
sqlite3_reset(deleteFromPackagesStmt);
// Add the package
// INSERT INTO packages (source,identifier,category,name,version) VALUES(?,?,?,?,?)
NSString* category = [p objectForKey:@"section"];
if (!category)
category = NSLocalizedString(@"Uncategorized", @"");
else
{
category = [category stringByTrimmingCharactersInSet:whitespaceNewlineSet];
category = [bundle localizedStringForKey:category value:category table:@"Categories"];
}
NSString* name = [p objectForKey:@"name"];
NSString* version = [p objectForKey:@"version"];
sqlite3_bind_int64(insertIntoPackagesStmt, 1, sourceID);
sqlite3_bind_text(insertIntoPackagesStmt, 2, currentIdentifier, -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoPackagesStmt, 3, [category UTF8String], -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoPackagesStmt, 4, name?[name UTF8String]:currentIdentifier, -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoPackagesStmt, 5, version?[version UTF8String]:"0.1", -1, SQLITE_STATIC);
do
{
rc = sqlite3_step(insertIntoPackagesStmt);
if (rc == SQLITE_BUSY)
usleep(SLEEP_AMT);
} while (rc == SQLITE_BUSY);
if (rc != SQLITE_OK && rc != SQLITE_DONE)
NSLog(@"sqlite3_step(INSERT INTO packages (%d,%s, ...) = %d", (int)sourceID, currentIdentifier, rc);
sqlite3_reset(insertIntoPackagesStmt);
// insert into memories
// INSERT INTO memories (identifier,package,name,version,created) VALUES(?,?,?,?,?)
sqlite3_bind_text(insertIntoMemoriesStmt, 1, [[NSString stringWithFormat:@"%s.%@", currentIdentifier, version?version:@"0"] UTF8String], -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoMemoriesStmt, 2, currentIdentifier, -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoMemoriesStmt, 3, name?[name UTF8String]:currentIdentifier, -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoMemoriesStmt, 4, version?[version UTF8String]:"0.1", -1, SQLITE_STATIC);
sqlite3_bind_double(insertIntoMemoriesStmt, 5, [dbModDate timeIntervalSince1970]);
do
{
rc = sqlite3_step(insertIntoMemoriesStmt);
} while (rc == SQLITE_BUSY);
sqlite3_reset(insertIntoMemoriesStmt);
// Now add meta
for (NSString* key in p)
{
NSString* value = [p objectForKey:key];
if ([key isEqualToString:@"section"])
value = [bundle localizedStringForKey:value value:value table:@"Categories"];
sqlite3_bind_text(insertIntoMetaStmt, 1, currentIdentifier, -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoMetaStmt, 2, [key UTF8String], -1, SQLITE_STATIC);
sqlite3_bind_text(insertIntoMetaStmt, 3, [value UTF8String], -1, SQLITE_STATIC);
do
{
rc = sqlite3_step(insertIntoMetaStmt);
if (rc == SQLITE_BUSY)
usleep(SLEEP_AMT);
} while (rc == SQLITE_BUSY);
if (rc != SQLITE_OK && rc != SQLITE_DONE)
NSLog(@"sqlite3_step(INSERT INTO meta (%d,%s,%@,...) = %d", (int)sourceID, currentIdentifier, key, rc);
sqlite3_reset(insertIntoMetaStmt);
}
[pool release];
}
sqlite3_finalize(deleteFromPackagesStmt);
sqlite3_finalize(insertIntoPackagesStmt);
sqlite3_finalize(insertIntoMetaStmt);
sqlite3_finalize(insertIntoMemoriesStmt);
[superPool release];
}
- (void)notifyDelegateWithSourceID:(NSNumber*)sourceID
{
if (delegate && [delegate respondsToSelector:@selector(sourceRefreshDone:withError:)])
[delegate sourceRefreshDone:sourceID withError:nil];
}
@end
CFDataRef SourceRefreshOperation_MessagePortCallBack(CFMessagePortRef local, SInt32 msgid, CFDataRef data, void *info)
{
SourceRefreshOperation* op = (SourceRefreshOperation*)info;
[op handlePortMessage:msgid withData:(NSData*)data];
return NULL;
}
static void SourceRefreshOperation_ObserverCallBack(CFRunLoopObserverRef observer, CFRunLoopActivity activity, void *info)
{
// perform
SourceRefreshOperation* op = (SourceRefreshOperation*)info;
[op checkForWork];
}
| {'content_hash': 'c5e479edcec31b8554c007cef9748b2c', 'timestamp': '', 'source': 'github', 'line_count': 374, 'max_line_length': 158, 'avg_line_length': 32.30748663101604, 'alnum_prop': 0.7269717785318216, 'repo_name': 'slavikus/Icy', 'id': '3b188358f4fd0c863ad80741763aa8fcb35e386a', 'size': '12083', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'Sources/Backend/Operations/SourceRefreshOperation.m', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '10859'}, {'name': 'Objective-C', 'bytes': '268359'}]} |
#ifndef FILEFIELD_H
#define FILEFIELD_H
#include "tcommon.h"
#include <QWidget>
#include <QFileDialog>
#include "tfilepath.h"
#include "toonzqt/lineedit.h"
#undef DVAPI
#undef DVVAR
#ifdef TOONZQT_EXPORTS
#define DVAPI DV_EXPORT_API
#define DVVAR DV_EXPORT_VAR
#else
#define DVAPI DV_IMPORT_API
#define DVVAR DV_IMPORT_VAR
#endif
// forward declaration
class QPushButton;
//=============================================================================
namespace DVGui
{
//=============================================================================
/*! \class DVGui::FileField
\brief The FileField class provides an object to manage a file browser.
Inherits \b QWidget.
The FileField object is composed of two parts, a field \b LineEdit and a
button \b QPushButton. Click the button to open a directory browser popup
that is used to choose a directory.
You can set a default path in the constructor.
The maximum height of the object is fixed to \b DVGui::WidgetHeight.
By default the dialog only permits the user to select a folder, but using setFileMode()
you can indicate what the user may select in the file dialog, a folder or a file;
you can also set the file types using setFilters().
*/
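/*! \par Usage sketch
A minimal illustrative sketch only; the parent widget, the receiving slot and the
filter string below are assumptions, not part of this header:
\code
FileField *field = new FileField(parentWidget, QString("C:/scenes"));
field->setFileMode(QFileDialog::ExistingFile); // let the user pick a file instead of a folder
field->setFilters(QStringList() << "tnz"); // restrict the browsable file types
QObject::connect(field, SIGNAL(pathChanged()), receiver, SLOT(onPathChanged()));
\endcode
*/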
class DVAPI FileField : public QWidget
{
Q_OBJECT
LineEdit *m_field;
QStringList m_filters;
QFileDialog::FileMode m_fileMode;
QString m_windowTitle;
protected: //used in the child class for CleanupSettings
QPushButton *m_fileBrowseButton;
QString m_lastSelectedPath;
public:
/* N.B. See the comment on the BrowserPopupController class in filebrowserpopup.cpp */
class BrowserPopupController
{
public:
BrowserPopupController() {}
virtual ~BrowserPopupController() {}
virtual bool isExecute() { return true; };
virtual QString getPath() { return QString(); };
virtual void openPopup(QStringList, bool, QString){};
};
static BrowserPopupController *m_browserPopupController;
FileField(QWidget *parent = 0, QString path = QString(), bool readOnly = false);
~FileField() {}
/*! Set what the user may select in the file dialog:
\li QFileDialog::DirectoryOnly shows only directories.
\li QFileDialog::AnyFile, QFileDialog::ExistingFile, QFileDialog::Directory, QFileDialog::ExistingFiles
show both directories and files. */
void setFileMode(const QFileDialog::FileMode &fileMode);
/*! Set the file types shown in the dialog popup. */
void setFilters(const QStringList &filters);
void setValidator(const QValidator *v) { m_field->setValidator(v); }
QString getPath();
void setPath(const QString &path);
static void setBrowserPopupController(BrowserPopupController *controller);
static BrowserPopupController *getBrowserPopupController();
protected slots:
/*! Open a static file dialog popup to browse and choose directories. If a
directory is selected and chosen, the field is set to this directory. */
//reimplemented in the "save in" filefield in CleanupSettings
virtual void browseDirectory();
signals:
/*! This signal is emitted when the path in the field changes, either by
editing the field or via the browse popup. */
void pathChanged();
};
//-----------------------------------------------------------------------------
} //namespace DVGui
//-----------------------------------------------------------------------------
#endif // FILEFIELD_H
| {'content_hash': '1703c2688c7697401829a6adf35380ac', 'timestamp': '', 'source': 'github', 'line_count': 105, 'max_line_length': 106, 'avg_line_length': 30.79047619047619, 'alnum_prop': 0.6807918342097123, 'repo_name': 'ss23/opentoonz', 'id': 'b1edeb41445a9b36b75f38c6565bc52b0ff38f0a', 'size': '3233', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'toonz/sources/include/toonzqt/filefield.h', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'C', 'bytes': '1202767'}, {'name': 'C++', 'bytes': '20262970'}, {'name': 'CMake', 'bytes': '75219'}, {'name': 'CSS', 'bytes': '77423'}, {'name': 'FLUX', 'bytes': '156349'}, {'name': 'GLSL', 'bytes': '24307'}, {'name': 'HTML', 'bytes': '62240'}, {'name': 'JavaScript', 'bytes': '2'}, {'name': 'Lua', 'bytes': '1882'}, {'name': 'Objective-C', 'bytes': '227146'}, {'name': 'QMake', 'bytes': '11508'}, {'name': 'Shell', 'bytes': '52168'}, {'name': 'Smarty', 'bytes': '21137'}]} |
package com.tupster24.tupsterlabs.letsmodreboot.proxy;
import com.tupster24.tupsterlabs.letsmodreboot.client.settings.keybindings;
import cpw.mods.fml.client.registry.ClientRegistry;
import net.minecraft.client.settings.KeyBinding;
public class clientProxy extends commonProxy
{
@Override
public void registerKeyBindings()
{
ClientRegistry.registerKeyBinding(keybindings.charge);
ClientRegistry.registerKeyBinding(keybindings.release);
}
}
| {'content_hash': 'e357f0457262dd7c78a9911bc5b94ba9', 'timestamp': '', 'source': 'github', 'line_count': 15, 'max_line_length': 75, 'avg_line_length': 31.6, 'alnum_prop': 0.7953586497890295, 'repo_name': 'tupperkion/Lets-mod-reboot', 'id': '95db2622ce0d324933afcac5d706aa2295a913ee', 'size': '474', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/java/com/tupster24/tupsterlabs/letsmodreboot/proxy/clientProxy.java', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Java', 'bytes': '27017'}]} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- You may freely edit this file. See commented blocks below for -->
<!-- some examples of how to customize the build. -->
<!-- (If you delete it and reopen the project it will be recreated.) -->
<!-- By default, only the Clean and Build commands use this build script. -->
<!-- Commands such as Run, Debug, and Test only use this build script if -->
<!-- the Compile on Save feature is turned off for the project. -->
<!-- You can turn off the Compile on Save (or Deploy on Save) setting -->
<!-- in the project's Project Properties dialog box.-->
<project name="MessageConsole" default="default" basedir=".">
<description>Builds, tests, and runs the project MessageConsole.</description>
<import file="nbproject/build-impl.xml"/>
<!--
There exist several targets which are by default empty and which can be
used for execution of your tasks. These targets are usually executed
before and after some main targets. They are:
-pre-init: called before initialization of project properties
-post-init: called after initialization of project properties
-pre-compile: called before javac compilation
-post-compile: called after javac compilation
-pre-compile-single: called before javac compilation of single file
-post-compile-single: called after javac compilation of single file
-pre-compile-test: called before javac compilation of JUnit tests
-post-compile-test: called after javac compilation of JUnit tests
-pre-compile-test-single: called before javac compilation of single JUnit test
-post-compile-test-single: called after javac compilation of single JUnit test
-pre-jar: called before JAR building
-post-jar: called after JAR building
-post-clean: called after cleaning build products
(Targets beginning with '-' are not intended to be called on their own.)
Example of inserting an obfuscator after compilation could look like this:
<target name="-post-compile">
<obfuscate>
<fileset dir="${build.classes.dir}"/>
</obfuscate>
</target>
For list of available properties check the imported
nbproject/build-impl.xml file.
Another way to customize the build is by overriding existing main targets.
The targets of interest are:
-init-macrodef-javac: defines macro for javac compilation
-init-macrodef-junit: defines macro for junit execution
-init-macrodef-debug: defines macro for class debugging
-init-macrodef-java: defines macro for class execution
-do-jar: JAR building
run: execution of project
-javadoc-build: Javadoc generation
test-report: JUnit report generation
An example of overriding the target for project execution could look like this:
<target name="run" depends="MessageConsole-impl.jar">
<exec dir="bin" executable="launcher.exe">
<arg file="${dist.jar}"/>
</exec>
</target>
Notice that the overridden target depends on the jar target and not only on
the compile target as the regular run target does. Again, for a list of available
properties which you can use, check the target you are overriding in the
nbproject/build-impl.xml file.
-->
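<!--
Another illustrative sketch, this time using the empty -post-jar hook: copying the freshly
built JAR somewhere after packaging could look like this (the "extensions" destination
directory is only an assumption, adjust it to your setup):

<target name="-post-jar">
<copy file="${dist.jar}" todir="extensions"/>
</target>
-->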
</project>
| {'content_hash': '24e1d7fbb1c311e0a6d957310afe30f3', 'timestamp': '', 'source': 'github', 'line_count': 73, 'max_line_length': 86, 'avg_line_length': 48.68493150684932, 'alnum_prop': 0.6553179516038267, 'repo_name': 'erhs-robotics/frc2014', 'id': 'c82873191b6deffcb0a38ddeca3325411162985a', 'size': '3554', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'smartdashboard/MessageConsole/build.xml', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Java', 'bytes': '45140'}, {'name': 'Shell', 'bytes': '61'}]} |
package uk.gov.hmrc.ct.box
abstract class CtBoxIdentifier(val name: String = "Unknown"){
def id:String=this.getClass.getSimpleName
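// Illustrative only: a hypothetical subclass such as
// case class AC1(value: Int) extends CtBoxIdentifier("Some box name")
// would report id == "AC1" via getSimpleName.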
}
| {'content_hash': '0cfe6d0616299ac9615577ab98ce23ca', 'timestamp': '', 'source': 'github', 'line_count': 7, 'max_line_length': 61, 'avg_line_length': 19.714285714285715, 'alnum_prop': 0.7608695652173914, 'repo_name': 'scottcutts/ct-calculations', 'id': 'c6721a6437467c359484313cb65bf37daff98acd', 'size': '741', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'src/main/scala/uk/gov/hmrc/ct/box/CtBoxIdentifier.scala', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Scala', 'bytes': '1117125'}]} |
@implementation SPModel
#pragma mark - Parsing
+ (id)parse:(NSDictionary *)params {
return [MTLJSONAdapter modelOfClass:self.class fromJSONDictionary:params error:nil];
}
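// Illustrative sketch only (SPUser is a hypothetical subclass): callers would typically write
// SPUser *user = [SPUser parse:jsonDictionary];
// and Mantle maps the JSON keys onto properties via +JSONKeyPathsByPropertyKey.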
#pragma mark - Mantle
+ (NSDictionary *)JSONKeyPathsByPropertyKey {
return @{};
}
@end
| {'content_hash': '1d43f4decf8aa9bb99a5146716a46243', 'timestamp': '', 'source': 'github', 'line_count': 15, 'max_line_length': 88, 'avg_line_length': 18.066666666666666, 'alnum_prop': 0.7306273062730627, 'repo_name': 'fousa/punto-ios', 'id': 'db0e07d76587880a1ba833d05304ada5e59b8674', 'size': '423', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Punto/SDK/Models/SPModel.m', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Objective-C', 'bytes': '37370'}, {'name': 'Ruby', 'bytes': '625'}]} |
// CHECKSTYLE:FileLength:OFF
package org.pentaho.di.core;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.text.StrBuilder;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.provider.UriParser;
import org.apache.http.conn.util.InetAddressUtils;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.laf.BasePropertyHandler;
import org.pentaho.di.version.BuildVersion;
import org.pentaho.support.encryption.Encr;
import java.awt.Font;
import java.awt.GraphicsEnvironment;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.math.BigDecimal;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Paths;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* This class is used to define a number of default values for various settings throughout Kettle. It also contains a
* number of static utility methods to make your life easier.
*
* @author Matt
* @since 07-05-2003
*
*/
public class Const {
private static Class<?> PKG = Const.class; // for i18n purposes, needed by Translator2!!
/**
* Version number
*
* @deprecated Use {@link BuildVersion#getVersion()} instead
*/
@Deprecated
public static final String VERSION = BuildVersion.getInstance().getVersion();
/**
* Copyright year
*/
public static final String COPYRIGHT_YEAR = "2015";
/**
* Release Type
*/
public enum ReleaseType {
RELEASE_CANDIDATE {
public String getMessage() {
return BaseMessages.getString( PKG, "Const.PreviewRelease.HelpAboutText" );
}
},
MILESTONE {
public String getMessage() {
return BaseMessages.getString( PKG, "Const.Candidate.HelpAboutText" );
}
},
PREVIEW {
public String getMessage() {
return BaseMessages.getString( PKG, "Const.Milestone.HelpAboutText" );
}
},
GA {
public String getMessage() {
return BaseMessages.getString( PKG, "Const.GA.HelpAboutText" );
}
};
public abstract String getMessage();
}
/**
* Sleep time waiting when buffer is empty (the default)
*/
public static final int TIMEOUT_GET_MILLIS = 50;
/**
* Sleep time waiting when buffer is full (the default)
*/
public static final int TIMEOUT_PUT_MILLIS = 50;
/**
* print update every ... lines
*/
public static final int ROWS_UPDATE = 50000;
/**
* Size of rowset: bigger = faster for large amounts of data
*/
public static final int ROWS_IN_ROWSET = 10000;
/**
* Fetch size in rows when querying a database
*/
public static final int FETCH_SIZE = 10000;
/**
* Sort size: how many rows do we sort in memory at once?
*/
public static final int SORT_SIZE = 5000;
/**
* job/trans heartbeat scheduled executor periodic interval ( in seconds )
*/
public static final int HEARTBEAT_PERIODIC_INTERVAL_IN_SECS = 10;
/**
* What's the file system's file separator on this operating system?
*/
public static final String FILE_SEPARATOR = System.getProperty( "file.separator" );
/**
* What's the path separator on this operating system?
*/
public static final String PATH_SEPARATOR = System.getProperty( "path.separator" );
/**
* CR: operating systems specific Carriage Return
*/
public static final String CR = System.getProperty( "line.separator" );
/**
* DOSCR: MS-DOS specific Carriage Return
*/
public static final String DOSCR = "\n\r";
/**
* An empty ("") String.
*/
public static final String EMPTY_STRING = "";
/**
* The Java runtime version
*/
public static final String JAVA_VERSION = System.getProperty( "java.vm.version" );
/**
* Path to the user's home directory (keep this entry above references to getKettleDirectory())
*
* @deprecated Use {@link Const#getUserHomeDirectory()} instead.
*/
@Deprecated
public static final String USER_HOME_DIRECTORY = NVL( System.getProperty( "KETTLE_HOME" ), System
.getProperty( "user.home" ) );
/**
* Path to the simple-jndi directory
*/
public static String JNDI_DIRECTORY = NVL( System.getProperty( "KETTLE_JNDI_ROOT" ), System
.getProperty( "org.osjava.sj.root" ) );
/*
* The images directory
*
* public static final String IMAGE_DIRECTORY = "/ui/images/";
*/
public static final String PLUGIN_BASE_FOLDERS_PROP = "KETTLE_PLUGIN_BASE_FOLDERS";
/**
* the default comma separated list of base plugin folders.
*/
public static final String DEFAULT_PLUGIN_BASE_FOLDERS = "plugins,"
+ ( Utils.isEmpty( getDIHomeDirectory() ) ? "" : getDIHomeDirectory() + FILE_SEPARATOR + "plugins," )
+ getKettleDirectory() + FILE_SEPARATOR + "plugins";
/**
* Default minimum date range...
*/
public static final Date MIN_DATE = new Date( -2208992400000L ); // 1900/01/01 00:00:00.000
/**
* Default maximum date range...
*/
public static final Date MAX_DATE = new Date( 7258114799468L ); // 2199/12/31 23:59:59.999
/**
* The default minimum year in a dimension date range
*/
public static final int MIN_YEAR = 1900;
/**
* The default maximum year in a dimension date range
*/
public static final int MAX_YEAR = 2199;
/**
* Specifies the number of pixels to the right we have to go in dialog boxes.
*/
public static final int RIGHT = 400;
/**
* Specifies the length (width) of fields in a number of pixels in dialog boxes.
*/
public static final int LENGTH = 350;
/**
* The margin between the different dialog components & widgets
*/
public static final int MARGIN = 4;
/**
* The default percentage of the width of screen where we consider the middle of a dialog.
*/
public static final int MIDDLE_PCT = 35;
/**
* The default width of an arrow in the Graphical Views
*/
public static final int ARROW_WIDTH = 1;
/**
* The horizontal and vertical margin of a dialog box.
*/
public static final int FORM_MARGIN = 5;
/**
* The default shadow size on the graphical view.
*/
public static final int SHADOW_SIZE = 0;
/**
* The size of relationship symbols
*/
public static final int SYMBOLSIZE = 10;
/**
* Max nr. of files to remember
*/
public static final int MAX_FILE_HIST = 9; // Having more than 9 files in the file history is not compatible with pre
// 5.0 versions
/**
* The default locale for the kettle environment (system defined)
*/
public static final Locale DEFAULT_LOCALE = Locale.getDefault();
/**
* The default decimal separator . or ,
*/
public static final char DEFAULT_DECIMAL_SEPARATOR = ( new DecimalFormatSymbols( DEFAULT_LOCALE ) )
.getDecimalSeparator();
/**
* The default grouping separator , or .
*/
public static final char DEFAULT_GROUPING_SEPARATOR = ( new DecimalFormatSymbols( DEFAULT_LOCALE ) )
.getGroupingSeparator();
/**
* The default currency symbol
*/
public static final String DEFAULT_CURRENCY_SYMBOL = ( new DecimalFormatSymbols( DEFAULT_LOCALE ) )
.getCurrencySymbol();
/**
* The default number format
*/
public static final String DEFAULT_NUMBER_FORMAT = ( (DecimalFormat) ( NumberFormat.getInstance() ) )
.toPattern();
/**
* Default string representing Null String values (empty)
*/
public static final String NULL_STRING = "";
/**
* Default string representing Null Number values (empty)
*/
public static final String NULL_NUMBER = "";
/**
* Default string representing Null Date values (empty)
*/
public static final String NULL_DATE = "";
/**
* Default string representing Null BigNumber values (empty)
*/
public static final String NULL_BIGNUMBER = "";
/**
* Default string representing Null Boolean values (empty)
*/
public static final String NULL_BOOLEAN = "";
/**
* Default string representing Null Integer values (empty)
*/
public static final String NULL_INTEGER = "";
/**
* Default string representing Null Binary values (empty)
*/
public static final String NULL_BINARY = "";
/**
* Default string representing Null Undefined values (empty)
*/
public static final String NULL_NONE = "";
/**
* A rounding mode that is not implemented in {@code BigDecimal}. The method java.lang.Math.round(double) rounds this way. <br/>
* Rounding mode to round towards the {@literal "nearest neighbor"} unless both neighbors are equidistant, in which case
* round towards the ceiling. <br/>
* Behaves as for {@code ROUND_CEILING} if the discarded fraction is ≥ 0.5; otherwise, behaves as for
* {@code ROUND_FLOOR}. Note that this is the most common arithmetical rounding mode.
*/
public static final int ROUND_HALF_CEILING = -1;
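// Illustrative examples of this mode (matching java.lang.Math.round): 2.4 -> 2, 2.5 -> 3, -2.5 -> -2, -2.6 -> -3.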
/**
* The base name of the Chef logfile
*/
public static final String CHEF_LOG_FILE = "chef";
/**
* The base name of the Spoon logfile
*/
public static final String SPOON_LOG_FILE = "spoon";
/**
* The base name of the Menu logfile
*/
public static final String MENU_LOG_FILE = "menu";
/**
* An array of date conversion formats
*/
private static String[] dateFormats;
/**
* An array of date (timeless) conversion formats
*/
private static String[] dateTimelessFormats;
/**
* An array of number conversion formats
*/
private static String[] numberFormats;
/**
* Generalized date/time format: Wherever dates are used, date and time values are organized from the most to the
* least significant. See also the method StringUtil.getFormattedDateTime().
*/
public static final String GENERALIZED_DATE_TIME_FORMAT = "yyyyddMM_hhmmss";
public static final String GENERALIZED_DATE_TIME_FORMAT_MILLIS = "yyyyddMM_hhmmssSSS";
/**
* By default we store our information in the Unicode UTF-8 character set.
*/
public static final String XML_ENCODING = "UTF-8";
/** The possible extensions a transformation XML file can have. */
public static final String[] STRING_TRANS_AND_JOB_FILTER_EXT = new String[] {
"*.ktr;*.kjb;*.xml", "*.ktr;*.xml", "*.kjb;*.xml", "*.xml", "*.*" };
/** The descriptions of the possible extensions a transformation XML file can have. */
private static String[] STRING_TRANS_AND_JOB_FILTER_NAMES;
/** The extension of a Kettle transformation XML file */
public static final String STRING_TRANS_DEFAULT_EXT = "ktr";
/** The possible extensions a transformation XML file can have. */
public static final String[] STRING_TRANS_FILTER_EXT = new String[] { "*.ktr;*.xml", "*.xml", "*.*" };
/** The descriptions of the possible extensions a transformation XML file can have. */
private static String[] STRING_TRANS_FILTER_NAMES;
/** The extension of a Kettle job XML file */
public static final String STRING_JOB_DEFAULT_EXT = "kjb";
/** The possible extensions a job XML file can have. */
public static final String[] STRING_JOB_FILTER_EXT = new String[] { "*.kjb;*.xml", "*.xml", "*.*" };
/** The descriptions of the possible extensions a job XML file can have. */
private static String[] STRING_JOB_FILTER_NAMES;
/** Name of the kettle parameters file */
public static final String KETTLE_PROPERTIES = "kettle.properties";
/** Name of the kettle shared data file */
public static final String SHARED_DATA_FILE = "shared.xml";
/** The prefix that all internal kettle variables should have */
public static final String INTERNAL_VARIABLE_PREFIX = "Internal";
/** The version number as an internal variable */
public static final String INTERNAL_VARIABLE_KETTLE_VERSION = INTERNAL_VARIABLE_PREFIX + ".Kettle.Version";
/** The build version as an internal variable */
public static final String INTERNAL_VARIABLE_KETTLE_BUILD_VERSION = INTERNAL_VARIABLE_PREFIX
+ ".Kettle.Build.Version";
/** The build date as an internal variable */
public static final String INTERNAL_VARIABLE_KETTLE_BUILD_DATE = INTERNAL_VARIABLE_PREFIX + ".Kettle.Build.Date";
/** The job filename directory */
public static final String INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY = INTERNAL_VARIABLE_PREFIX
+ ".Job.Filename.Directory";
/** The job filename name */
public static final String INTERNAL_VARIABLE_JOB_FILENAME_NAME = INTERNAL_VARIABLE_PREFIX + ".Job.Filename.Name";
/** The job name */
public static final String INTERNAL_VARIABLE_JOB_NAME = INTERNAL_VARIABLE_PREFIX + ".Job.Name";
/** The job directory */
public static final String INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY = INTERNAL_VARIABLE_PREFIX
+ ".Job.Repository.Directory";
/** The job run ID */
public static final String INTERNAL_VARIABLE_JOB_RUN_ID = INTERNAL_VARIABLE_PREFIX + ".Job.Run.ID";
/** The job run attempt nr */
public static final String INTERNAL_VARIABLE_JOB_RUN_ATTEMPTNR = INTERNAL_VARIABLE_PREFIX + ".Job.Run.AttemptNr";
/** job/trans heartbeat scheduled executor periodic interval ( in seconds ) */
public static final String VARIABLE_HEARTBEAT_PERIODIC_INTERVAL_SECS = "heartbeat.periodic.interval.seconds";
/** comma-separated list of extension point plugins for which snmp traps should be sent */
public static final String VARIABLE_MONITORING_SNMP_TRAPS_ENABLED = "monitoring.snmp.traps.enabled";
/** The current transformation directory */
public static final String INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY = INTERNAL_VARIABLE_PREFIX
+ ".Entry.Current.Directory";
/**
* All the internal transformation variables
*/
public static final String[] INTERNAL_TRANS_VARIABLES = new String[] {
Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY,
Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY,
Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME, Const.INTERNAL_VARIABLE_TRANSFORMATION_NAME,
Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY,
};
/**
* All the internal job variables
*/
public static final String[] INTERNAL_JOB_VARIABLES = new String[] {
Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY,
Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME,
Const.INTERNAL_VARIABLE_JOB_NAME, Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY,
Const.INTERNAL_VARIABLE_JOB_RUN_ID, Const.INTERNAL_VARIABLE_JOB_RUN_ATTEMPTNR, };
/*
* Deprecated variables array.
* Variables in this array will display with the prefix (deprecated) and will be moved
* to the bottom of the variables dropdown when pressing ctrl+space
* */
public static final String[] DEPRECATED_VARIABLES = new String[] {
Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY,
Const.INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME,
Const.INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY,
Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY,
Const.INTERNAL_VARIABLE_JOB_FILENAME_NAME,
Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY
};
/** The transformation filename directory */
public static final String INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_DIRECTORY = INTERNAL_VARIABLE_PREFIX
+ ".Transformation.Filename.Directory";
/** The transformation filename name */
public static final String INTERNAL_VARIABLE_TRANSFORMATION_FILENAME_NAME = INTERNAL_VARIABLE_PREFIX
+ ".Transformation.Filename.Name";
/** The transformation name */
public static final String INTERNAL_VARIABLE_TRANSFORMATION_NAME = INTERNAL_VARIABLE_PREFIX
+ ".Transformation.Name";
/** The transformation directory */
public static final String INTERNAL_VARIABLE_TRANSFORMATION_REPOSITORY_DIRECTORY = INTERNAL_VARIABLE_PREFIX
+ ".Transformation.Repository.Directory";
/** The step partition ID */
public static final String INTERNAL_VARIABLE_STEP_PARTITION_ID = INTERNAL_VARIABLE_PREFIX + ".Step.Partition.ID";
/** The step partition number */
public static final String INTERNAL_VARIABLE_STEP_PARTITION_NR = INTERNAL_VARIABLE_PREFIX
+ ".Step.Partition.Number";
/** The slave transformation number */
public static final String INTERNAL_VARIABLE_SLAVE_SERVER_NUMBER = INTERNAL_VARIABLE_PREFIX
+ ".Slave.Transformation.Number";
/** The slave server name */
public static final String INTERNAL_VARIABLE_SLAVE_SERVER_NAME = INTERNAL_VARIABLE_PREFIX + ".Slave.Server.Name";
/** The size of the cluster : number of slaves */
public static final String INTERNAL_VARIABLE_CLUSTER_SIZE = INTERNAL_VARIABLE_PREFIX + ".Cluster.Size";
/** The slave transformation number */
public static final String INTERNAL_VARIABLE_STEP_UNIQUE_NUMBER = INTERNAL_VARIABLE_PREFIX
+ ".Step.Unique.Number";
/** Is this transformation running clustered, on the master? */
public static final String INTERNAL_VARIABLE_CLUSTER_MASTER = INTERNAL_VARIABLE_PREFIX + ".Cluster.Master";
/**
* The internal clustered run ID, unique across a clustered execution, important while doing parallel clustered runs
*/
public static final String INTERNAL_VARIABLE_CLUSTER_RUN_ID = INTERNAL_VARIABLE_PREFIX + ".Cluster.Run.ID";
/** The number of unique step copies running across the cluster */
public static final String INTERNAL_VARIABLE_STEP_UNIQUE_COUNT = INTERNAL_VARIABLE_PREFIX + ".Step.Unique.Count";
/** The step name */
public static final String INTERNAL_VARIABLE_STEP_NAME = INTERNAL_VARIABLE_PREFIX + ".Step.Name";
/** The step copy nr */
public static final String INTERNAL_VARIABLE_STEP_COPYNR = INTERNAL_VARIABLE_PREFIX + ".Step.CopyNr";
/** The default maximum for the nr of lines in the GUI logs */
public static final int MAX_NR_LOG_LINES = 5000;
/** The default maximum for the nr of lines in the history views */
public static final int MAX_NR_HISTORY_LINES = 50;
/** The default fetch size for lines of history. */
public static final int HISTORY_LINES_FETCH_SIZE = 10;
/** The default log line timeout in minutes : 12 hours */
public static final int MAX_LOG_LINE_TIMEOUT_MINUTES = 12 * 60;
/** UI-agnostic flag for warnings */
public static final int WARNING = 1;
/** UI-agnostic flag for errors */
public static final int ERROR = 2;
/** UI-agnostic flag for informational messages */
public static final int INFO = 3;
public static final int SHOW_MESSAGE_DIALOG_DB_TEST_DEFAULT = 0;
public static final int SHOW_MESSAGE_DIALOG_DB_TEST_SUCCESS = 1;
public static final int SHOW_FATAL_ERROR = 2;
/**
* The margin between the text of a note and its border.
*/
public static final int NOTE_MARGIN = 5;
/**
* The default undo level for Kettle
*/
public static final int MAX_UNDO = 100;
/**
* The file that documents these variables.
*/
public static final String KETTLE_VARIABLES_FILE = "kettle-variables.xml";
/**
* If you set this environment variable you can limit the log size of all transformations and jobs that don't have the
* "log size limit" property set in their respective properties.
*/
public static final String KETTLE_LOG_SIZE_LIMIT = "KETTLE_LOG_SIZE_LIMIT";
/**
* The name of the variable that defines the log database connection by default for all transformations
*/
public static final String KETTLE_TRANS_LOG_DB = "KETTLE_TRANS_LOG_DB";
/**
* The name of the variable that defines the logging schema for all transformations
*/
public static final String KETTLE_TRANS_LOG_SCHEMA = "KETTLE_TRANS_LOG_SCHEMA";
/**
* The name of the variable that defines the logging table for all transformations
*/
public static final String KETTLE_TRANS_LOG_TABLE = "KETTLE_TRANS_LOG_TABLE";
/**
* The name of the variable that defines the log database connection by default for all jobs
*/
public static final String KETTLE_JOB_LOG_DB = "KETTLE_JOB_LOG_DB";
/**
* The name of the variable that defines the logging schema for all jobs
*/
public static final String KETTLE_JOB_LOG_SCHEMA = "KETTLE_JOB_LOG_SCHEMA";
/**
* The name of the variable that defines the timer used for detecting slave nodes.
*/
public static final String KETTLE_SLAVE_DETECTION_TIMER = "KETTLE_SLAVE_DETECTION_TIMER";
/**
* The name of the variable that defines the logging table for all jobs
*/
public static final String KETTLE_JOB_LOG_TABLE = "KETTLE_JOB_LOG_TABLE";
/**
* The name of the variable that defines the transformation performance log database connection by default for all transformations
*/
public static final String KETTLE_TRANS_PERFORMANCE_LOG_DB = "KETTLE_TRANS_PERFORMANCE_LOG_DB";
/**
* The name of the variable that defines the transformation performance log schema by default for all
* transformations
*/
public static final String KETTLE_TRANS_PERFORMANCE_LOG_SCHEMA = "KETTLE_TRANS_PERFORMANCE_LOG_SCHEMA";
/**
* The name of the variable that defines the transformation performance log table by default for all transformations
*/
public static final String KETTLE_TRANS_PERFORMANCE_LOG_TABLE = "KETTLE_TRANS_PERFORMANCE_LOG_TABLE";
/**
* The name of the variable that defines the job entry log database by default for all jobs
*/
public static final String KETTLE_JOBENTRY_LOG_DB = "KETTLE_JOBENTRY_LOG_DB";
/**
* The name of the variable that defines the job entry log schema by default for all jobs
*/
public static final String KETTLE_JOBENTRY_LOG_SCHEMA = "KETTLE_JOBENTRY_LOG_SCHEMA";
/**
* The name of the variable that defines the job entry log table by default for all jobs
*/
public static final String KETTLE_JOBENTRY_LOG_TABLE = "KETTLE_JOBENTRY_LOG_TABLE";
/**
* The name of the variable that defines the steps log database by default for all transformations
*/
public static final String KETTLE_STEP_LOG_DB = "KETTLE_STEP_LOG_DB";
/**
* The name of the variable that defines the steps log schema by default for all transformations
*/
public static final String KETTLE_STEP_LOG_SCHEMA = "KETTLE_STEP_LOG_SCHEMA";
/**
* The name of the variable that defines the steps log table by default for all transformations
*/
public static final String KETTLE_STEP_LOG_TABLE = "KETTLE_STEP_LOG_TABLE";
/**
* The name of the variable that defines the log channel log database by default for all transformations and jobs
*/
public static final String KETTLE_CHANNEL_LOG_DB = "KETTLE_CHANNEL_LOG_DB";
/**
* The name of the variable that defines the log channel log schema by default for all transformations and jobs
*/
public static final String KETTLE_CHANNEL_LOG_SCHEMA = "KETTLE_CHANNEL_LOG_SCHEMA";
/**
* The name of the variable that defines the log channel log table by default for all transformations and jobs
*/
public static final String KETTLE_CHANNEL_LOG_TABLE = "KETTLE_CHANNEL_LOG_TABLE";
/**
* The name of the variable that defines the metrics log database by default for all transformations and jobs
*/
public static final String KETTLE_METRICS_LOG_DB = "KETTLE_METRICS_LOG_DB";
/**
* The name of the variable that defines the metrics log schema by default for all transformations and jobs
*/
public static final String KETTLE_METRICS_LOG_SCHEMA = "KETTLE_METRICS_LOG_SCHEMA";
/**
* The name of the variable that defines the metrics log table by default for all transformations and jobs
*/
public static final String KETTLE_METRICS_LOG_TABLE = "KETTLE_METRICS_LOG_TABLE";
/**
* The name of the variable that defines the checkpoint log database by default for all jobs
*/
public static final String KETTLE_CHECKPOINT_LOG_DB = "KETTLE_CHECKPOINT_LOG_DB";
/**
* The name of the variable that defines the checkpoint log schema by default for all jobs
*/
public static final String KETTLE_CHECKPOINT_LOG_SCHEMA = "KETTLE_CHECKPOINT_LOG_SCHEMA";
/**
* The name of the variable that defines the checkpoint log table by default for all jobs
*/
public static final String KETTLE_CHECKPOINT_LOG_TABLE = "KETTLE_CHECKPOINT_LOG_TABLE";
/**
* Name of the environment variable to set the location of the shared object file (xml) for transformations and jobs
*/
public static final String KETTLE_SHARED_OBJECTS = "KETTLE_SHARED_OBJECTS";
/**
* System wide flag to drive the evaluation of null in ValueMeta. If this setting is set to "Y", an empty string and
* null are different. Otherwise they are not.
*/
public static final String KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL = "KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL";
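// Typically set in kettle.properties; an illustrative entry would be: KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL=Y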
/**
* This flag will prevent Kettle from converting {@code null} strings to empty strings in {@link org.pentaho.di.core.row.value.ValueMetaBase}
* The default value is {@code false}.
*/
public static final String KETTLE_DO_NOT_NORMALIZE_NULL_STRING_TO_EMPTY = "KETTLE_DO_NOT_NORMALIZE_NULL_STRING_TO_EMPTY";
/**
* This flag will force to return the original string with only spaces instead of an empty string.
*/
public static final String KETTLE_DO_NOT_NORMALIZE_SPACES_ONLY_STRING_TO_EMPTY = "KETTLE_DO_NOT_NORMALIZE_SPACES_ONLY_STRING_TO_EMPTY";
/**
* This flag will prevent Kettle from yielding {@code null} as the value of an empty XML tag in {@link org.pentaho.di.core.xml.XMLHandler}
* The default value is {@code false} and an empty XML tag will produce a {@code null} value.
*/
public static final String KETTLE_XML_EMPTY_TAG_YIELDS_EMPTY_VALUE = "KETTLE_XML_EMPTY_TAG_YIELDS_EMPTY_VALUE";
/**
* This flag will cause the "Get XML data" step to yield null values on missing elements and empty values on empty elements when set to "Y".
* By default, both empty elements and missing elements will yield empty values.
*/
public static final String KETTLE_XML_MISSING_TAG_YIELDS_NULL_VALUE = "KETTLE_XML_MISSING_TAG_YIELDS_NULL_VALUE";
/**
* System wide flag to allow non-strict string to number conversion for backward compatibility. If this setting is set
* to "Y", a string starting with digits will be converted successfully into a number. (example: 192.168.1.1 will be
* converted into 192 or 192.168 depending on the decimal symbol). The default (N) will be to throw an error if
* non-numeric symbols are found in the string.
*/
public static final String KETTLE_LENIENT_STRING_TO_NUMBER_CONVERSION =
"KETTLE_LENIENT_STRING_TO_NUMBER_CONVERSION";
/**
* System wide flag to ignore timezone while writing date/timestamp value to the database. See PDI-10749 for details.
*/
public static final String KETTLE_COMPATIBILITY_DB_IGNORE_TIMEZONE = "KETTLE_COMPATIBILITY_DB_IGNORE_TIMEZONE";
/**
* System wide flag to use the root path prefix for a directory reference. See PDI-6779 for details.
*/
public static final String KETTLE_COMPATIBILITY_IMPORT_PATH_ADDITION_ON_VARIABLES = "KETTLE_COMPATIBILITY_IMPORT_PATH_ADDITION_ON_VARIABLES";
/**
* System wide flag to ignore logging table. See BACKLOG-15706 for details.
*/
public static final String KETTLE_COMPATIBILITY_IGNORE_TABLE_LOGGING = "KETTLE_COMPATIBILITY_IGNORE_TABLE_LOGGING";
/**
* System wide flag to set or not append and header options dependency on Text file output step. See PDI-5252 for
* details.
*/
public static final String KETTLE_COMPATIBILITY_TEXT_FILE_OUTPUT_APPEND_NO_HEADER =
"KETTLE_COMPATIBILITY_TEXT_FILE_OUTPUT_APPEND_NO_HEADER";
/**
* System wide flag to control behavior of the merge rows (diff) step in case of "identical" comparison. (PDI-736)
* 'Y' preserves the old behavior and takes the fields from the reference stream
* 'N' enables the documented behavior and takes the fields from the comparison stream (correct behavior)
*/
public static final String KETTLE_COMPATIBILITY_MERGE_ROWS_USE_REFERENCE_STREAM_WHEN_IDENTICAL =
"KETTLE_COMPATIBILITY_MERGE_ROWS_USE_REFERENCE_STREAM_WHEN_IDENTICAL";
/**
* System wide flag to control behavior of the Memory Group By step in case of SUM and AVERAGE aggregation. (PDI-5537)
* 'Y' preserves the old behavior and always returns a Number type for SUM and Average aggregations
* 'N' enables the documented behavior of returning the same type as the input fields use (correct behavior).
*/
public static final String KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE =
"KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE";
/**
* System wide flag to control behavior of the ExecuteTransformationStep and ExecuteJobStep when a file is specified.
* This is only used when PDI is connected to a repository.
* 'Y' A file saved in the repository may be specified with or without its extension.
* 'N' The extension should not be specified; in other words, only the name of the file as saved in the repository should be used.
*/
public static final String KETTLE_COMPATIBILITY_INVOKE_FILES_WITH_OR_WITHOUT_FILE_EXTENSION =
"KETTLE_COMPATIBILITY_INVOKE_FILES_WITH_OR_WITHOUT_FILE_EXTENSION";
/**
* System-wide flag to keep legacy behavior on json input step. See PDI-19445 and PDI-18521 for details.
*/
public static final String KETTLE_COMPATIBILITY_JSON_INPUT_LEGACY_MODE = "KETTLE_COMPATIBILITY_JSON_INPUT_LEGACY_MODE";
/**
* You can use this variable to speed up hostname lookup.
* Hostname lookup is performed by Kettle so that it is capable of logging the server on which a job or transformation is executed.
*/
public static final String KETTLE_SYSTEM_HOSTNAME = "KETTLE_SYSTEM_HOSTNAME";
/**
* System wide flag to set the maximum number of log lines that are kept internally by Kettle. Set to 0 to keep all
* log lines (default)
*/
public static final String KETTLE_MAX_LOG_SIZE_IN_LINES = "KETTLE_MAX_LOG_SIZE_IN_LINES";
/**
* System wide flag to set the maximum age (in minutes) of a log line while being kept internally by Kettle. Set to 0
* to keep all log lines indefinitely (default)
*/
public static final String KETTLE_MAX_LOG_TIMEOUT_IN_MINUTES = "KETTLE_MAX_LOG_TIMEOUT_IN_MINUTES";
/**
* System wide flag to determine whether standard error will be redirected to Kettle logging facilities. Will redirect
* if the value is equal ignoring case to the string "Y"
*/
public static final String KETTLE_REDIRECT_STDERR = "KETTLE_REDIRECT_STDERR";
/**
* System wide flag to determine whether standard out will be redirected to Kettle logging facilities. Will redirect
* if the value is equal ignoring case to the string "Y"
*/
public static final String KETTLE_REDIRECT_STDOUT = "KETTLE_REDIRECT_STDOUT";
/**
* This environment variable will set a time-out after which waiting, completed or stopped transformations and jobs
* will be automatically cleaned up. The default value is 1440 (one day).
*/
public static final String KETTLE_CARTE_OBJECT_TIMEOUT_MINUTES = "KETTLE_CARTE_OBJECT_TIMEOUT_MINUTES";
/**
* System wide parameter: the maximum number of step performance snapshots to keep in memory. Set to 0 to keep all
* snapshots indefinitely (default)
*/
public static final String KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT = "KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT";
/**
* A variable to configure the maximum number of job trackers kept in memory.
*/
public static final String KETTLE_MAX_JOB_TRACKER_SIZE = "KETTLE_MAX_JOB_TRACKER_SIZE";
/**
* A variable to configure the maximum number of job entry results kept in memory for logging purposes.
*/
public static final String KETTLE_MAX_JOB_ENTRIES_LOGGED = "KETTLE_MAX_JOB_ENTRIES_LOGGED";
/**
* A variable to configure the maximum number of logging registry entries kept in memory for logging purposes.
*/
public static final String KETTLE_MAX_LOGGING_REGISTRY_SIZE = "KETTLE_MAX_LOGGING_REGISTRY_SIZE";
/**
* A variable to configure the logging registry's purge timer which will trigger the registry to cleanup entries.
*/
public static final String KETTLE_LOGGING_REGISTRY_PURGE_TIMEOUT = "KETTLE_LOGGING_REGISTRY_PURGE_TIMEOUT";
/**
* A variable to configure the kettle log tab refresh delay.
*/
public static final String KETTLE_LOG_TAB_REFRESH_DELAY = "KETTLE_LOG_TAB_REFRESH_DELAY";
/**
* A variable to configure the kettle log tab refresh period.
*/
public static final String KETTLE_LOG_TAB_REFRESH_PERIOD = "KETTLE_LOG_TAB_REFRESH_PERIOD";
/**
   * The name of the system wide variable that can contain the name of the SAP Connection factory for the test button in
   * the DB dialog. This defaults to {@link #KETTLE_SAP_CONNECTION_FACTORY_DEFAULT_NAME}.
*/
public static final String KETTLE_SAP_CONNECTION_FACTORY = "KETTLE_SAP_CONNECTION_FACTORY";
/**
* The default SAP ERP connection factory
*/
public static final String KETTLE_SAP_CONNECTION_FACTORY_DEFAULT_NAME =
"org.pentaho.di.trans.steps.sapinput.sap.SAPConnectionFactory";
/**
* Name of the environment variable to specify additional classes to scan for plugin annotations
*/
public static final String KETTLE_PLUGIN_CLASSES = "KETTLE_PLUGIN_CLASSES";
/**
   * Name of the environment variable to specify additional packages to scan for plugin annotations (warning: slow!)
*/
public static final String KETTLE_PLUGIN_PACKAGES = "KETTLE_PLUGIN_PACKAGES";
/**
   * Name of the environment variable that contains the transformation rowset size. This overrides the value set in the
   * transformation settings.
*/
public static final String KETTLE_TRANS_ROWSET_SIZE = "KETTLE_TRANS_ROWSET_SIZE";
/**
* A general initial version comment
*/
public static final String VERSION_COMMENT_INITIAL_VERSION = "Creation of initial version";
/**
* A general edit version comment
*/
public static final String VERSION_COMMENT_EDIT_VERSION = "Modification by user";
/**
* The XML file that contains the list of native Kettle steps
*/
public static final String XML_FILE_KETTLE_STEPS = "kettle-steps.xml";
/**
* The name of the environment variable that will contain the alternative location of the kettle-steps.xml file
*/
public static final String KETTLE_CORE_STEPS_FILE = "KETTLE_CORE_STEPS_FILE";
/**
* The XML file that contains the list of native partition plugins
*/
public static final String XML_FILE_KETTLE_PARTITION_PLUGINS = "kettle-partition-plugins.xml";
/**
* The name of the environment variable that will contain the alternative location of the kettle-job-entries.xml file
*/
public static final String KETTLE_CORE_JOBENTRIES_FILE = "KETTLE_CORE_JOBENTRIES_FILE";
/**
* The XML file that contains the list of native Kettle Carte Servlets
*/
public static final String XML_FILE_KETTLE_SERVLETS = "kettle-servlets.xml";
/**
* The XML file that contains the list of native Kettle value metadata plugins
*/
public static final String XML_FILE_KETTLE_VALUEMETA_PLUGINS = "kettle-valuemeta-plugins.xml";
/**
* The XML file that contains the list of native Kettle two-way password encoder plugins
*/
@SuppressWarnings( "squid:S2068" )
public static final String XML_FILE_KETTLE_PASSWORD_ENCODER_PLUGINS = Encr.XML_FILE_KETTLE_PASSWORD_ENCODER_PLUGINS;
/**
* The name of the environment variable that will contain the alternative location of the kettle-valuemeta-plugins.xml
* file
*/
public static final String KETTLE_VALUEMETA_PLUGINS_FILE = "KETTLE_VALUEMETA_PLUGINS_FILE";
/**
* Specifies the password encoding plugin to use by ID (Kettle is the default).
*/
@SuppressWarnings( "squid:S2068" )
public static final String KETTLE_PASSWORD_ENCODER_PLUGIN = Encr.KETTLE_PASSWORD_ENCODER_PLUGIN;
/**
* The name of the environment variable that will contain the alternative location of the kettle-password-encoder-plugins.xml
* file
*/
@SuppressWarnings( "squid:S2068" )
public static final String KETTLE_PASSWORD_ENCODER_PLUGINS_FILE = Encr.KETTLE_PASSWORD_ENCODER_PLUGINS_FILE;
/**
* The name of the Kettle encryption seed environment variable for the KettleTwoWayPasswordEncoder class
*/
@SuppressWarnings( "squid:S2068" )
public static final String KETTLE_TWO_WAY_PASSWORD_ENCODER_SEED = Encr.KETTLE_TWO_WAY_PASSWORD_ENCODER_SEED;
/**
* The XML file that contains the list of native Kettle logging plugins
*/
public static final String XML_FILE_KETTLE_LOGGING_PLUGINS = "kettle-logging-plugins.xml";
/**
* The name of the environment variable that will contain the alternative location of the kettle-logging-plugins.xml
* file
*/
public static final String KETTLE_LOGGING_PLUGINS_FILE = "KETTLE_LOGGING_PLUGINS_FILE";
/**
* The name of the environment variable that will contain the alternative location of the kettle-servlets.xml file
*/
public static final String KETTLE_CORE_SERVLETS_FILE = "KETTLE_CORE_SERVLETS_FILE";
/**
* The name of the variable that optionally contains an alternative rowset get timeout (in ms). This only makes a
* difference for extremely short lived transformations.
*/
public static final String KETTLE_ROWSET_GET_TIMEOUT = "KETTLE_ROWSET_GET_TIMEOUT";
/**
* The name of the variable that optionally contains an alternative rowset put timeout (in ms). This only makes a
* difference for extremely short lived transformations.
*/
public static final String KETTLE_ROWSET_PUT_TIMEOUT = "KETTLE_ROWSET_PUT_TIMEOUT";
/**
* Set this variable to Y if you want to test a more efficient batching row set. (default = N)
*/
public static final String KETTLE_BATCHING_ROWSET = "KETTLE_BATCHING_ROWSET";
/**
* Set this variable to limit max number of files the Text File Output step can have open at one time.
*/
public static final String KETTLE_FILE_OUTPUT_MAX_STREAM_COUNT = "KETTLE_FILE_OUTPUT_MAX_STREAM_COUNT";
/**
* This variable contains the number of milliseconds between flushes of all open files in the Text File Output step.
*/
public static final String KETTLE_FILE_OUTPUT_MAX_STREAM_LIFE = "KETTLE_FILE_OUTPUT_MAX_STREAM_LIFE";
/**
* Set this variable to Y to disable standard Kettle logging to the console. (stdout)
*/
public static final String KETTLE_DISABLE_CONSOLE_LOGGING = "KETTLE_DISABLE_CONSOLE_LOGGING";
/**
   * Set this variable to the intended repository name (as defined in repositories.xml)
*/
public static final String KETTLE_REPOSITORY = "KETTLE_REPOSITORY";
/**
   * Set this variable to the intended username to pass as repository credentials
*/
public static final String KETTLE_USER = "KETTLE_USER";
/**
   * Set this variable to the intended password to pass as repository credentials
*/
@SuppressWarnings( "squid:S2068" )
public static final String KETTLE_PASSWORD = "KETTLE_PASSWORD";
/**
* The XML file that contains the list of native Kettle job entries
*/
public static final String XML_FILE_KETTLE_JOB_ENTRIES = "kettle-job-entries.xml";
/**
* The XML file that contains the list of native Kettle repository types (DB, File, etc)
*/
public static final String XML_FILE_KETTLE_REPOSITORIES = "kettle-repositories.xml";
/**
* The XML file that contains the list of native Kettle database types (MySQL, Oracle, etc)
*/
public static final String XML_FILE_KETTLE_DATABASE_TYPES = "kettle-database-types.xml";
/**
* The XML file that contains the list of native Kettle compression providers (None, ZIP, GZip, etc.)
*/
public static final String XML_FILE_KETTLE_COMPRESSION_PROVIDERS = "kettle-compression-providers.xml";
/**
   * The XML file that contains the list of native Kettle authentication providers
*/
public static final String XML_FILE_KETTLE_AUTHENTICATION_PROVIDERS = "kettle-authentication-providers.xml";
/**
* The XML file that contains the list of native extension points (None by default, this is mostly for OEM purposes)
*/
public static final String XML_FILE_KETTLE_EXTENSION_POINTS = "kettle-extension-points.xml";
/**
* The XML file that contains the list of native extension points (None by default, this is mostly for OEM purposes)
*/
public static final String XML_FILE_KETTLE_REGISTRY_EXTENSIONS = "kettle-registry-extensions.xml";
/**
* The XML file that contains the list of lifecycle listeners
*/
public static final String XML_FILE_KETTLE_LIFECYCLE_LISTENERS = "kettle-lifecycle-listeners.xml";
/**
* The XML file that contains the list of native engines
*/
public static final String XML_FILE_KETTLE_ENGINES = "kettle-engines.xml";
/**
* the value the Pan JVM should return on exit.
*/
public static final String KETTLE_TRANS_PAN_JVM_EXIT_CODE = "KETTLE_TRANS_PAN_JVM_EXIT_CODE";
/**
* The name of the variable containing an alternative default number format
*/
public static final String KETTLE_DEFAULT_NUMBER_FORMAT = "KETTLE_DEFAULT_NUMBER_FORMAT";
/**
* The name of the variable containing an alternative default bignumber format
*/
public static final String KETTLE_DEFAULT_BIGNUMBER_FORMAT = "KETTLE_DEFAULT_BIGNUMBER_FORMAT";
/**
* The name of the variable containing an alternative default integer format
*/
public static final String KETTLE_DEFAULT_INTEGER_FORMAT = "KETTLE_DEFAULT_INTEGER_FORMAT";
/**
* The name of the variable containing an alternative default date format
*/
public static final String KETTLE_DEFAULT_DATE_FORMAT = "KETTLE_DEFAULT_DATE_FORMAT";
// Null values tweaks
public static final String KETTLE_AGGREGATION_MIN_NULL_IS_VALUED = "KETTLE_AGGREGATION_MIN_NULL_IS_VALUED";
public static final String KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO = "KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO";
/**
* The name of the variable containing an alternative default timestamp format
*/
public static final String KETTLE_DEFAULT_TIMESTAMP_FORMAT = "KETTLE_DEFAULT_TIMESTAMP_FORMAT";
/**
   * Variable that controls whether the enclosure symbol is removed after splitting the string
*/
public static final String KETTLE_SPLIT_FIELDS_REMOVE_ENCLOSURE = "KETTLE_SPLIT_FIELDS_REMOVE_ENCLOSURE";
/**
* Variable that is responsible for checking empty field names and types.
*/
public static final String KETTLE_ALLOW_EMPTY_FIELD_NAMES_AND_TYPES = "KETTLE_ALLOW_EMPTY_FIELD_NAMES_AND_TYPES";
/**
   * Set this variable to false to preserve global log variables defined in the transformation / job Properties -> Log panel.
   * Setting it to true will clear all global log variables when exporting a transformation / job.
*/
public static final String KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT = "KETTLE_GLOBAL_LOG_VARIABLES_CLEAR_ON_EXPORT";
/**
* Property controls the capacity of the transFinishedBlockingQueue in Trans.
*/
public static final String KETTLE_TRANS_FINISHED_BLOCKING_QUEUE_SIZE = "KETTLE_TRANS_FINISHED_BLOCKING_QUEUE_SIZE";
/**
* Compatibility settings for {@link org.pentaho.di.core.row.ValueDataUtil#hourOfDay(ValueMetaInterface, Object)}.
*
* Switches off the fix for calculation of timezone decomposition.
*/
public static final String KETTLE_COMPATIBILITY_CALCULATION_TIMEZONE_DECOMPOSITION =
"KETTLE_COMPATIBILITY_CALCULATION_TIMEZONE_DECOMPOSITION";
/**
* Compatibility settings for setNrErrors
*/
// see PDI-10270 for details.
public static final String KETTLE_COMPATIBILITY_SET_ERROR_ON_SPECIFIC_JOB_ENTRIES =
"KETTLE_COMPATIBILITY_SET_ERROR_ON_SPECIFIC_JOB_ENTRIES";
// See PDI-15781 for details
public static final String KETTLE_COMPATIBILITY_SEND_RESULT_XML_WITH_FULL_STATUS = "KETTLE_COMPATIBILITY_SEND_RESULT_XML_WITH_FULL_STATUS";
// See PDI-16388 for details
public static final String KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS = "KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS";
// See PDI-17203 for details
public static final String KETTLE_COMPATIBILITY_XML_OUTPUT_NULL_VALUES = "KETTLE_COMPATIBILITY_XML_OUTPUT_NULL_VALUES";
// See PDI-17980 for details
public static final String KETTLE_COMPATIBILITY_USE_JDBC_METADATA = "KETTLE_COMPATIBILITY_USE_JDBC_METADATA";
// See PDI-18470 for details
public static final String KETTLE_COMPATIBILITY_DB_LOOKUP_USE_FIELDS_RETURN_TYPE_CHOSEN_IN_UI = "KETTLE_COMPATIBILITY_DB_LOOKUP_USE_FIELDS_RETURN_TYPE_CHOSEN_IN_UI";
  // See PDI-18739 for details
public static final String KETTLE_COMPATIBILITY_TEXT_FILE_INPUT_USE_LENIENT_ENCLOSURE_HANDLING = "KETTLE_COMPATIBILITY_TEXT_FILE_INPUT_USE_LENIENT_ENCLOSURE_HANDLING";
// See PDI-18810 for details
public static final String KETTLE_COMPATIBILITY_MDI_INJECTED_FILE_ALWAYS_IN_FILESYSTEM = "KETTLE_COMPATIBILITY_MDI_INJECTED_FILE_ALWAYS_IN_FILESYSTEM";
// See PDI-19138 for details
public static final String KETTLE_JSON_INPUT_INCLUDE_NULLS = "KETTLE_JSON_INPUT_INCLUDE_NULLS";
/**
   * This property, when set to Y, forces the same output file to be used even when splitting is required.
   * See PDI-19064 for details
*/
public static final String KETTLE_JSON_OUTPUT_FORCE_SAME_OUTPUT_FILE = "KETTLE_JSON_OUTPUT_FORCE_SAME_OUTPUT_FILE";
/**
* The XML file that contains the list of native import rules
*/
public static final String XML_FILE_KETTLE_IMPORT_RULES = "kettle-import-rules.xml";
private static String[] emptyPaddedSpacesStrings;
/**
* The release type of this compilation
*/
public static final ReleaseType RELEASE = ReleaseType.GA;
/**
* The system environment variable indicating where the alternative location for the Pentaho metastore folder is
* located.
*/
public static final String PENTAHO_METASTORE_FOLDER = "PENTAHO_METASTORE_FOLDER";
/**
* The name of the local client MetaStore
*
*/
public static final String PENTAHO_METASTORE_NAME = "Pentaho Local Client Metastore";
/**
* A variable to configure turning on/off detailed subjects in log.
*/
public static final String KETTLE_LOG_MARK_MAPPINGS = "KETTLE_LOG_MARK_MAPPINGS";
/**
* A variable to configure jetty option: acceptors for Carte
*/
public static final String KETTLE_CARTE_JETTY_ACCEPTORS = "KETTLE_CARTE_JETTY_ACCEPTORS";
/**
* A variable to configure jetty option: acceptQueueSize for Carte
*/
public static final String KETTLE_CARTE_JETTY_ACCEPT_QUEUE_SIZE = "KETTLE_CARTE_JETTY_ACCEPT_QUEUE_SIZE";
/**
* A variable to configure jetty option: lowResourcesMaxIdleTime for Carte
*/
public static final String KETTLE_CARTE_JETTY_RES_MAX_IDLE_TIME = "KETTLE_CARTE_JETTY_RES_MAX_IDLE_TIME";
/**
* A variable to configure refresh for carte job/trans status page
*/
public static final String KETTLE_CARTE_REFRESH_STATUS = "KETTLE_CARTE_REFRESH_STATUS";
/**
   * A variable to configure s3vfs to use a temporary file when uploading data to Amazon S3.
*/
public static final String S3VFS_USE_TEMPORARY_FILE_ON_UPLOAD_DATA = "s3.vfs.useTempFileOnUploadData";
/**
   * A variable to configure the tab size.
*/
public static final String KETTLE_MAX_TAB_LENGTH = "KETTLE_MAX_TAB_LENGTH";
/**
   * A variable to control whether log entries of log object type GENERAL are written to the DI logger.
*/
public static final String KETTLE_LOG_GENERAL_OBJECTS_TO_DI_LOGGER = "KETTLE_LOG_GENERAL_OBJECTS_TO_DI_LOGGER";
/**
* A variable to configure VFS USER_DIR_IS_ROOT option: should be "true" or "false"
* {@linkplain org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder#USER_DIR_IS_ROOT}
*/
public static final String VFS_USER_DIR_IS_ROOT = "vfs.sftp.userDirIsRoot";
/**
* A variable to configure environment variables to ignore when initializing shell step
* */
public static final String SHELL_STEP_ENVIRONMENT_VARIABLES_TO_IGNORE = "SHELL_STEP_ENVIRONMENT_VARIABLES_TO_IGNORE";
/**
* The default value for the variable to configure environment variables to ignore when initializing shell step
* */
public static final String SHELL_STEP_ENVIRONMENT_VARIABLES_TO_IGNORE_DEFAULT = "";
/**
* <p>A variable to configure the minimum allowed ratio between de- and inflated bytes to detect a zipbomb.</p>
* <p>If not set or if the configured value is invalid, it defaults to {@value
* #KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT}</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT
* @see #KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT_STRING
*/
public static final String KETTLE_ZIP_MIN_INFLATE_RATIO = "KETTLE_ZIP_MIN_INFLATE_RATIO";
/**
* <p>The default value for the {@link #KETTLE_ZIP_MIN_INFLATE_RATIO} as a Double.</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MIN_INFLATE_RATIO
* @see #KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT_STRING
*/
public static final Double KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT = 0.01d;
/**
* <p>The default value for the {@link #KETTLE_ZIP_MIN_INFLATE_RATIO} as a String.</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MIN_INFLATE_RATIO
* @see #KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT
*/
public static final String KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT_STRING =
String.valueOf( KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT );
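  // Illustration only (not part of the original source): here the "ratio between de- and inflated bytes" is
  // assumed to mean compressed size divided by uncompressed size, so the default of 0.01 tolerates up to
  // roughly 100x expansion. A minimal sketch of such a check; compressedBytes and uncompressedBytes are
  // hypothetical counters supplied by whatever unzips the entry:
  //
  //   double minRatio = Const.toDouble(
  //     System.getProperty( Const.KETTLE_ZIP_MIN_INFLATE_RATIO, Const.KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT_STRING ),
  //     Const.KETTLE_ZIP_MIN_INFLATE_RATIO_DEFAULT );
  //   double ratio = (double) compressedBytes / (double) uncompressedBytes;
  //   if ( ratio < minRatio ) {
  //     throw new IOException( "Potential zip bomb: inflate ratio " + ratio + " is below " + minRatio );
  //   }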
/**
* <p>A variable to configure the maximum file size of a single zip entry.</p>
* <p>If not set or if the configured value is invalid, it defaults to {@value #KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT}</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT
* @see #KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT_STRING
*/
public static final String KETTLE_ZIP_MAX_ENTRY_SIZE = "KETTLE_ZIP_MAX_ENTRY_SIZE";
/**
* <p>The default value for the {@link #KETTLE_ZIP_MAX_ENTRY_SIZE} as a Long.</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MAX_ENTRY_SIZE
* @see #KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT_STRING
*/
public static final Long KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT = 0xFFFFFFFFL;
/**
* <p>The default value for the {@link #KETTLE_ZIP_MAX_ENTRY_SIZE} as a String.</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MAX_ENTRY_SIZE
* @see #KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT
*/
public static final String KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT_STRING =
String.valueOf( KETTLE_ZIP_MAX_ENTRY_SIZE_DEFAULT );
/**
* <p>A variable to configure the maximum number of characters of text that are extracted before an exception is
* thrown during extracting text from documents.</p>
* <p>If not set or if the configured value is invalid, it defaults to {@value #KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT}</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT
* @see #KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT_STRING
*/
public static final String KETTLE_ZIP_MAX_TEXT_SIZE = "KETTLE_ZIP_MAX_TEXT_SIZE";
/**
* <p>The default value for the {@link #KETTLE_ZIP_MAX_TEXT_SIZE} as a Long.</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MAX_TEXT_SIZE
* @see #KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT_STRING
*/
public static final Long KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT = 10 * 1024 * 1024L;
/**
   * <p>The default value for the {@link #KETTLE_ZIP_MAX_TEXT_SIZE} as a String.</p>
* <p>Check PDI-17586 for more details.</p>
*
* @see #KETTLE_ZIP_MAX_TEXT_SIZE
* @see #KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT
*/
public static final String KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT_STRING =
String.valueOf( KETTLE_ZIP_MAX_TEXT_SIZE_DEFAULT );
/**
* <p>The default value for the {@link #KETTLE_ZIP_NEGATIVE_MIN_INFLATE} as a Double.</p>
* <p>Check PDI-18489 for more details.</p>
*/
public static final Double KETTLE_ZIP_NEGATIVE_MIN_INFLATE = -1.0d;
/**
* <p>This environment variable is used to define whether the check of xlsx zip bomb is performed. This is set to false by default.</p>
*/
public static final String KETTLE_XLSX_ZIP_BOMB_CHECK = "KETTLE_XLSX_ZIP_BOMB_CHECK";
private static final String KETTLE_XLSX_ZIP_BOMB_CHECK_DEFAULT = "false";
public static boolean checkXlsxZipBomb() {
String checkZipBomb = System.getProperty( KETTLE_XLSX_ZIP_BOMB_CHECK, KETTLE_XLSX_ZIP_BOMB_CHECK_DEFAULT );
return Boolean.valueOf( checkZipBomb );
}
/**
* <p>A variable to configure if the S3 input / output steps should use the Amazon Default Credentials Provider Chain
* even if access credentials are specified within the transformation.</p>
*/
public static final String KETTLE_USE_AWS_DEFAULT_CREDENTIALS = "KETTLE_USE_AWS_DEFAULT_CREDENTIALS";
/**
   * <p>This environment variable is used by streaming consumer steps to limit the total number of concurrent batches across transformations.</p>
*/
public static final String SHARED_STREAMING_BATCH_POOL_SIZE = "SHARED_STREAMING_BATCH_POOL_SIZE";
/**
* <p>This environment variable is used to specify a location used to deploy a shim driver into PDI.</p>
*/
public static final String SHIM_DRIVER_DEPLOYMENT_LOCATION = "SHIM_DRIVER_DEPLOYMENT_LOCATION";
private static final String DEFAULT_DRIVERS_DIR = "DEFAULT";
public static String getShimDriverDeploymentLocation() {
String driversLocation = System.getProperty( Const.SHIM_DRIVER_DEPLOYMENT_LOCATION, DEFAULT_DRIVERS_DIR );
if ( driversLocation.equals( DEFAULT_DRIVERS_DIR ) ) {
String karafDir = System.getProperty( "karaf.home" );
return Paths.get( karafDir ).getParent().getParent().toString() + File.separator + "drivers";
}
return driversLocation;
}
/**
   * <p>This environment variable is used to specify how many attempts are made before failing to read an XML document
   * from within a Zip file during multi-threaded execution using XMLHandler.</p>
*/
public static final String KETTLE_RETRY_OPEN_XML_STREAM = "KETTLE_RETRY_OPEN_XML_STREAM";
/**
* <p>This environment variable is used by XSD validation steps to enable or disable external entities.</p>
* <p>By default external entities are allowed.</p>
*/
public static final String ALLOW_EXTERNAL_ENTITIES_FOR_XSD_VALIDATION = "ALLOW_EXTERNAL_ENTITIES_FOR_XSD_VALIDATION";
public static final String ALLOW_EXTERNAL_ENTITIES_FOR_XSD_VALIDATION_DEFAULT = "true";
/**
* <p>This environment variable is used to define the default division result precision between BigDecimals.</p>
* <p>By default, and when precision is -1, precision is unlimited.</p>
*/
public static final String KETTLE_BIGDECIMAL_DIVISION_PRECISION = "KETTLE_BIGDECIMAL_DIVISION_PRECISION";
public static final String KETTLE_BIGDECIMAL_DIVISION_PRECISION_DEFAULT = "-1";
/**
* <p>This environment variable is used to define the default division result rounding mode between BigDecimals.</p>
   * <p>By default, the rounding mode is HALF_EVEN.</p>
*/
public static final String KETTLE_BIGDECIMAL_DIVISION_ROUNDING_MODE = "KETTLE_BIGDECIMAL_DIVISION_ROUNDING_MODE";
public static final String KETTLE_BIGDECIMAL_DIVISION_ROUNDING_MODE_DEFAULT = "HALF_EVEN";
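  // Usage sketch (not part of the original source): how the two properties above could be combined into a
  // MathContext for a BigDecimal division. Reading them via System.getProperty is an assumption made only
  // for this illustration; a and b are hypothetical BigDecimal operands.
  //
  //   int precision = Const.toInt(
  //     System.getProperty( Const.KETTLE_BIGDECIMAL_DIVISION_PRECISION,
  //       Const.KETTLE_BIGDECIMAL_DIVISION_PRECISION_DEFAULT ), -1 );
  //   java.math.RoundingMode mode = java.math.RoundingMode.valueOf(
  //     System.getProperty( Const.KETTLE_BIGDECIMAL_DIVISION_ROUNDING_MODE,
  //       Const.KETTLE_BIGDECIMAL_DIVISION_ROUNDING_MODE_DEFAULT ) );
  //   BigDecimal result = ( precision < 0 )
  //     ? a.divide( b )  // unlimited precision; throws ArithmeticException for non-terminating results
  //     : a.divide( b, new java.math.MathContext( precision, mode ) );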
/**
* <p>This environment variable is used to define how Timestamp should be converted to a number and vice-versa.</p>
* <p>Three options exist:</p>
* <ul>
* <li>{@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY}: converting a Timestamp to a number uses
* milliseconds but converting a number to Timestamp assumes the value is in nanoseconds</li>
* <li>{@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS}: both Timestamp to number and number to
* Timestamp use milliseconds</li>
* <li>{@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS}: both Timestamp to number and number to
* Timestamp use nanoseconds</li>
* </ul>
* <p>The default is {@value #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_DEFAULT}.</p>
*
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_DEFAULT
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS
*/
public static final String KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE = "KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE";
/**
* <p>The value to use for setting the {@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE} as it behaved on former
* versions.</p>
*
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS
*/
public static final String KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY = "LEGACY";
/**
* <p>The value to use for setting the {@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE} to use milliseconds.</p>
*
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS
*/
public static final String KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS = "MILLISECONDS";
/**
* <p>The value to use for setting the {@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE} to use nanoseconds.</p>
*
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS
*/
public static final String KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS = "NANOSECONDS";
/**
* <p>The default value for the {@link #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE}.</p>
*
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_MILLISECONDS
* @see #KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_NANOSECONDS
*/
public static final String KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_DEFAULT =
KETTLE_TIMESTAMP_NUMBER_CONVERSION_MODE_LEGACY;
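  // Worked example (not part of the original source) of the three modes described above, for a Timestamp
  // lying 1.5 seconds after the epoch:
  //
  //   MILLISECONDS : Timestamp -> 1_500         and 1_500         -> Timestamp of 1.5 s
  //   NANOSECONDS  : Timestamp -> 1_500_000_000 and 1_500_000_000 -> Timestamp of 1.5 s
  //   LEGACY       : Timestamp -> 1_500 (milliseconds), but a number read back is interpreted as
  //                  nanoseconds, so 1_500 becomes a Timestamp only 1.5 microseconds after the epoch.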
/**
* This environment variable will be used to determine whether file URI strings returned from input steps are returned
* encoded (spaces and other special characters escaped) or decoded (default legacy behavior).
*/
public static final String KETTLE_RETURN_ESCAPED_URI_STRINGS = "KETTLE_RETURN_ESCAPED_URI_STRINGS";
/**
   * <p>This environment variable is used to define which calculation method is to be used by the 'Add a Checksum'
* step.</p>
* <p>Three options exist:</p>
* <ul>
* <li>{@link #KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES}: calculate Checksum based on Byte representation of
* fields; as in versions since 8.1</li>
* <li>{@link #KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS}: calculate Checksum based on Pentaho String
* representation of fields (applying format masks); as in versions until 7.1</li>
* <li>{@link #KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS}: calculate Checksum based on Native String
* representation of fields; as in version 8.0</li>
* </ul>
* <p>The default is {@value #KETTLE_CHECKSUM_EVALUATION_METHOD_DEFAULT}.</p>
*
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_DEFAULT
*/
public static final String KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD = "KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD";
/**
* <p>The value to use for setting the {@link #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD}, so that Checksum is
* calculated based on Byte representation of fields. Calculation method used by version 8.1 and after.</p>
*
* @see #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_DEFAULT
*/
public static final String KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES = "BYTES";
/**
* <p>The value to use for setting the {@link #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD}, so that Checksum is
* calculated based on Pentaho String representation of fields (applying format masks). Calculation method used by
* version 7.1 and prior versions.</p>
*
* @see #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_DEFAULT
*/
public static final String KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS = "PENTAHO_STRINGS";
/**
* <p>The value to use for setting the {@link #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD}, so that Checksum is
* calculated based on Native String representation of fields. Calculation method used by version 8.0.</p>
*
* @see #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_DEFAULT
*/
public static final String KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS = "NATIVE_STRINGS";
/**
* <p>The default value for the {@link #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD}.</p>
*
* @see #KETTLE_DEFAULT_CHECKSUM_EVALUATION_METHOD
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_PENTAHO_STRINGS
* @see #KETTLE_CHECKSUM_EVALUATION_METHOD_NATIVE_STRINGS
*/
public static final String KETTLE_CHECKSUM_EVALUATION_METHOD_DEFAULT = KETTLE_CHECKSUM_EVALUATION_METHOD_BYTES;
/**
* <p>While one assumes that a day has 24 hours, due to daylight savings time settings, it may have 23 hours (the day
* Summer time goes into effect) or 25 hours (Winter time).</p>
* <p>Imagine Summer time: when clocks reach 1:00, it goes forward 1 hour to 2:00</p>
* <p>This means that, when adding 2 hours to 0:30, one gets 3:30</p>
   * <p>By setting this environment variable to {@code "N"}, DateDiff performs calculations based on local time; the
   * difference between these two values (3:30 and 0:30) will then be 3 hours.</p>
   * <p>By setting this environment variable to {@code "Y"}, DateDiff performs calculations based on UTC time; the
   * difference between these two values (3:30 and 0:30) will then be 2 hours.</p>
* <p>The default is {@value #KETTLE_DATEDIFF_DST_AWARE_DEFAULT}.</p>
*
* @see #KETTLE_DATEDIFF_DST_AWARE_DEFAULT
*/
public static final String KETTLE_DATEDIFF_DST_AWARE = "KETTLE_DATEDIFF_DST_AWARE";
/**
* <p>The default value for the {@link #KETTLE_DATEDIFF_DST_AWARE}.</p>
*
* @see #KETTLE_DATEDIFF_DST_AWARE
*/
public static final String KETTLE_DATEDIFF_DST_AWARE_DEFAULT = "N";
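  // Worked example (not part of the original source) of the behaviour described above, sketched with
  // java.time and the Europe/London zone, where clocks jumped from 1:00 to 2:00 on 2021-03-28:
  //
  //   ZoneId zone = ZoneId.of( "Europe/London" );
  //   ZonedDateTime start = ZonedDateTime.of( 2021, 3, 28, 0, 30, 0, 0, zone );
  //   ZonedDateTime end   = ZonedDateTime.of( 2021, 3, 28, 3, 30, 0, 0, zone );
  //   Duration.between( start, end ).toHours();                                   // 2 -> "Y" (UTC-based)
  //   ChronoUnit.HOURS.between( start.toLocalDateTime(), end.toLocalDateTime() ); // 3 -> "N" (local wall clock)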
/**
   * If true, Kettle checks for new site files to update in the named cluster every time a named cluster is resolved.
*/
public static final String KETTLE_AUTO_UPDATE_SITE_FILE = "KETTLE_AUTO_UPDATE_SITE_FILE";
/**
   * If true, use a cache when loading Trans/Job/Step metas instead of reading from the file system/repository for each
   * load. Note: the cache is currently broken; variable spaces do not get replaced upon loads of the same meta, so
   * parameters that have changed do not get updated. This should be off by default.
*/
public static final String KETTLE_USE_META_FILE_CACHE = "KETTLE_USE_META_FILE_CACHE";
public static final String KETTLE_USE_META_FILE_CACHE_DEFAULT = "N";
/**
   * Rounds a double f to any number of places after the decimal point. Does arithmetic using the BigDecimal class to
   * avoid integer overflow while rounding.
*
* @param f
* The value to round
* @param places
* The number of decimal places
* @return The rounded floating point value
*/
public static double round( double f, int places ) {
return round( f, places, java.math.BigDecimal.ROUND_HALF_EVEN );
}
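  // Usage sketch (not part of the original source): the two-argument form above uses HALF_EVEN
  // ("banker's rounding"), so ties are rounded towards the even neighbour.
  //
  //   Const.round( 2.345, 2 );   // 2.34  (tie, 4 is already even)
  //   Const.round( 2.355, 2 );   // 2.36  (tie, rounds up to the even 6)
  //   Const.round( 2.346, 2 );   // 2.35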
/**
   * Rounds a double f to any number of places after the decimal point. Does arithmetic using the BigDecimal class to
   * avoid integer overflow while rounding.
*
* @param f
* The value to round
* @param places
* The number of decimal places
* @param roundingMode
* The mode for rounding, e.g. java.math.BigDecimal.ROUND_HALF_EVEN
* @return The rounded floating point value
*/
public static double round( double f, int places, int roundingMode ) {
// We can't round non-numbers or infinite values
//
if ( Double.isNaN( f ) || f == Double.NEGATIVE_INFINITY || f == Double.POSITIVE_INFINITY ) {
return f;
}
// Do the rounding...
//
java.math.BigDecimal bdtemp = round( java.math.BigDecimal.valueOf( f ), places, roundingMode );
return bdtemp.doubleValue();
}
/**
   * Rounds a BigDecimal f to any number of places after the decimal point. Does arithmetic using the BigDecimal class
   * to avoid integer overflow while rounding.
*
* @param f
* The value to round
* @param places
* The number of decimal places
* @param roundingMode
* The mode for rounding, e.g. java.math.BigDecimal.ROUND_HALF_EVEN
   * @return The rounded BigDecimal value
*/
public static BigDecimal round( BigDecimal f, int places, int roundingMode ) {
if ( roundingMode == ROUND_HALF_CEILING ) {
if ( f.signum() >= 0 ) {
return round( f, places, BigDecimal.ROUND_HALF_UP );
} else {
return round( f, places, BigDecimal.ROUND_HALF_DOWN );
}
} else {
return f.setScale( places, roundingMode );
}
}
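  // Illustration (not part of the original source) of the ROUND_HALF_CEILING branch above: ties are
  // resolved towards positive infinity by switching between HALF_UP and HALF_DOWN depending on the sign.
  //
  //   Const.round( new BigDecimal( "2.5" ),  0, Const.ROUND_HALF_CEILING );    //  3
  //   Const.round( new BigDecimal( "-2.5" ), 0, Const.ROUND_HALF_CEILING );    // -2
  //   Const.round( new BigDecimal( "2.5" ),  0, BigDecimal.ROUND_HALF_EVEN );  //  2, for comparison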
/**
   * Rounds a long f to any number of places after the decimal point. Does arithmetic using the BigDecimal class to
   * avoid integer overflow while rounding.
*
* @param f
* The value to round
* @param places
* The number of decimal places
* @param roundingMode
* The mode for rounding, e.g. java.math.BigDecimal.ROUND_HALF_EVEN
   * @return The rounded long value
*/
public static long round( long f, int places, int roundingMode ) {
if ( places >= 0 ) {
return f;
}
BigDecimal bdtemp = round( BigDecimal.valueOf( f ), places, roundingMode );
return bdtemp.longValue();
}
/*
* OLD code: caused a lot of problems with very small and very large numbers. It's a miracle it worked at all. Go
* ahead, have a laugh... public static float round(double f, int places) { float temp = (float) (f *
* (Math.pow(10, places)));
*
* temp = (Math.round(temp));
*
* temp = temp / (int) (Math.pow(10, places));
*
* return temp;
*
* }
*/
/**
* Convert a String into an integer. If the conversion fails, assign a default value.
*
* @param str
* The String to convert to an integer
* @param def
* The default value
* @return The converted value or the default.
*/
public static int toInt( String str, int def ) {
int retval;
if ( str == null ) {
retval = def;
} else {
try {
retval = Integer.parseInt( str );
} catch ( Exception e ) {
retval = def;
}
}
return retval;
}
/**
* Convert a String into a long integer. If the conversion fails, assign a default value.
*
* @param str
* The String to convert to a long integer
* @param def
* The default value
* @return The converted value or the default.
*/
public static long toLong( String str, long def ) {
long retval;
if ( str == null ) {
retval = def;
} else {
try {
retval = Long.parseLong( str );
} catch ( Exception e ) {
retval = def;
}
}
return retval;
}
/**
* Convert a String into a double. If the conversion fails, assign a default value.
*
* @param str
* The String to convert to a double
* @param def
* The default value
* @return The converted value or the default.
*/
public static double toDouble( String str, double def ) {
double retval;
if ( str == null ) {
retval = def;
} else {
try {
retval = Double.parseDouble( str );
} catch ( Exception e ) {
retval = def;
}
}
return retval;
}
/**
* Convert a String into a date. The date format is <code>yyyy/MM/dd HH:mm:ss.SSS</code>. If the conversion fails,
* assign a default value.
*
* @param str
* The String to convert into a Date
* @param def
* The default value
* @return The converted value or the default.
*/
public static Date toDate( String str, Date def ) {
SimpleDateFormat df = new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss.SSS", Locale.US );
try {
return df.parse( str );
} catch ( ParseException e ) {
return def;
}
}
/**
* Determines whether or not a character is considered a space. A character is considered a space in Kettle if it is a
   * space, a tab, a newline or a carriage return.
*
* @param c
* The character to verify if it is a space.
* @return true if the character is a space. false otherwise.
*/
public static boolean isSpace( char c ) {
return c == ' ' || c == '\t' || c == '\r' || c == '\n' || Character.isWhitespace( c );
}
/**
* Left trim: remove spaces to the left of a String.
*
* @param source
* The String to left trim
* @return The left trimmed String
*/
public static String ltrim( String source ) {
if ( source == null ) {
return null;
}
int from = 0;
while ( from < source.length() && isSpace( source.charAt( from ) ) ) {
from++;
}
return source.substring( from );
}
/**
* Right trim: remove spaces to the right of a string
*
* @param source
* The string to right trim
* @return The trimmed string.
*/
public static String rtrim( String source ) {
if ( source == null ) {
return null;
}
int max = source.length();
while ( max > 0 && isSpace( source.charAt( max - 1 ) ) ) {
max--;
}
return source.substring( 0, max );
}
/**
* Trims a string: removes the leading and trailing spaces of a String.
*
* @param str
* The string to trim
* @return The trimmed string.
*/
public static String trim( String str ) {
if ( str == null ) {
return null;
}
int max = str.length() - 1;
int min = 0;
while ( min <= max && isSpace( str.charAt( min ) ) ) {
min++;
}
while ( max >= 0 && isSpace( str.charAt( max ) ) ) {
max--;
}
if ( max < min ) {
return "";
}
return str.substring( min, max + 1 );
}
/**
   * Right pad a string: adds spaces to a string until it reaches a certain length. If the string is longer than the
   * specified limit, it is truncated.
*
* @param ret
* The string to pad
* @param limit
* The desired length of the padded string.
* @return The padded String.
*/
public static String rightPad( String ret, int limit ) {
if ( ret == null ) {
return rightPad( new StringBuilder(), limit );
} else {
return rightPad( new StringBuilder( ret ), limit );
}
}
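  // Usage sketch (not part of the original source), showing both the padding and the truncation
  // behaviour of rightPad:
  //
  //   Const.rightPad( "abc", 5 );          // "abc  "  (padded with spaces up to the limit)
  //   Const.rightPad( "abcdef", 3 );       // "abc"    (truncated to the limit)
  //   Const.rightPad( (String) null, 3 );  // "   "    (null is treated as an empty string)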
/**
   * Right pad a StringBuffer: adds spaces to a string until it reaches a certain length. If the string is longer than
   * the specified limit, it is truncated.
*
* MB - New version is nearly 25% faster
*
* @param ret
* The StringBuffer to pad
* @param limit
* The desired length of the padded string.
* @return The padded String.
*/
public static String rightPad( StringBuffer ret, int limit ) {
if ( ret != null ) {
while ( ret.length() < limit ) {
ret.append( " " );
}
ret.setLength( limit );
return ret.toString();
} else {
return null;
}
}
/**
   * Right pad a StringBuilder: adds spaces to a string until it reaches a certain length. If the string is longer than
   * the specified limit, it is truncated.
*
* MB - New version is nearly 25% faster
*
* @param ret
* The StringBuilder to pad
* @param limit
* The desired length of the padded string.
* @return The padded String.
*/
public static String rightPad( StringBuilder ret, int limit ) {
if ( ret != null ) {
while ( ret.length() < limit ) {
ret.append( " " );
}
ret.setLength( limit );
return ret.toString();
} else {
return null;
}
}
/**
* Replace values in a String with another.
*
* 33% Faster using replaceAll this way than original method
*
* @param string
* The original String.
* @param repl
* The text to replace
* @param with
* The new text bit
* @return The resulting string with the text pieces replaced.
*/
public static String replace( String string, String repl, String with ) {
if ( string != null && repl != null && with != null ) {
return string.replaceAll( Pattern.quote( repl ), Matcher.quoteReplacement( with ) );
} else {
return null;
}
}
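  // Usage sketch (not part of the original source): because the search text is wrapped in Pattern.quote
  // and the replacement in Matcher.quoteReplacement, both are treated literally, unlike a raw
  // String.replaceAll call.
  //
  //   Const.replace( "a.b.c", ".", "/" );        // "a/b/c"
  //   "a.b.c".replaceAll( ".", "/" );            // "/////"   (unquoted "." matches any character)
  //   Const.replace( "price: $5", "$5", "$6" );  // "price: $6" (no group-reference surprises)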
/**
* Alternate faster version of string replace using a stringbuffer as input.
*
* 33% Faster using replaceAll this way than original method
*
* @param str
* The string where we want to replace in
* @param code
* The code to search for
* @param repl
* The replacement string for code
*/
public static void repl( StringBuffer str, String code, String repl ) {
if ( ( code == null ) || ( repl == null ) || ( code.length() == 0 ) || ( repl.length() == 0 ) || ( str == null ) || ( str.length() == 0 ) ) {
return; // do nothing
}
String aString = str.toString();
str.setLength( 0 );
str.append( aString.replaceAll( Pattern.quote( code ), Matcher.quoteReplacement( repl ) ) );
}
/**
* Alternate faster version of string replace using a stringbuilder as input (non-synchronized).
*
* 33% Faster using replaceAll this way than original method
*
* @param str
* The string where we want to replace in
* @param code
* The code to search for
* @param repl
* The replacement string for code
*/
public static void repl( StringBuilder str, String code, String repl ) {
if ( ( code == null ) || ( repl == null ) || ( str == null ) ) {
return; // do nothing
}
String aString = str.toString();
str.setLength( 0 );
str.append( aString.replaceAll( Pattern.quote( code ), Matcher.quoteReplacement( repl ) ) );
}
/**
* Count the number of spaces to the left of a text. (leading)
*
* @param field
* The text to examine
* @return The number of leading spaces found.
*/
public static int nrSpacesBefore( String field ) {
int nr = 0;
int len = field.length();
while ( nr < len && field.charAt( nr ) == ' ' ) {
nr++;
}
return nr;
}
/**
* Count the number of spaces to the right of a text. (trailing)
*
* @param field
* The text to examine
* @return The number of trailing spaces found.
*/
public static int nrSpacesAfter( String field ) {
int nr = 0;
int len = field.length();
while ( nr < len && field.charAt( field.length() - 1 - nr ) == ' ' ) {
nr++;
}
return nr;
}
/**
* Checks whether or not a String consists only of spaces.
*
* @param str
* The string to check
* @return true if the string has nothing but spaces.
*/
public static boolean onlySpaces( String str ) {
for ( int i = 0; i < str.length(); i++ ) {
if ( !isSpace( str.charAt( i ) ) ) {
return false;
}
}
return true;
}
/**
* determine the OS name
*
* @return The name of the OS
*/
public static String getOS() {
return System.getProperty( "os.name" );
}
/**
* Determine the quoting character depending on the OS. Often used for shell calls, gives back " for Windows systems
* otherwise '
*
* @return quoting character
*/
public static String getQuoteCharByOS() {
if ( isWindows() ) {
return "\"";
} else {
return "'";
}
}
/**
* Quote a string depending on the OS. Often used for shell calls.
*
* @return quoted string
*/
public static String optionallyQuoteStringByOS( String string ) {
String quote = getQuoteCharByOS();
if ( Utils.isEmpty( string ) ) {
return quote;
}
// If the field already contains quotes, we don't touch it anymore, just
// return the same string...
// also return it if no spaces are found
if ( string.indexOf( quote ) >= 0 || ( string.indexOf( ' ' ) < 0 && string.indexOf( '=' ) < 0 ) ) {
return string;
} else {
return quote + string + quote;
}
}
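  // Usage sketch (not part of the original source); the result is OS dependent because the quote
  // character comes from getQuoteCharByOS(). On Windows, for example:
  //
  //   Const.optionallyQuoteStringByOS( "C:\\Program Files\\kettle" );  // "\"C:\\Program Files\\kettle\""
  //   Const.optionallyQuoteStringByOS( "plain" );                      // "plain" (no space or '=', left as is)
  //   Const.optionallyQuoteStringByOS( "say \"hi\"" );                 // unchanged, it already contains a quote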
/**
   * @return True if the OS is a Windows derivative.
*/
public static boolean isWindows() {
return getOS().startsWith( "Windows" );
}
/**
   * @return True if the OS is a Linux derivative.
*/
public static boolean isLinux() {
return getOS().startsWith( "Linux" );
}
/**
   * @return True if the OS is an OSX derivative.
*/
public static boolean isOSX() {
return getOS().toUpperCase().contains( "OS X" );
}
/**
* @return True if KDE is in use.
*/
public static boolean isKDE() {
return StringUtils.isNotBlank( System.getenv( "KDE_SESSION_VERSION" ) );
}
private static String cachedHostname;
/**
* Determine the hostname of the machine Kettle is running on
*
* @return The hostname
*/
public static String getHostname() {
if ( cachedHostname != null ) {
return cachedHostname;
}
// In case we don't want to leave anything to doubt...
//
String systemHostname = EnvUtil.getSystemProperty( KETTLE_SYSTEM_HOSTNAME );
if ( !Utils.isEmpty( systemHostname ) ) {
cachedHostname = systemHostname;
return systemHostname;
}
String lastHostname = "localhost";
try {
Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces();
while ( en.hasMoreElements() ) {
NetworkInterface nwi = en.nextElement();
Enumeration<InetAddress> ip = nwi.getInetAddresses();
while ( ip.hasMoreElements() ) {
InetAddress in = ip.nextElement();
boolean hasNewHostName = !lastHostname.equalsIgnoreCase( "localhost" );
if ( InetAddressUtils.isIPv4Address( in.getHostAddress() ) || !hasNewHostName ) {
lastHostname = in.getHostName();
}
if ( hasNewHostName && lastHostname.indexOf( ':' ) < 0 ) {
break;
}
}
}
} catch ( SocketException e ) {
// Eat exception, just return what you have
}
cachedHostname = lastHostname;
return lastHostname;
}
/**
* Determine the hostname of the machine Kettle is running on
*
* @return The hostname
*/
public static String getHostnameReal() {
// In case we don't want to leave anything to doubt...
//
String systemHostname = EnvUtil.getSystemProperty( KETTLE_SYSTEM_HOSTNAME );
if ( !Utils.isEmpty( systemHostname ) ) {
return systemHostname;
}
if ( isWindows() ) {
// Windows will always set the 'COMPUTERNAME' variable
return System.getenv( "COMPUTERNAME" );
} else {
// If it is not Windows then it is most likely a Unix-like operating system
// such as Solaris, AIX, HP-UX, Linux or MacOS.
// Most modern shells (such as Bash or derivatives) sets the
// HOSTNAME variable so lets try that first.
String hostname = System.getenv( "HOSTNAME" );
if ( hostname != null ) {
return hostname;
} else {
BufferedReader br;
try {
Process pr = Runtime.getRuntime().exec( "hostname" );
br = new BufferedReader( new InputStreamReader( pr.getInputStream() ) );
String line;
if ( ( line = br.readLine() ) != null ) {
return line;
}
pr.waitFor();
br.close();
} catch ( IOException e ) {
return getHostname();
} catch ( InterruptedException e ) {
return getHostname();
}
}
}
return getHostname();
}
/**
   * Determines the IP address of the machine Kettle is running on.
*
* @return The IP address
*/
public static String getIPAddress() throws Exception {
Enumeration<NetworkInterface> enumInterfaces = NetworkInterface.getNetworkInterfaces();
while ( enumInterfaces.hasMoreElements() ) {
NetworkInterface nwi = enumInterfaces.nextElement();
Enumeration<InetAddress> ip = nwi.getInetAddresses();
while ( ip.hasMoreElements() ) {
InetAddress in = ip.nextElement();
if ( !in.isLoopbackAddress() && in.toString().indexOf( ":" ) < 0 ) {
return in.getHostAddress();
}
}
}
return "127.0.0.1";
}
/**
* Get the primary IP address tied to a network interface (excluding loop-back etc)
*
* @param networkInterfaceName
* the name of the network interface to interrogate
* @return null if the network interface or address wasn't found.
*
* @throws SocketException
* in case of a security or network error
*/
public static String getIPAddress( String networkInterfaceName ) throws SocketException {
NetworkInterface networkInterface = NetworkInterface.getByName( networkInterfaceName );
Enumeration<InetAddress> ipAddresses = networkInterface.getInetAddresses();
while ( ipAddresses.hasMoreElements() ) {
InetAddress inetAddress = ipAddresses.nextElement();
if ( !inetAddress.isLoopbackAddress() && inetAddress.toString().indexOf( ":" ) < 0 ) {
String hostname = inetAddress.getHostAddress();
return hostname;
}
}
return null;
}
/**
* Tries to determine the MAC address of the machine Kettle is running on.
*
* @return The MAC address.
*/
public static String getMACAddress() throws Exception {
String ip = getIPAddress();
String mac = "none";
String os = getOS();
String s = "";
@SuppressWarnings( "unused" )
Boolean errorOccured = false;
// System.out.println("os = "+os+", ip="+ip);
if ( os.equalsIgnoreCase( "Windows NT" )
|| os.equalsIgnoreCase( "Windows 2000" ) || os.equalsIgnoreCase( "Windows XP" )
|| os.equalsIgnoreCase( "Windows 95" ) || os.equalsIgnoreCase( "Windows 98" )
|| os.equalsIgnoreCase( "Windows Me" ) || os.startsWith( "Windows" ) ) {
try {
// System.out.println("EXEC> nbtstat -a "+ip);
Process p = Runtime.getRuntime().exec( "nbtstat -a " + ip );
// read the standard output of the command
BufferedReader stdInput = new BufferedReader( new InputStreamReader( p.getInputStream() ) );
while ( !procDone( p ) ) {
while ( ( s = stdInput.readLine() ) != null ) {
// System.out.println("NBTSTAT> "+s);
if ( s.indexOf( "MAC" ) >= 0 ) {
int idx = s.indexOf( '=' );
mac = s.substring( idx + 2 );
}
}
}
stdInput.close();
} catch ( Exception e ) {
errorOccured = true;
}
} else if ( os.equalsIgnoreCase( "Linux" ) ) {
try {
Process p = Runtime.getRuntime().exec( "/sbin/ifconfig -a" );
// read the standard output of the command
BufferedReader stdInput = new BufferedReader( new InputStreamReader( p.getInputStream() ) );
while ( !procDone( p ) ) {
while ( ( s = stdInput.readLine() ) != null ) {
int idx = s.indexOf( "HWaddr" );
if ( idx >= 0 ) {
mac = s.substring( idx + 7 );
}
}
}
stdInput.close();
} catch ( Exception e ) {
errorOccured = true;
}
} else if ( os.equalsIgnoreCase( "Solaris" ) ) {
try {
Process p = Runtime.getRuntime().exec( "/usr/sbin/ifconfig -a" );
// read the standard output of the command
BufferedReader stdInput = new BufferedReader( new InputStreamReader( p.getInputStream() ) );
while ( !procDone( p ) ) {
while ( ( s = stdInput.readLine() ) != null ) {
int idx = s.indexOf( "ether" );
if ( idx >= 0 ) {
mac = s.substring( idx + 6 );
}
}
}
stdInput.close();
} catch ( Exception e ) {
errorOccured = true;
}
} else if ( os.equalsIgnoreCase( "HP-UX" ) ) {
try {
Process p = Runtime.getRuntime().exec( "/usr/sbin/lanscan -a" );
// read the standard output of the command
BufferedReader stdInput = new BufferedReader( new InputStreamReader( p.getInputStream() ) );
while ( !procDone( p ) ) {
while ( ( s = stdInput.readLine() ) != null ) {
if ( s.indexOf( "MAC" ) >= 0 ) {
int idx = s.indexOf( "0x" );
mac = s.substring( idx + 2 );
}
}
}
stdInput.close();
} catch ( Exception e ) {
errorOccured = true;
}
}
// should do something if we got an error processing!
return Const.trim( mac );
}
private static final boolean procDone( Process p ) {
try {
p.exitValue();
return true;
} catch ( IllegalThreadStateException e ) {
return false;
}
}
/**
   * Determines if the RUNNING_ON_WEBSPOON_MODE flag is set and returns its boolean value.
   * This is on a per-user basis.
   *
   * @return Boolean signaling the use of Webspoon mode.
*/
public static boolean isRunningOnWebspoonMode() {
return Boolean.parseBoolean( NVL( System.getenv( "RUNNING_ON_WEBSPOON_MODE" ), NVL( System.getProperty( "RUNNING_ON_WEBSPOON_MODE" ),
"false" ) ) );
}
/**
* Looks up the user's home directory (or KETTLE_HOME) for every invocation. This is no longer a static property so
* the value may be set after this class is loaded.
*
* @return The path to the users home directory, or the System property {@code KETTLE_HOME} if set.
*/
public static String getUserHomeDirectory() {
return NVL( System.getenv( "KETTLE_HOME" ), NVL( System.getProperty( "KETTLE_HOME" ),
System.getProperty( "user.home" ) ) );
}
/**
* Determines the Kettle absolute directory in the user's home directory.
*
* @return The Kettle absolute directory.
*/
public static String getKettleDirectory() {
return getUserHomeDirectory() + FILE_SEPARATOR + getUserBaseDir();
}
/**
* Determines the Kettle user data directory in the user's home directory.
   * This is on a per-user basis.
*
* @return The Kettle user data directory.
*/
public static String getUserDataDirectory() {
String dataDir = getKettleDirectory() + Const.FILE_SEPARATOR + "data";
return NVL( System.getenv( "WEBSPOON_USER_HOME" ), NVL( System.getProperty( "WEBSPOON_USER_HOME" ),
dataDir ) );
}
/**
* Determines the Kettle directory in the user's home directory.
*
* @return The Kettle directory.
*/
public static String getUserBaseDir() {
return BasePropertyHandler.getProperty( "userBaseDir", ".kettle" );
}
/**
* Returns the value of DI_HOME.
*/
public static String getDIHomeDirectory() {
return System.getProperty( "DI_HOME" );
}
/**
* Determines the location of the shared objects file
*
* @return the name of the shared objects file
*/
public static String getSharedObjectsFile() {
return getKettleDirectory() + FILE_SEPARATOR + SHARED_DATA_FILE;
}
/**
* Returns the path to the Kettle local (current directory) repositories XML file.
*
* @return The local repositories file.
*/
public static String getKettleLocalRepositoriesFile() {
return "repositories.xml";
}
/**
* Returns the full path to the Kettle repositories XML file.
*
* @return The Kettle repositories file.
*/
public static String getKettleUserRepositoriesFile() {
return getKettleDirectory() + FILE_SEPARATOR + getKettleLocalRepositoriesFile();
}
/**
* Returns the full path to the Kettle properties XML file.
*
* @return The Kettle properties file.
*/
public static String getKettlePropertiesFilename() {
return Const.getKettleDirectory() + FILE_SEPARATOR + Const.KETTLE_PROPERTIES;
}
/**
* Returns the path to the Kettle local (current directory) Carte password file:
* <p>
* ./pwd/kettle.pwd<br>
*
* @return The local Carte password file.
*/
public static String getKettleLocalCartePasswordFile() {
return "pwd/kettle.pwd";
}
/**
* Returns the path to the Kettle Carte password file in the home directory:
* <p>
* $KETTLE_HOME/.kettle/kettle.pwd<br>
*
* @return The Carte password file in the home directory.
*/
public static String getKettleCartePasswordFile() {
return getKettleDirectory() + FILE_SEPARATOR + "kettle.pwd";
}
/**
* Provides the base documentation url (top-level help)
*
* @return the fully qualified base documentation URL
*/
public static String getBaseDocUrl() {
return BaseMessages.getString( PKG, "Const.BaseDocUrl" );
}
/**
* Provides the base wiki documentation url (top-level pentaho-community)
*
* @return the fully qualified base wiki documentation URL
*/
public static String getBaseWikiDocUrl() {
return BaseMessages.getString( PKG, "Const.BaseWikiDocUrl" );
}
/**
* Provides the documentation url with the configured base + the given URI.
*
* @param uri
* the resource identifier for the documentation
* (eg. Products/Data_Integration/Data_Integration_Perspective/050/000)
*
* @return the fully qualified documentation URL for the given URI
*/
public static String getDocUrl( final String uri ) {
// if the uri is not empty, use it to build the URL
if ( !Utils.isEmpty( uri ) ) {
if ( uri.startsWith( WIKI_URL ) ) {
// if the link is to the old wiki replace it with the new wiki
String docWikiUrl = getBaseWikiDocUrl();
String wikiURL = uri.substring( WIKI_URL.length() );
return ( wikiURL.startsWith( "/" ) ? docWikiUrl + wikiURL.substring( 1 ) : docWikiUrl + wikiURL );
} else if ( uri.startsWith( "http" ) ) {
// use what is provided, it's already absolute
return uri;
} else {
// the uri provided needs to be assembled
String baseDocURL = getBaseDocUrl();
return ( uri.startsWith( "/" ) ? baseDocURL + uri.substring( 1 ) : baseDocURL + uri );
}
}
return getBaseDocUrl();
}
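  // Usage sketch (not part of the original source). The real base URLs come from the message bundle
  // (Const.BaseDocUrl / Const.BaseWikiDocUrl); "https://help.example.com/" below is a hypothetical
  // placeholder used only to show how the pieces are assembled.
  //
  //   getDocUrl( "Products/Data_Integration/050/000" )   // -> "https://help.example.com/Products/Data_Integration/050/000"
  //   getDocUrl( "/Products/Data_Integration/050/000" )  // -> same as above, the leading slash is dropped
  //   getDocUrl( "https://other.example.com/page" )      // -> returned as is, it is already absolute
  //   getDocUrl( null )                                  // -> the base documentation URL itself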
/**
* Retrieves the content of an environment variable
*
* @param variable
* The name of the environment variable
* @param deflt
* The default value in case no value was found
* @return The value of the environment variable or the value of deflt in case no variable was defined.
*/
public static String getEnvironmentVariable( String variable, String deflt ) {
return System.getProperty( variable, deflt );
}
/**
* Replaces environment variables in a string. For example if you set KETTLE_HOME as an environment variable, you can
   * use %%KETTLE_HOME%% in dialogs etc. to refer to this value. This procedure looks for %%...%% pairs and replaces
* them including the name of the environment variable with the actual value. In case the variable was not set,
* nothing is replaced!
*
* @param string
* The source string where text is going to be replaced.
*
* @return The expanded string.
* @deprecated use StringUtil.environmentSubstitute(): handles both Windows and unix conventions
*/
@Deprecated
public static String replEnv( String string ) {
if ( string == null ) {
return null;
}
StringBuilder str = new StringBuilder( string );
int idx = str.indexOf( "%%" );
while ( idx >= 0 ) {
// OK, so we found a marker, look for the next one...
int to = str.indexOf( "%%", idx + 2 );
if ( to >= 0 ) {
// OK, we found the other marker also...
String marker = str.substring( idx, to + 2 );
String var = str.substring( idx + 2, to );
if ( var != null && var.length() > 0 ) {
// Get the environment variable
String newval = getEnvironmentVariable( var, null );
if ( newval != null ) {
// Replace the whole bunch
str.replace( idx, to + 2, newval );
// The last position has changed...
to += newval.length() - marker.length();
}
}
} else {
        // We found the start marker, but not the closing %%
to = idx;
}
// Look for the next variable to replace...
idx = str.indexOf( "%%", to + 1 );
}
return str.toString();
}
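  // Usage sketch (not part of the original source). Note that getEnvironmentVariable() reads system
  // properties, so the substitution below is driven by System.getProperty rather than the OS environment.
  //
  //   System.setProperty( "KETTLE_HOME", "/opt/kettle" );
  //   Const.replEnv( "home=%%KETTLE_HOME%%" );   // "home=/opt/kettle"
  //   Const.replEnv( "%%NOT_DEFINED%% stays" );  // unchanged: unset variables are left alone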
/**
* Replaces environment variables in an array of strings.
* <p>
* See also: replEnv(String string)
*
* @param string
   *          The array of strings whose variables should be replaced.
* @return the array with the environment variables replaced.
* @deprecated please use StringUtil.environmentSubstitute now.
*/
@Deprecated
public static String[] replEnv( String[] string ) {
String[] retval = new String[string.length];
for ( int i = 0; i < string.length; i++ ) {
retval[i] = Const.replEnv( string[i] );
}
return retval;
}
/**
* Implements Oracle style NVL function
*
* @param source
* The source argument
* @param def
* The default value in case source is null or the length of the string is 0
* @return source if source is not null, otherwise return def
*/
public static String NVL( String source, String def ) {
if ( source == null || source.length() == 0 ) {
return def;
}
return source;
}
/**
* Return empty string "" in case the given parameter is null, otherwise return the same value.
*
* @param source
* The source value to check for null.
* @return empty string if source is null, otherwise simply return the source value.
*/
public static String nullToEmpty( String source ) {
if ( source == null ) {
return EMPTY_STRING;
}
return source;
}
/**
* Search for a string in an array of strings and return the index.
*
* @param lookup
* The string to search for
* @param array
* The array of strings to look in
* @return The index of a search string in an array of strings. -1 if not found.
*/
public static int indexOfString( String lookup, String[] array ) {
if ( array == null ) {
return -1;
}
if ( lookup == null ) {
return -1;
}
for ( int i = 0; i < array.length; i++ ) {
if ( lookup.equalsIgnoreCase( array[i] ) ) {
return i;
}
}
return -1;
}
/**
* Search for strings in an array of strings and return the indexes.
*
* @param lookup
* The strings to search for
* @param array
* The array of strings to look in
* @return The indexes of strings in an array of strings. -1 if not found.
*/
public static int[] indexsOfStrings( String[] lookup, String[] array ) {
int[] indexes = new int[lookup.length];
for ( int i = 0; i < indexes.length; i++ ) {
indexes[i] = indexOfString( lookup[i], array );
}
return indexes;
}
/**
* Search for strings in an array of strings and return the indexes. If a string is not found, the index is not
* returned.
*
* @param lookup
* The strings to search for
* @param array
* The array of strings to look in
* @return The indexes of strings in an array of strings. Only existing indexes are returned (no -1)
*/
public static int[] indexsOfFoundStrings( String[] lookup, String[] array ) {
List<Integer> indexesList = new ArrayList<>();
for ( int i = 0; i < lookup.length; i++ ) {
int idx = indexOfString( lookup[i], array );
if ( idx >= 0 ) {
indexesList.add( Integer.valueOf( idx ) );
}
}
int[] indexes = new int[indexesList.size()];
for ( int i = 0; i < indexesList.size(); i++ ) {
indexes[i] = ( indexesList.get( i ) ).intValue();
}
return indexes;
}
/**
* Search for a string in a list of strings and return the index.
*
* @param lookup
* The string to search for
* @param list
* The ArrayList of strings to look in
* @return The index of a search string in an array of strings. -1 if not found.
*/
public static int indexOfString( String lookup, List<String> list ) {
if ( list == null ) {
return -1;
}
for ( int i = 0; i < list.size(); i++ ) {
String compare = list.get( i );
if ( lookup.equalsIgnoreCase( compare ) ) {
return i;
}
}
return -1;
}
/**
* Sort the strings of an array in alphabetical order.
*
* @param input
* The array of strings to sort.
* @return The sorted array of strings.
*/
public static String[] sortStrings( String[] input ) {
Arrays.sort( input );
return input;
}
/**
* Convert strings separated by a string into an array of strings.
* <p>
* <code>
Example: a;b;c;d ==> new String[] { a, b, c, d }
* </code>
*
* <p>
* <b>NOTE: this differs from String.split() in that the built-in method uses regular expressions and this one
* does not.</b>
*
* @param string
* The string to split
* @param separator
* The separator used.
* @return the string split into an array of strings
*/
public static String[] splitString( String string, String separator ) {
/*
* 0123456 Example a;b;c;d --> new String[] { a, b, c, d }
*/
// System.out.println("splitString ["+path+"] using ["+separator+"]");
List<String> list = new ArrayList<>();
if ( string == null || string.length() == 0 ) {
return new String[] {};
}
int sepLen = separator.length();
int from = 0;
int end = string.length() - sepLen + 1;
for ( int i = from; i < end; i += sepLen ) {
if ( string.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) {
// OK, we found a separator, the string to add to the list
// is [from, i[
list.add( nullToEmpty( string.substring( from, i ) ) );
from = i + sepLen;
}
}
// Wait, if the string didn't end with a separator, we still have information at the end of the string...
// In our example that would be "d"...
if ( from + sepLen <= string.length() ) {
list.add( nullToEmpty( string.substring( from, string.length() ) ) );
}
return list.toArray( new String[list.size()] );
}
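// Illustrative example (editor's note, not part of the original source):
// splitString( "a;b;c;d", ";" ) yields { "a", "b", "c", "d" }. Because the separator is
// compared with equalsIgnoreCase(), splitString( "aXbxc", "x" ) also yields { "a", "b", "c" }.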
/**
* Convert strings separated by a character into an array of strings.
* <p>
* <code>
Example: a;b;c;d ==> new String[] { a, b, c, d }
* </code>
*
* @param string
* The string to split
* @param separator
* The separator used.
* @return the string split into an array of strings
*/
public static String[] splitString( String string, char separator ) {
return splitString( string, separator, false );
}
/**
* Convert strings separated by a character into an array of strings.
* <p>
* <code>
Example: a;b;c;d ==> new String[] { a, b, c, d }
* </code>
*
* @param string
* The string to split
* @param separator
* The separator used.
* @param escape
* in case the separator can be escaped (\;) The escape characters are NOT removed!
* @return the string split into an array of strings
*/
public static String[] splitString( String string, char separator, boolean escape ) {
/*
* 0123456 Example a;b;c;d --> new String[] { a, b, c, d }
*/
// System.out.println("splitString ["+path+"] using ["+separator+"]");
List<String> list = new ArrayList<>();
if ( string == null || string.length() == 0 ) {
return new String[] {};
}
int from = 0;
int end = string.length();
for ( int i = from; i < end; i += 1 ) {
boolean found = string.charAt( i ) == separator;
if ( found && escape && i > 0 ) {
found &= string.charAt( i - 1 ) != '\\';
}
if ( found ) {
// OK, we found a separator, the string to add to the list
// is [from, i[
list.add( nullToEmpty( string.substring( from, i ) ) );
from = i + 1;
}
}
// Wait, if the string didn't end with a separator, we still have information at the end of the string...
// In our example that would be "d"...
if ( from + 1 <= string.length() ) {
list.add( nullToEmpty( string.substring( from, string.length() ) ) );
}
return list.toArray( new String[list.size()] );
}
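// Illustrative example (editor's note, not part of the original source):
// with escape == true, splitString( "a\\;b;c", ';', true ) yields { "a\\;b", "c" }:
// the escaped separator stays inside the token and the backslash is NOT removed.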
/**
* Convert strings separated by a string into an array of strings.
* <p>
* <code>
* Example /a/b/c --> new String[] { a, b, c }
* </code>
*
* @param path
* The string to split
* @param separator
* The separator used.
* @return the string split into an array of strings
*/
public static String[] splitPath( String path, String separator ) {
//
// Example /a/b/c --> new String[] { a, b, c }
//
// Make sure trailing slashes are removed
//
// Example /a/b/c/ --> new String[] { a, b, c }
//
// Check for empty paths...
//
if ( path == null || path.length() == 0 || path.equals( separator ) ) {
return new String[] {};
}
// lose trailing separators
//
while ( path.endsWith( separator ) ) {
path = path.substring( 0, path.length() - separator.length() ); // strip the whole separator, which may be longer than one character
}
int sepLen = separator.length();
int nr_separators = 1;
int from = path.startsWith( separator ) ? sepLen : 0;
for ( int i = from; i < path.length(); i += sepLen ) {
if ( path.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) {
nr_separators++;
}
}
String[] spath = new String[nr_separators];
int nr = 0;
for ( int i = from; i < path.length(); i += sepLen ) {
if ( path.substring( i, i + sepLen ).equalsIgnoreCase( separator ) ) {
spath[nr] = path.substring( from, i );
nr++;
from = i + sepLen;
}
}
if ( nr < spath.length ) {
spath[nr] = path.substring( from );
}
//
// a --> { a }
//
if ( spath.length == 0 && path.length() > 0 ) {
spath = new String[] { path };
}
return spath;
}
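// Illustrative examples (editor's note, not part of the original source):
// splitPath( "/a/b/c", "/" ) --> { "a", "b", "c" }
// splitPath( "/a/b/c/", "/" ) --> { "a", "b", "c" } (trailing separators are dropped)
// splitPath( "a", "/" ) --> { "a" }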
/**
* Split the given string using the given delimiter and enclosure strings.
*
* The delimiter and enclosures are not regular expressions (regexes); rather they are literal strings that will be
* quoted so as not to be treated like regexes.
*
* This method expects that the data contains an even number of enclosure strings in the input; otherwise the results
* are undefined
*
* @param stringToSplit
* the String to split
* @param delimiter
* the delimiter string
* @param enclosure
* the enclosure string
* @return an array of strings split on the delimiter (ignoring those in enclosures), or null if the string to split
* is null.
*/
public static String[] splitString( String stringToSplit, String delimiter, String enclosure ) {
return splitString( stringToSplit, delimiter, enclosure, false );
}
/**
* Split the given string using the given delimiter and enclosure strings.
*
* The delimiter and enclosures are not regular expressions (regexes); rather they are literal strings that will be
* quoted so as not to be treated like regexes.
*
* This method expects that the data contains an even number of enclosure strings in the input; otherwise the results
* are undefined
*
* @param stringToSplit
* the String to split
* @param delimiter
* the delimiter string
* @param enclosure
* the enclosure string
* @param removeEnclosure
* removes enclosure from split result
* @return an array of strings split on the delimiter (ignoring those in enclosures), or null if the string to split
* is null.
*/
public static String[] splitString( String stringToSplit, String delimiter, String enclosure, boolean removeEnclosure ) {
ArrayList<String> splitList = null;
// Handle "bad input" cases
if ( stringToSplit == null ) {
return null;
}
if ( delimiter == null ) {
return ( new String[] { stringToSplit } );
}
// Split the string on the delimiter, we'll build the "real" results from the partial results
String[] delimiterSplit = stringToSplit.split( Pattern.quote( delimiter ) );
// At this point, if the enclosure is null or empty, we will return the delimiter split
if ( Utils.isEmpty( enclosure ) ) {
return delimiterSplit;
}
// Keep track of partial splits and concatenate them into a legit split
StringBuilder concatSplit = null;
if ( delimiterSplit != null && delimiterSplit.length > 0 ) {
// We'll have at least one result so create the result list object
splitList = new ArrayList<>();
// Proceed through the partial splits, concatenating if the splits are within the enclosure
for ( String currentSplit : delimiterSplit ) {
if ( !currentSplit.contains( enclosure ) ) {
// If we are currently concatenating a split, we are inside an enclosure. Since this
// split doesn't contain an enclosure, we can concatenate it (with a delimiter in front).
// If we're not concatenating, the split is fine so add it to the result list.
if ( concatSplit != null ) {
concatSplit.append( delimiter );
concatSplit.append( currentSplit );
} else {
splitList.add( currentSplit );
}
} else {
// Find number of enclosures in the split, and whether that number is odd or even.
int numEnclosures = StringUtils.countMatches( currentSplit, enclosure );
boolean oddNumberOfEnclosures = ( numEnclosures % 2 != 0 );
boolean addSplit = false;
// This split contains an enclosure, so either start or finish concatenating
if ( concatSplit == null ) {
concatSplit = new StringBuilder( currentSplit ); // start concatenation
addSplit = !oddNumberOfEnclosures;
} else {
// Check to make sure a new enclosure hasn't started within this split. This method expects
// that there are no non-delimiter characters between a delimiter and a starting enclosure.
// At this point in the code, the split shouldn't start with the enclosure, so add a delimiter
concatSplit.append( delimiter );
// Add the current split to the concatenated split
concatSplit.append( currentSplit );
// If the number of enclosures is odd, the enclosure is closed so add the split to the list
// and reset the "concatSplit" buffer. Otherwise continue
addSplit = oddNumberOfEnclosures;
}
// Check if enclosure is also using inside data
if ( addSplit || numEnclosures > 2 ) {
String splitResult = concatSplit.toString();
//remove enclosure from resulting split
if ( removeEnclosure ) {
splitResult = removeEnclosure( splitResult, enclosure );
}
splitList.add( splitResult );
concatSplit = null;
addSplit = false;
}
}
}
}
// Return list as array
return splitList.toArray( new String[splitList.size()] );
}
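// Illustrative examples (editor's note, not part of the original source):
// splitString( "a;\"b;c\";d", ";", "\"" ) --> { "a", "\"b;c\"", "d" }
// splitString( "a;\"b;c\";d", ";", "\"", true ) --> { "a", "b;c", "d" }
// Delimiters that fall inside an enclosure pair are not treated as split points.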
private static final String WIKI_URL = "http://wiki.pentaho.com";
private static String removeEnclosure( String stringToSplit, String enclosure ) {
int firstIndex = stringToSplit.indexOf( enclosure );
int lastIndex = stringToSplit.lastIndexOf( enclosure );
if ( firstIndex == lastIndex ) {
return stringToSplit;
}
StrBuilder strBuilder = new StrBuilder( stringToSplit );
strBuilder.replace( firstIndex, enclosure.length() + firstIndex, "" );
strBuilder.replace( lastIndex - enclosure.length(), lastIndex, "" );
return strBuilder.toString();
}
/**
* Sorts the array of Strings and returns the uniquely occurring strings, compared case-insensitively. Note that the input array is sorted in place.
*
* @param strings
* the array that you want to do a distinct on
* @return a sorted array of uniquely occurring strings
*/
public static String[] getDistinctStrings( String[] strings ) {
if ( strings == null ) {
return null;
}
if ( strings.length == 0 ) {
return new String[] {};
}
String[] sorted = sortStrings( strings );
List<String> result = new ArrayList<>();
String previous = "";
for ( int i = 0; i < sorted.length; i++ ) {
if ( !sorted[i].equalsIgnoreCase( previous ) ) {
result.add( sorted[i] );
}
previous = sorted[i];
}
return result.toArray( new String[result.size()] );
}
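// Illustrative example (editor's note, not part of the original source):
// getDistinctStrings( new String[] { "b", "A", "a", "b" } ) returns { "A", "b" }.
// Note that the input array is sorted in place and duplicates are compared case-insensitively.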
/**
* Returns a string of the stack trace of the specified exception
*/
public static String getStackTracker( Throwable e ) {
return getClassicStackTrace( e );
}
public static String getClassicStackTrace( Throwable e ) {
StringWriter stringWriter = new StringWriter();
PrintWriter printWriter = new PrintWriter( stringWriter );
e.printStackTrace( printWriter );
String string = stringWriter.toString();
try {
stringWriter.close();
} catch ( IOException ioe ) {
// ignore: closing a StringWriter has no effect and cannot meaningfully fail
}
return string;
}
public static String getCustomStackTrace( Throwable aThrowable ) {
final StringBuilder result = new StringBuilder();
String errorMessage = aThrowable.toString();
result.append( errorMessage );
if ( !errorMessage.contains( Const.CR ) ) {
result.append( CR );
}
// add each element of the stack trace
//
for ( StackTraceElement element : aThrowable.getStackTrace() ) {
result.append( element );
result.append( CR );
}
return result.toString();
}
/**
* Check if the string supplied is empty. A String is empty when it is null or when the length is 0
*
* @param val
* The value to check
* @return true if the string supplied is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(CharSequence)
*/
@Deprecated
public static boolean isEmpty( String val ) {
return Utils.isEmpty( val );
}
/**
* Check if the stringBuffer supplied is empty. A StringBuffer is empty when it is null or when the length is 0
*
* @param val
* The stringBuffer to check
* @return true if the stringBuffer supplied is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(CharSequence)
*/
@Deprecated
public static boolean isEmpty( StringBuffer val ) {
return Utils.isEmpty( val );
}
/**
* Check if the string array supplied is empty. A String array is empty when it is null or when the number of elements
* is 0
*
* @param vals
* The string array to check
* @return true if the string array supplied is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(CharSequence[])
*/
@Deprecated
public static boolean isEmpty( String[] vals ) {
return Utils.isEmpty( vals );
}
/**
* Check if the CharSequence supplied is empty. A CharSequence is empty when it is null or when the length is 0
*
* @param val
* The stringBuffer to check
* @return true if the stringBuffer supplied is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(CharSequence)
*/
@Deprecated
public static boolean isEmpty( CharSequence val ) {
return Utils.isEmpty( val );
}
/**
* Check if the CharSequence array supplied is empty. A CharSequence array is empty when it is null or when the number of elements
* is 0
*
* @param vals
* The string array to check
* @return true if the string array supplied is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(CharSequence[])
*/
@Deprecated
public static boolean isEmpty( CharSequence[] vals ) {
return Utils.isEmpty( vals );
}
/**
* Check if the array supplied is empty. An array is empty when it is null or when the length is 0
*
* @param array
* The array to check
* @return true if the array supplied is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(Object[])
*/
@Deprecated
public static boolean isEmpty( Object[] array ) {
return Utils.isEmpty( array );
}
/**
* Check if the list supplied is empty. An array is empty when it is null or when the length is 0
*
* @param list
* the list to check
* @return true if the supplied list is empty
* @deprecated
* @see org.pentaho.di.core.util.Utils#isEmpty(List)
*/
@Deprecated
public static boolean isEmpty( List<?> list ) {
return Utils.isEmpty( list );
}
/**
* @return a new ClassLoader
*/
public static ClassLoader createNewClassLoader() throws KettleException {
try {
// Nothing really in URL, everything is in scope.
URL[] urls = new URL[] {};
URLClassLoader ucl = new URLClassLoader( urls );
return ucl;
} catch ( Exception e ) {
throw new KettleException( "Unexpected error during classloader creation", e );
}
}
/**
* Utility method for use in JavaScript to create a new byte array. This is surprisingly difficult to do in JavaScript.
*
* @return a new java byte array
*/
public static byte[] createByteArray( int size ) {
return new byte[size];
}
/**
* Sets the first character of each word in upper-case.
*
* @param string
* The string to convert to initcap
* @return the input string but with the first character of each word converted to upper-case.
*/
public static String initCap( String string ) {
StringBuilder change = new StringBuilder( string );
boolean new_word;
int i;
char lower, upper, ch;
new_word = true;
for ( i = 0; i < string.length(); i++ ) {
lower = change.substring( i, i + 1 ).toLowerCase().charAt( 0 ); // Lowercase is default.
upper = change.substring( i, i + 1 ).toUpperCase().charAt( 0 ); // Uppercase for new words.
ch = upper;
if ( new_word ) {
change.setCharAt( i, upper );
} else {
change.setCharAt( i, lower );
}
new_word = false;
// Cast to (int) is required for extended characters (SB)
if ( !Character.isLetterOrDigit( (int) ch ) && ch != '_' ) {
new_word = true;
}
}
return change.toString();
}
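// Illustrative example (editor's note, not part of the original source):
// initCap( "hello kettle_world" ) returns "Hello Kettle_world"; a new word starts after any
// character that is neither a letter, a digit nor an underscore.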
/**
* Create a valid filename from a name: special characters are removed, whitespace is replaced by underscores and the result is converted to lower-case.
*
* @param name
* The name to use as a base for the filename
* @return a valid filename
*/
public static String createFilename( String name ) {
StringBuilder filename = new StringBuilder();
for ( int i = 0; i < name.length(); i++ ) {
char c = name.charAt( i );
if ( Character.isUnicodeIdentifierPart( c ) ) {
filename.append( c );
} else if ( Character.isWhitespace( c ) ) {
filename.append( '_' );
}
}
return filename.toString().toLowerCase();
}
public static String createFilename( String directory, String name, String extension ) {
if ( directory.endsWith( Const.FILE_SEPARATOR ) ) {
return directory + createFilename( name ) + extension;
} else {
return directory + Const.FILE_SEPARATOR + createFilename( name ) + extension;
}
}
public static String createName( String filename ) {
if ( Utils.isEmpty( filename ) ) {
return filename;
}
String pureFilename = filenameOnly( filename );
if ( pureFilename.endsWith( ".ktr" ) || pureFilename.endsWith( ".kjb" ) || pureFilename.endsWith( ".xml" ) ) {
pureFilename = pureFilename.substring( 0, pureFilename.length() - 4 );
}
StringBuilder sb = new StringBuilder();
for ( int i = 0; i < pureFilename.length(); i++ ) {
char c = pureFilename.charAt( i );
if ( Character.isUnicodeIdentifierPart( c ) ) {
sb.append( c );
} else if ( Character.isWhitespace( c ) ) {
sb.append( ' ' );
} else if ( c == '-' ) {
sb.append( c );
}
}
return sb.toString();
}
/**
* <p>
* Returns the pure filename of a filename with full path. E.g. if passed parameter is
* <code>/opt/tomcat/logs/catalina.out</code> this method returns <code>catalina.out</code>. The method works with the
* system property <i>file.separator</i> (System.getProperty("file.separator")), so on Linux/Unix it will check for the last
* occurrence of a forward slash, on Windows for the last occurrence of a backslash.
* </p>
*
* <p>
* To make this OS independent, the method could check for the last occurrence of both a forward slash and a
* backslash and use the higher of the two indexes. That would work, since neither character is allowed inside
* filenames on either OS. This is just a suggestion for an improvement.
* </p>
*
* @param sFullPath the full path, including the filename
* @return the filename without its path
*/
public static String filenameOnly( String sFullPath ) {
if ( Utils.isEmpty( sFullPath ) ) {
return sFullPath;
}
int idx = sFullPath.lastIndexOf( FILE_SEPARATOR );
if ( idx != -1 ) {
return sFullPath.substring( idx + 1 );
} else {
idx = sFullPath.lastIndexOf( '/' ); // URL, VFS/**/
if ( idx != -1 ) {
return sFullPath.substring( idx + 1 );
} else {
return sFullPath;
}
}
}
/**
* Returning the localized date conversion formats. They get created once on first request.
*
* @return the array of localized date format strings
*/
public static String[] getDateFormats() {
if ( dateFormats == null ) {
int dateFormatsCount = toInt( BaseMessages.getString( PKG, "Const.DateFormat.Count" ), 0 );
dateFormats = new String[dateFormatsCount];
for ( int i = 1; i <= dateFormatsCount; i++ ) {
dateFormats[i - 1] = BaseMessages.getString( PKG, "Const.DateFormat" + Integer.toString( i ) );
}
}
return dateFormats;
}
/**
* Returning the localized date conversion formats without time. They get created once on first request.
*
* @return the array of localized date format strings that do not contain a time part
*/
public static String[] getTimelessDateFormats() {
if ( dateTimelessFormats == null ) {
List<String> dateFormats = Arrays.asList( Const.getDateFormats() );
dateFormats = dateFormats.stream()
.filter( date -> !date.toLowerCase().contains( "hh" ) )
.collect( Collectors.toList() );
dateTimelessFormats = dateFormats.toArray( new String[dateFormats.size()] );
}
return dateTimelessFormats;
}
/**
* Returning the localized number conversion formats. They get created once on first request.
*
* @return the array of localized number format strings
*/
public static String[] getNumberFormats() {
if ( numberFormats == null ) {
int numberFormatsCount = toInt( BaseMessages.getString( PKG, "Const.NumberFormat.Count" ), 0 );
numberFormats = new String[numberFormatsCount + 1];
numberFormats[0] = DEFAULT_NUMBER_FORMAT;
for ( int i = 1; i <= numberFormatsCount; i++ ) {
numberFormats[i] = BaseMessages.getString( PKG, "Const.NumberFormat" + Integer.toString( i ) );
}
}
return numberFormats;
}
/**
* @return An array of all default conversion formats, to be used in dialogs etc.
*/
public static String[] getConversionFormats() {
String[] dats = Const.getDateFormats();
String[] nums = Const.getNumberFormats();
int totsize = dats.length + nums.length;
String[] formats = new String[totsize];
for ( int x = 0; x < dats.length; x++ ) {
formats[x] = dats[x];
}
for ( int x = 0; x < nums.length; x++ ) {
formats[dats.length + x] = nums[x];
}
return formats;
}
public static String[] getTransformationAndJobFilterNames() {
if ( STRING_TRANS_AND_JOB_FILTER_NAMES == null ) {
STRING_TRANS_AND_JOB_FILTER_NAMES =
new String[] {
BaseMessages.getString( PKG, "Const.FileFilter.TransformationJob" ),
BaseMessages.getString( PKG, "Const.FileFilter.Transformations" ),
BaseMessages.getString( PKG, "Const.FileFilter.Jobs" ),
BaseMessages.getString( PKG, "Const.FileFilter.XML" ),
BaseMessages.getString( PKG, "Const.FileFilter.All" ) };
}
return STRING_TRANS_AND_JOB_FILTER_NAMES;
}
public static String[] getTransformationFilterNames() {
if ( STRING_TRANS_FILTER_NAMES == null ) {
STRING_TRANS_FILTER_NAMES =
new String[] {
BaseMessages.getString( PKG, "Const.FileFilter.Transformations" ),
BaseMessages.getString( PKG, "Const.FileFilter.XML" ),
BaseMessages.getString( PKG, "Const.FileFilter.All" ) };
}
return STRING_TRANS_FILTER_NAMES;
}
public static String[] getJobFilterNames() {
if ( STRING_JOB_FILTER_NAMES == null ) {
STRING_JOB_FILTER_NAMES =
new String[] {
BaseMessages.getString( PKG, "Const.FileFilter.Jobs" ),
BaseMessages.getString( PKG, "Const.FileFilter.XML" ),
BaseMessages.getString( PKG, "Const.FileFilter.All" ) };
}
return STRING_JOB_FILTER_NAMES;
}
/**
* Return the current time as nano-seconds.
*
* @return time as nano-seconds.
*/
public static long nanoTime() {
// Date.getTime() is in milliseconds; multiply by 1,000,000 to convert to nanoseconds.
return new Date().getTime() * 1000000L;
}
/**
* Return the input string trimmed as specified.
*
* @param string
* String to be trimmed
* @param trimType
* Type of trimming
*
* @return Trimmed string.
*/
public static String trimToType( String string, int trimType ) {
switch ( trimType ) {
case ValueMetaInterface.TRIM_TYPE_BOTH:
return trim( string );
case ValueMetaInterface.TRIM_TYPE_LEFT:
return ltrim( string );
case ValueMetaInterface.TRIM_TYPE_RIGHT:
return rtrim( string );
case ValueMetaInterface.TRIM_TYPE_NONE:
default:
return string;
}
}
/**
* Trims a Date by resetting the time part to zero
* @param date a Date object to trim (reset time to zero)
* @return a Date object with time part reset to zero
*/
public static Date trimDate( Date date ) {
Calendar calendar = Calendar.getInstance();
calendar.setTime( date );
calendar.set( Calendar.MILLISECOND, 0 );
calendar.set( Calendar.SECOND, 0 );
calendar.set( Calendar.MINUTE, 0 );
calendar.set( Calendar.HOUR_OF_DAY, 0 );
return calendar.getTime();
}
/**
* Safely appends a file name to a directory, avoiding malformed paths caused by duplicate or missing file
* separators. Originally implemented to help match up pluggable LAF directories and paths/files.
*
* @param dir
* the directory part
* @param file
* the file part
* @return concatenated string representing a file URL
*/
public static String safeAppendDirectory( String dir, String file ) {
boolean dirHasSeparator = ( ( dir.lastIndexOf( FILE_SEPARATOR ) ) == dir.length() - 1 );
boolean fileHasSeparator = ( file.indexOf( FILE_SEPARATOR ) == 0 );
if ( ( dirHasSeparator && !fileHasSeparator ) || ( !dirHasSeparator && fileHasSeparator ) ) {
return dir + file;
}
if ( dirHasSeparator && fileHasSeparator ) {
return dir + file.substring( 1 );
}
return dir + FILE_SEPARATOR + file;
}
/**
* Create an array of Strings consisting of spaces. The index of a String in the array determines the number of spaces
* in that string.
*
* @return array of 'space' Strings.
*/
public static String[] getEmptyPaddedStrings() {
if ( emptyPaddedSpacesStrings == null ) {
emptyPaddedSpacesStrings = new String[250];
for ( int i = 0; i < emptyPaddedSpacesStrings.length; i++ ) {
emptyPaddedSpacesStrings[i] = rightPad( "", i );
}
}
return emptyPaddedSpacesStrings;
}
/**
* Return the percentage of free memory for this JVM.
*
* @return Percentage of free memory.
*/
public static int getPercentageFreeMemory() {
Runtime runtime = Runtime.getRuntime();
long maxMemory = runtime.maxMemory();
long allocatedMemory = runtime.totalMemory();
long freeMemory = runtime.freeMemory();
long totalFreeMemory = ( freeMemory + ( maxMemory - allocatedMemory ) );
return (int) Math.round( 100 * (double) totalFreeMemory / maxMemory );
}
/**
* Return only the non-digit characters of a string.
*
* @param input the string to filter
* @return the input with all digit characters removed, or null if the input is empty.
*/
public static String removeDigits( String input ) {
if ( Utils.isEmpty( input ) ) {
return null;
}
StringBuilder nonDigits = new StringBuilder();
char c;
for ( int i = 0; i < input.length(); i++ ) {
c = input.charAt( i );
if ( !Character.isDigit( c ) ) {
nonDigits.append( c );
}
}
return nonDigits.toString();
}
/**
* Return only the digit characters of a string.
*
* @param input the string to filter
* @return the digits contained in the string, or null if the input is empty.
*/
public static String getDigitsOnly( String input ) {
if ( Utils.isEmpty( input ) ) {
return null;
}
StringBuilder digitsOnly = new StringBuilder();
char c;
for ( int i = 0; i < input.length(); i++ ) {
c = input.charAt( i );
if ( Character.isDigit( c ) ) {
digitsOnly.append( c );
}
}
return digitsOnly.toString();
}
/**
* Remove the time portion from a date.
*
* @param input the date to strip
* @return a new date with hours, minutes, seconds and milliseconds set to zero, or null if the input is null.
*/
public static Date removeTimeFromDate( Date input ) {
if ( input == null ) {
return null;
}
// Get an instance of the Calendar.
Calendar calendar = Calendar.getInstance();
// Make sure the calendar will not perform automatic correction.
calendar.setLenient( false );
// Set the time of the calendar to the given date.
calendar.setTime( input );
// Remove the hours, minutes, seconds and milliseconds.
calendar.set( Calendar.HOUR_OF_DAY, 0 );
calendar.set( Calendar.MINUTE, 0 );
calendar.set( Calendar.SECOND, 0 );
calendar.set( Calendar.MILLISECOND, 0 );
// Return the date again.
return calendar.getTime();
}
/**
* Escape XML content, i.e. replace special characters with their XML entity equivalents.
*
* @param content
* content
* @return escaped content
*/
public static String escapeXML( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return StringEscapeUtils.escapeXml( content );
}
/**
* Escape HTML content, i.e. replace special characters with their HTML entity equivalents.
*
* @param content
* content
* @return escaped content
*/
public static String escapeHtml( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return StringEscapeUtils.escapeHtml( content );
}
/**
* Unescape HTML content, i.e. replace HTML entities with the characters they represent.
*
* @param content
* content
* @return unescaped content
*/
public static String unEscapeHtml( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return StringEscapeUtils.unescapeHtml( content );
}
/**
* Unescape XML content, i.e. replace XML entities with the characters they represent.
*
* @param content
* content
* @return unescaped content
*/
public static String unEscapeXml( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return StringEscapeUtils.unescapeXml( content );
}
/**
* Escape SQL content, i.e. escape single quotes so the value can be embedded in a SQL string literal.
*
* @param content
* content
* @return escaped content
*/
public static String escapeSQL( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return StringEscapeUtils.escapeSql( content );
}
/**
* Remove CR / LF from String - Better performance version
* - Doesn't NPE
* - 40 times faster on an empty string
* - 2 times faster on a mixed string
* - 25% faster on 2 char string with only CRLF in it
*
* @param in
* input
* @return cleaned string
*/
public static String removeCRLF( String in ) {
if ( ( in != null ) && ( in.length() > 0 ) ) {
int inLen = in.length(), posn = 0;
char[] tmp = new char[ inLen ];
char ch;
for ( int i = 0; i < inLen; i++ ) {
ch = in.charAt( i );
if ( ( ch != '\n' && ch != '\r' ) ) {
tmp[posn] = ch;
posn++;
}
}
return new String( tmp, 0, posn );
} else {
return "";
}
}
/**
* Remove Character from String - Better performance version
* - Doesn't NPE
* - 40 times faster on an empty string
* - 2 times faster on a mixed string
* - 25% faster on 2 char string with only CR/LF/TAB in it
*
* @param in
* input
* @return cleaned string
*/
public static String removeChar( String in, char badChar ) {
if ( ( in != null ) && ( in.length() > 0 ) ) {
int inLen = in.length(), posn = 0;
char[] tmp = new char[ inLen ];
char ch;
for ( int i = 0; i < inLen; i++ ) {
ch = in.charAt( i );
if ( ch != badChar ) {
tmp[posn] = ch;
posn++;
}
}
return new String( tmp, 0, posn );
} else {
return "";
}
}
/**
* Remove carriage return characters (CR) from a String
*
* @param in
* input
* @return cleaned string
*/
public static String removeCR( String in ) {
return removeChar( in, '\r' );
} // removeCR
/**
* Remove line feed characters (LF) from a String
*
* @param in
* input
* @return cleaned string
*/
public static String removeLF( String in ) {
return removeChar( in, '\n' );
} // removeLF
/**
* Remove horizontal tab from string
*
* @param in
* input
* @return cleaned string
*/
public static String removeTAB( String in ) {
return removeChar( in, '\t' );
}
/**
* Add time to an input date
*
* @param input
* the date
* @param time
* the time to add (as a string)
* @param dateFormat
* the time format
* @return date = input + time
*/
public static Date addTimeToDate( Date input, String time, String dateFormat ) throws Exception {
if ( Utils.isEmpty( time ) ) {
return input;
}
if ( input == null ) {
return null;
}
String dateformatString = NVL( dateFormat, "HH:mm:ss" );
int t = decodeTime( time, dateformatString );
return new Date( input.getTime() + t );
}
// Decodes a time value in specified date format and returns it as milliseconds since midnight.
public static int decodeTime( String s, String dateFormat ) throws Exception {
SimpleDateFormat f = new SimpleDateFormat( dateFormat );
TimeZone utcTimeZone = TimeZone.getTimeZone( "UTC" );
f.setTimeZone( utcTimeZone );
f.setLenient( false );
ParsePosition p = new ParsePosition( 0 );
Date d = f.parse( s, p );
if ( d == null ) {
throw new Exception( "Invalid time value " + dateFormat + ": \"" + s + "\"." );
}
return (int) d.getTime();
}
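// Illustrative example (editor's note, not part of the original source):
// decodeTime( "01:30:00", "HH:mm:ss" ) parses the value in UTC and returns 5400000,
// i.e. one and a half hours expressed in milliseconds since midnight.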
/**
* Get the number of occurrences of searchFor in string.
*
* @param string
* String to be searched
* @param searchFor
* the substring to count
* @return number of occurrences
*/
public static int getOccurenceString( String string, String searchFor ) {
if ( string == null || string.length() == 0 ) {
return 0;
}
int counter = 0;
int len = searchFor.length();
if ( len > 0 ) {
int start = string.indexOf( searchFor );
while ( start != -1 ) {
counter++;
start = string.indexOf( searchFor, start + len );
}
}
return counter;
}
public static String[] GetAvailableFontNames() {
GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
Font[] fonts = ge.getAllFonts();
String[] fontNames = new String[fonts.length];
for ( int i = 0; i < fonts.length; i++ ) {
fontNames[i] = fonts[i].getFontName();
}
return fontNames;
}
public static String getKettlePropertiesFileHeader() {
StringBuilder out = new StringBuilder();
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line01", BuildVersion
.getInstance().getVersion() )
+ CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line02" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line03" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line04" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line05" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line06" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line07" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line08" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line09" ) + CR );
out.append( BaseMessages.getString( PKG, "Props.Kettle.Properties.Sample.Line10" ) + CR );
return out.toString();
}
/**
* Wrap XML content in a CDATA section so it is not parsed as markup.
*
* @param content
* content
* @return protected content
*/
public static String protectXMLCDATA( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return "<![CDATA[" + content + "]]>";
}
/**
* Get the number of occurrences of searchFor in string. Unlike getOccurenceString( String, String ),
* the searchFor argument is treated here as a regular expression.
*
* @param string
* String to be searched
* @param searchFor
* the regular expression to count matches of
* @return number of occurrences
*/
public static int getOcuranceString( String string, String searchFor ) {
if ( string == null || string.length() == 0 ) {
return 0;
}
Pattern p = Pattern.compile( searchFor );
Matcher m = p.matcher( string );
int count = 0;
while ( m.find() ) {
++count;
}
return count;
}
/**
* Escape XML content, i.e. replace special characters with their XML entity equivalents.
*
* @param content
* content
* @return masked content
*/
public static String escapeXml( String content ) {
if ( Utils.isEmpty( content ) ) {
return content;
}
return StringEscapeUtils.escapeXml( content );
}
/**
* Decode a URI string that contains escaped special characters, unless the KETTLE_RETURN_ESCAPED_URI_STRINGS
* property is set to "Y", in which case the string is returned unchanged.
* @param uri the URI string to decode
* @return the decoded (or unchanged) URI string
*/
public static String optionallyDecodeUriString( String uri ) {
boolean decodeUri = !System.getProperty( KETTLE_RETURN_ESCAPED_URI_STRINGS, "N" )
.equalsIgnoreCase( "Y" );
if ( decodeUri ) {
try {
return UriParser.decode( uri );
} catch ( FileSystemException e ) {
// return the raw string if the URI is malformed (bad escape sequence)
return uri;
}
} else {
return uri;
}
}
/**
* This method avoids string concatenation and is between 20% and more than 2000% faster,
* depending on the length of the string to pad and the size to pad it to.
* For larger amounts to pad, (e.g. pad a 4 character string out to 20 places)
* this is orders of magnitude faster.
*
* @param valueToPad
* the string to pad
* @param filler
* the pad string to fill with
* @param size
* the size to pad to
* @return
* the new string, padded to the left
*
* Note - The original method was flawed in a few cases:
*
* 1- The filler could be a string of any length - and the returned
* string was not necessarily limited to size. So a 3 character pad
* of an 11 character string could end up being 17 characters long.
* 2- For a pad of zero characters ("") the former method would enter
* an infinite loop.
* 3- For a null pad, it would throw an NPE
* 4- For a null valueToPad, it would throw an NPE
*/
public static String Lpad( String valueToPad, String filler, int size ) {
if ( ( size == 0 ) || ( valueToPad == null ) || ( filler == null ) ) {
return valueToPad;
}
int vSize = valueToPad.length();
int fSize = filler.length();
// This next if ensures previous behavior, but prevents infinite loop
// if "" is passed in as a filler.
if ( ( vSize >= size ) || ( fSize == 0 ) ) {
return valueToPad;
}
int tgt = ( size - vSize );
StringBuilder sb = new StringBuilder( size );
sb.append( filler );
while ( sb.length() < tgt ) {
// instead of adding one character at a time, this
// is exponential - much fewer times in loop
sb.append( sb );
}
sb.append( valueToPad );
return sb.substring( Math.max( 0, sb.length() - size ) ); // this makes sure you have the right size string returned.
}
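// Illustrative examples (editor's note, not part of the original source):
// Lpad( "123", "0", 6 ) --> "000123"
// Lpad( "123456", "0", 4 ) --> "123456" (values already longer than size are returned unchanged)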
/**
* This method avoids string concatenation and is between 50% and more than 2000% faster,
* depending on the length of the string to pad and the size to pad it to.
* For larger amounts to pad, (e.g. pad a 4 character string out to 20 places)
* this is orders of magnitude faster.
*
* @param valueToPad
* the string to pad
* @param filler
* the pad string to fill with
* @param size
* the size to pad to
* @return
* The string, padded to the right
*
* Note - The original method was flawed in a few cases:
*
* 1- The filler can still be a string of any length - and the returned
* string was not necessarily limited to size. So a 3 character pad
* of an 11 character string with a size of 15 could end up being 17
* characters long (instead of the "asked for 15").
* 2- For a pad of zero characters ("") the former method would enter
* an infinite loop.
* 3- For a null pad, it would throw an NPE
* 4- For a null valueToPad, it would throw an NPE
*/
public static String Rpad( String valueToPad, String filler, int size ) {
if ( ( size == 0 ) || ( valueToPad == null ) || ( filler == null ) ) {
return valueToPad;
}
int vSize = valueToPad.length();
int fSize = filler.length();
// This next if ensures previous behavior, but prevents infinite loop
// if "" is passed in as a filler.
if ( ( vSize >= size ) || ( fSize == 0 ) ) {
return valueToPad;
}
int tgt = ( size - vSize );
StringBuilder sb1 = new StringBuilder( size );
sb1.append( filler );
while ( sb1.length() < tgt ) {
// instead of adding one character at a time, this
// is exponential - much fewer times in loop
sb1.append( sb1 );
}
StringBuilder sb = new StringBuilder( valueToPad );
sb.append( sb1 );
return sb.substring( 0, size );
}
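// Illustrative examples (editor's note, not part of the original source):
// Rpad( "123", "0", 6 ) --> "123000"
// Rpad( "abc", "xy", 6 ) --> "abcxyx" (the filler is repeated and then truncated to size)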
public static boolean classIsOrExtends( Class<?> clazz, Class<?> superClass ) {
// getSuperclass() returns null for interfaces, primitives and Object itself, so guard against null
// before recursing.
if ( clazz == null || clazz.equals( Object.class ) ) {
return false;
}
return clazz.equals( superClass ) || classIsOrExtends( clazz.getSuperclass(), superClass );
}
public static String getDeprecatedPrefix() {
return " " + BaseMessages.getString( PKG, "Const.Deprecated" );
}
}
| {'content_hash': 'affbb1d54d3142713ab55fb5c9027134', 'timestamp': '', 'source': 'github', 'line_count': 4087, 'max_line_length': 169, 'avg_line_length': 34.308294592610714, 'alnum_prop': 0.6577472221826014, 'repo_name': 'rmansoor/pentaho-kettle', 'id': '154732589a8a076243b0e31b7582859658eb60c5', 'size': '141130', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'core/src/main/java/org/pentaho/di/core/Const.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '45618'}, {'name': 'CSS', 'bytes': '37792'}, {'name': 'GAP', 'bytes': '4005'}, {'name': 'HTML', 'bytes': '41671'}, {'name': 'Java', 'bytes': '45347869'}, {'name': 'JavaScript', 'bytes': '511961'}, {'name': 'Shell', 'bytes': '48803'}]} |
import {MDCTopAppBar} from '@material/top-app-bar/index';
import React, {Component} from 'react';
import '../styles/TopAppBarFrameCatalog.scss';
const propToVariant = {
'short': {title: 'Short', variant: 'mdc-top-app-bar--short'},
'short-collapsed': {title: 'Short - Always Collapsed', variant: 'mdc-top-app-bar--short mdc-top-app-bar--short-collapsed'},
'fixed': {title: 'Fixed', variant: 'mdc-top-app-bar--fixed'},
'prominent': {title: 'Prominent', variant: 'mdc-top-app-bar--prominent'},
'dense': {title: 'Dense', variant: 'mdc-top-app-bar--dense'},
'standard': {title: 'Standard', variant: ''},
};
const loremIpsum = `Lorem ipsum dolor sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut
enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut
aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in
voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit
anim id est laborum.`;
class TopAppBarFramePage extends Component {
topAppBar = null;
initTopAppBar = topAppBarEle => this.topAppBar = topAppBarEle && new MDCTopAppBar(topAppBarEle);
componentWillUnmount() {
if (this.topAppBar) {
this.topAppBar.destroy();
}
}
render() {
const {match} = this.props;
return (
<div className='top-app-bar__frame'>
{this.getVariant(propToVariant[match.params.type])}
</div>
);
}
getVariant(type = propToVariant.standard) {
return (
<div className='demo-frame'>
<header className={`mdc-top-app-bar ${type.variant}`} style={{top: 0}} ref={this.initTopAppBar}>
<div className='mdc-top-app-bar__row'>
<section className='mdc-top-app-bar__section mdc-top-app-bar__section--align-start'>
<button className='mdc-icon-button material-icons mdc-top-app-bar__navigation-icon'>menu</button>
<span className='mdc-top-app-bar__title'>{type.title}</span>
</section>
{this.getIcons(type.variant)}
</div>
</header>
<div className={this.getOffset(type)}>
<div>
<p>
{loremIpsum}
</p>
<p>
{loremIpsum}
</p>
<p>
{loremIpsum}
</p>
<p>
{loremIpsum}
</p>
</div>
</div>
</div>
);
}
getOffset(type = propToVariant.standard) {
let offsetClass = `${type.variant}-fixed-adjust`;
if (type === propToVariant.standard || type === propToVariant.fixed) {
offsetClass = 'mdc-top-app-bar--fixed-adjust';
} else if (type === propToVariant['short-collapsed']) {
offsetClass = 'mdc-top-app-bar--short-fixed-adjust';
}
return offsetClass;
}
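// Illustrative note (editor's addition, not part of the original source): getOffset maps a variant
// to its matching "*-fixed-adjust" class, e.g. propToVariant['short'] yields
// 'mdc-top-app-bar--short-fixed-adjust' and propToVariant['dense'] yields 'mdc-top-app-bar--dense-fixed-adjust'.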
getIcons(variant) {
const isShort = variant.indexOf('mdc-top-app-bar--short') !== -1;
const topAppBarIconsClasses = 'mdc-icon-button material-icons mdc-top-app-bar__action-item';
const topAppBarIconSectionClasses = 'mdc-top-app-bar__section mdc-top-app-bar__section--align-end';
if (isShort) {
return (
<section className={topAppBarIconSectionClasses}>
<button className={topAppBarIconsClasses} aria-label='Download'>file_download</button>
</section>
);
} else {
return (
<section className={topAppBarIconSectionClasses}>
<button className={topAppBarIconsClasses} aria-label='Download'>file_download</button>
<button className={topAppBarIconsClasses} aria-label='Print this page'>print</button>
<button className={topAppBarIconsClasses} aria-label='Bookmark this page'>bookmark</button>
</section>
);
}
}
}
export default TopAppBarFramePage;
| {'content_hash': '585a39ec3fa7310decd41003e1b4a225', 'timestamp': '', 'source': 'github', 'line_count': 110, 'max_line_length': 125, 'avg_line_length': 35.445454545454545, 'alnum_prop': 0.6283662477558348, 'repo_name': 'material-components/material-components-web-catalog', 'id': '65f0adb91306fc98f4719f2485c35e39987044c8', 'size': '3899', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/frame/TopAppBarFramePage.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'HTML', 'bytes': '2028'}, {'name': 'JavaScript', 'bytes': '248220'}, {'name': 'SCSS', 'bytes': '17937'}]} |
using System;
using System.Collections;
using System.Text;
using System.Text.RegularExpressions;
namespace ICSharpCode.SharpZipLib.Core
{
/// <summary>
/// NameFilter is a string matching class which allows for both positive and negative
/// matching.
/// A filter is a sequence of independant <see cref="Regex">regular expressions</see> separated by semi-colons ';'.
/// To include a semi-colon it may be quoted as in \;. Each expression can be prefixed by a plus '+' sign or
/// a minus '-' sign to denote the expression is intended to include or exclude names.
/// If neither a plus or minus sign is found include is the default.
/// A given name is tested for inclusion before checking exclusions. Only names matching an include spec
/// and not matching an exclude spec are deemed to match the filter.
/// An empty filter matches any name.
/// </summary>
/// <example>The following expression includes all name ending in '.dat' with the exception of 'dummy.dat'
/// "+\.dat$;-^dummy\.dat$"
/// </example>
internal class NameFilter : IScanFilter
{
#region Constructors
/// <summary>
/// Construct an instance based on the filter expression passed
/// </summary>
/// <param name="filter">The filter expression.</param>
public NameFilter(string filter)
{
filter_ = filter;
inclusions_ = new ArrayList();
exclusions_ = new ArrayList();
Compile();
}
#endregion
/// <summary>
/// Test a string to see if it is a valid regular expression.
/// </summary>
/// <param name="expression">The expression to test.</param>
/// <returns>True if expression is a valid <see cref="System.Text.RegularExpressions.Regex"/> false otherwise.</returns>
public static bool IsValidExpression(string expression)
{
bool result = true;
try {
// Constructing the Regex is only done for validation; it throws ArgumentException if the expression is invalid.
Regex exp = new Regex(expression, RegexOptions.IgnoreCase | RegexOptions.Singleline);
}
catch (ArgumentException) {
result = false;
}
return result;
}
/// <summary>
/// Test an expression to see if it is valid as a filter.
/// </summary>
/// <param name="toTest">The filter expression to test.</param>
/// <returns>True if the expression is valid, false otherwise.</returns>
public static bool IsValidFilterExpression(string toTest)
{
bool result = true;
try {
if (toTest != null) {
string[] items = SplitQuoted(toTest);
for (int i = 0; i < items.Length; ++i) {
if ((items[i] != null) && (items[i].Length > 0)) {
string toCompile;
if (items[i][0] == '+') {
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else if (items[i][0] == '-') {
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else {
toCompile = items[i];
}
Regex testRegex = new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Singleline);
}
}
}
}
catch (ArgumentException) {
result = false;
}
return result;
}
/// <summary>
/// Split a string into its component pieces
/// </summary>
/// <param name="original">The original string</param>
/// <returns>Returns an array of <see cref="T:System.String"/> values containing the individual filter elements.</returns>
public static string[] SplitQuoted(string original)
{
char escape = '\\';
char[] separators = { ';' };
ArrayList result = new ArrayList();
if ((original != null) && (original.Length > 0)) {
int endIndex = -1;
StringBuilder b = new StringBuilder();
while (endIndex < original.Length) {
endIndex += 1;
if (endIndex >= original.Length) {
result.Add(b.ToString());
}
else if (original[endIndex] == escape) {
endIndex += 1;
if (endIndex >= original.Length) {
#if NETCF_1_0
throw new ArgumentException("Missing terminating escape character");
#else
throw new ArgumentException("Missing terminating escape character", "original");
#endif
}
// include escape if this is not an escaped separator
if (Array.IndexOf(separators, original[endIndex]) < 0)
b.Append(escape);
b.Append(original[endIndex]);
}
else {
if (Array.IndexOf(separators, original[endIndex]) >= 0) {
result.Add(b.ToString());
b.Length = 0;
}
else {
b.Append(original[endIndex]);
}
}
}
}
return (string[])result.ToArray(typeof(string));
}
/// <summary>
/// Convert this filter to its string equivalent.
/// </summary>
/// <returns>The string equivalent for this filter.</returns>
public override string ToString()
{
return filter_;
}
/// <summary>
/// Test a value to see if it is included by the filter.
/// </summary>
/// <param name="name">The value to test.</param>
/// <returns>True if the value is included, false otherwise.</returns>
public bool IsIncluded(string name)
{
bool result = false;
if ( inclusions_.Count == 0 ) {
result = true;
}
else {
foreach ( Regex r in inclusions_ ) {
if ( r.IsMatch(name) ) {
result = true;
break;
}
}
}
return result;
}
/// <summary>
/// Test a value to see if it is excluded by the filter.
/// </summary>
/// <param name="name">The value to test.</param>
/// <returns>True if the value is excluded, false otherwise.</returns>
public bool IsExcluded(string name)
{
bool result = false;
foreach ( Regex r in exclusions_ ) {
if ( r.IsMatch(name) ) {
result = true;
break;
}
}
return result;
}
#region IScanFilter Members
/// <summary>
/// Test a value to see if it matches the filter.
/// </summary>
/// <param name="name">The value to test.</param>
/// <returns>True if the value matches, false otherwise.</returns>
public bool IsMatch(string name)
{
return (IsIncluded(name) && !IsExcluded(name));
}
#endregion
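// Illustrative usage sketch (editor's addition, not part of the original source), based on the
// example filter given in the class documentation:
//   NameFilter filter = new NameFilter(@"+\.dat$;-^dummy\.dat$");
//   filter.IsMatch("data.dat");   // true  (matches the include, not the exclude)
//   filter.IsMatch("dummy.dat");  // false (explicitly excluded)
//   filter.IsMatch("readme.txt"); // false (matches no include expression)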
/// <summary>
/// Compile this filter.
/// </summary>
void Compile()
{
// TODO: Check to see if combining RE's makes it faster/smaller.
// simple scheme would be to have one RE for inclusion and one for exclusion.
if ( filter_ == null ) {
return;
}
string[] items = SplitQuoted(filter_);
for ( int i = 0; i < items.Length; ++i ) {
if ( (items[i] != null) && (items[i].Length > 0) ) {
bool include = (items[i][0] != '-');
string toCompile;
if ( items[i][0] == '+' ) {
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else if ( items[i][0] == '-' ) {
toCompile = items[i].Substring(1, items[i].Length - 1);
}
else {
toCompile = items[i];
}
// NOTE: Regular expressions can fail to compile here for a number of reasons that cause an exception
// these are left unhandled here as the caller is responsible for ensuring all is valid.
// several functions IsValidFilterExpression and IsValidExpression are provided for such checking
if ( include ) {
inclusions_.Add(new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline));
}
else {
exclusions_.Add(new Regex(toCompile, RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.Singleline));
}
}
}
}
#region Instance Fields
string filter_;
ArrayList inclusions_;
ArrayList exclusions_;
#endregion
}
}
| {'content_hash': '539af5acfe8706d9c350138933c32fe1', 'timestamp': '', 'source': 'github', 'line_count': 250, 'max_line_length': 124, 'avg_line_length': 30.776, 'alnum_prop': 0.5999480114374838, 'repo_name': 'zmaruo/CocosSharp', 'id': 'ce51e216ece3da350bd5c59c7e30023acf5149f7', 'size': '9535', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'src/external lib/SharpZipLib/src/Core/NameFilter.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '10125413'}, {'name': 'Mask', 'bytes': '11462'}, {'name': 'NSIS', 'bytes': '14653'}, {'name': 'PowerShell', 'bytes': '2658'}, {'name': 'Python', 'bytes': '4067'}]} |
(function (root, factory) {
if (root === undefined && window !== undefined) root = window;
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module unless amdModuleId is set
define(["jquery"], function (a0) {
return (factory(a0));
});
} else if (typeof module === 'object' && module.exports) {
// Node. Does not work with strict CommonJS, but
// only CommonJS-like environments that support module.exports,
// like Node.
module.exports = factory(require("jquery"));
} else {
factory(root["jQuery"]);
}
}(this, function (jQuery) {
(function ($) {
$.fn.selectpicker.defaults = {
noneSelectedText: 'Nothing selected',
noneResultsText: 'No results match {0}',
countSelectedText: function (numSelected, numTotal) {
return (numSelected == 1) ? "{0} item selected" : "{0} items selected";
},
maxOptionsText: function (numAll, numGroup) {
return [
(numAll == 1) ? 'Limit reached ({n} item max)' : 'Limit reached ({n} items max)',
(numGroup == 1) ? 'Group limit reached ({n} item max)' : 'Group limit reached ({n} items max)'
];
},
selectAllText: 'Select All',
deselectAllText: 'Deselect All',
multipleSeparator: ', '
};
})(jQuery);
}));
| {'content_hash': 'de6f854b113f278930fda3e3d8e1ecdb', 'timestamp': '', 'source': 'github', 'line_count': 40, 'max_line_length': 102, 'avg_line_length': 33.225, 'alnum_prop': 0.5906696764484575, 'repo_name': 'ocadni/citychrone', 'id': '940be9d5ee83f523c4f8316269341530450b6beb', 'size': '1572', 'binary': False, 'copies': '7', 'ref': 'refs/heads/master', 'path': 'node_modules/bootstrap-select/docs/docs/dist/js/i18n/defaults-en_US.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '46395'}, {'name': 'HTML', 'bytes': '18635'}, {'name': 'JavaScript', 'bytes': '183571'}, {'name': 'Jupyter Notebook', 'bytes': '72'}, {'name': 'Shell', 'bytes': '296'}]} |
import functools
import inspect
__all__ = ("golang", "defer", "panic", "recover")
class GolangContext(object):
"""Golang context"""
def __init__(self):
self.defers = []
self.panic = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.do_defers()
# just throw the last uncatched panic
panic(self.panic)
def do_defers(self):
"""Call all defered functions in a reverse order. So, the \
function which called firstly in parent function will called\
lastly in this function"""
__golang_context__ = self
for defer in __golang_context__.defers:
try:
fn, args, kwargs, filename, lineno = defer
fn(*args, **kwargs)
except BaseException as ex:
__golang_context__.panic = ex
ex = __golang_context__.panic
def golang(func):
"""Golang context that allow python function to use\
some properties in golang. Usually, just use it as a
decorator.
:param func: function that wants to use golang properties.
:type func: function
"""
@functools.wraps(func)
def _golang_wrapper(*args, **kwargs):
ret = None
with GolangContext() as __golang_context__:
try:
ret = func(*args, **kwargs)
except Exception as ex:
__golang_context__.panic = ex
return ret
return _golang_wrapper
def defer(fn, *args, **kwargs):
"""Defer calling the specific function to the end of \
parent function anyway.
:param fn: function to call.
:type fn: function
:param \*args: args that pass to fn.
:type \*args: list
:param \*\*kwargs: kwargs that pass to fn.
:type \*\*kwargs: dict
:rtype: None
"""
frames = inspect.stack()
parent_frame = frames[1][0]
golang_frame = None
golang_context = None
try:
for frame in frames:
if '_golang_wrapper' in frame:
golang_frame = frame[0]
golang_context = golang_frame.f_locals.get('__golang_context__')
break
if not golang_frame or not golang_context:
raise RuntimeError('no golang context found.')
golang_context.defers.insert(
0, (fn, args, kwargs, parent_frame.f_code.co_filename, parent_frame.f_lineno))
finally:
del parent_frame
del golang_frame
del frames
def panic(ex):
"""Throw a panic
:param ex: panic is exception.
:type ex: exception
"""
if ex:
raise ex
def recover():
"""Recover from a panic in a golang context.
:rtype: exception. if there has no panic, return `None`.
"""
frames = inspect.stack()
defer_frame = None
golang_context = None
for frame in frames:
if 'do_defers' in frame:
defer_frame = frame[0]
golang_context = defer_frame.f_locals.get('__golang_context__')
break
if not defer_frame or not golang_context:
return None
panic = golang_context.panic
golang_context.panic = None
return panic
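# Illustrative usage sketch (editor's addition, not part of the original module).
# The function names and file paths below are made up for the example:
#
#     @golang
#     def copy_file(src, dst):
#         fin = open(src, 'rb')
#         defer(fin.close)   # runs when copy_file finishes, even if it raises
#         fout = open(dst, 'wb')
#         defer(fout.close)  # deferred calls run in reverse registration order
#         fout.write(fin.read())
#
#     @golang
#     def safe_div(a, b):
#         defer(lambda: print('recovered:', recover()))
#         return a / b       # a ZeroDivisionError becomes a recoverable panic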
| {'content_hash': 'a13681126c255bb8e545ef43aa4824f5', 'timestamp': '', 'source': 'github', 'line_count': 124, 'max_line_length': 90, 'avg_line_length': 25.774193548387096, 'alnum_prop': 0.5766583229036295, 'repo_name': '404Room/go-in-python', 'id': '640f0b60b86a371f323aec6f20a91a86d3a958a6', 'size': '3221', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'gopy/go.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Makefile', 'bytes': '172'}, {'name': 'Python', 'bytes': '5907'}]} |
<?php
use \Magento\Framework\Component\ComponentRegistrar;
ComponentRegistrar::register(ComponentRegistrar::MODULE, 'Botamp_Botamp', __DIR__);
| {'content_hash': 'b28753b75b905d0d6c2822a145341b3f', 'timestamp': '', 'source': 'github', 'line_count': 5, 'max_line_length': 83, 'avg_line_length': 29.0, 'alnum_prop': 0.7862068965517242, 'repo_name': 'botamp/botamp-magento', 'id': '2eb858fcede59285f9d855201cd9bb78972c46bb', 'size': '145', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'registration.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'HTML', 'bytes': '1805'}, {'name': 'JavaScript', 'bytes': '2732'}, {'name': 'PHP', 'bytes': '28295'}, {'name': 'Shell', 'bytes': '147'}]} |
<html>
<body>
<script>
if (window.testRunner) {
testRunner.waitUntilDone();
testRunner.dumpAsText();
testRunner.setCanOpenWindows();
testRunner.setCloseRemainingWindowsWhenComplete(true);
}
window.addEventListener("message", function (e) {
if (window.testRunner)
testRunner.notifyDone();
}, false);
</script>
<p>This test opens a window that loads insecure HTML5 audio and video. We should
trigger a mixed content callback because the main frame in the window is HTTPS but is
displaying insecure content.</p>
<script>
window.addEventListener('load', function () {
window.open("https://127.0.0.1:8443/security/mixedContent/resources/frame-with-insecure-audio-video.html");
});
</script>
</body>
</html>
| {'content_hash': '7d83f1fdc138f2f026e48d1f06dbd92c', 'timestamp': '', 'source': 'github', 'line_count': 26, 'max_line_length': 111, 'avg_line_length': 28.346153846153847, 'alnum_prop': 0.7299864314789688, 'repo_name': 'vadimtk/chrome4sdp', 'id': '9d425deeca0519447459a4e63bc5e60a2c22da17', 'size': '737', 'binary': False, 'copies': '44', 'ref': 'refs/heads/master', 'path': 'third_party/WebKit/LayoutTests/http/tests/security/mixedContent/insecure-audio-video-in-main-frame.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': []} |
<html xmlns="http://www.w3.org/1999/xhtml"><head><title>Zend Gdata API Documentation</title><meta http-equiv="Content-Type" content="text/html; charset=utf-8"></meta><link rel="stylesheet" href="css/black-tie/jquery-ui-1.8.2.custom.css" type="text/css"></link><link rel="stylesheet" href="css/jquery.treeview.css" type="text/css"></link><link rel="stylesheet" href="css/theme.css" type="text/css"></link><script type="text/javascript" src="js/jquery-1.4.2.min.js"></script><script type="text/javascript" src="js/jquery-ui-1.8.2.custom.min.js"></script><script type="text/javascript" src="js/jquery.cookie.js"></script><script type="text/javascript" src="js/jquery.treeview.js"></script><script type="text/javascript">
$(document).ready(function() {
$(".filetree").treeview({
collapsed: true,
persist: "cookie"
});
$("#accordion").accordion({
collapsible: true,
autoHeight: false,
fillSpace: true
});
$(".tabs").tabs();
});
</script></head><body><div xmlns="" class="content">
<div class="sub-page-main-header-api-documentation"><h2>API Documentation</h2></div>
<div class="dotted-line"></div>
</div>
<div xmlns="" id="content">
<script type="text/javascript" src="js/menu.js"></script><script>
$(document).ready(function() {
$('a.gripper').click(function() {
$(this).nextAll('div.code-tabs').slideToggle();
$(this).children('img').toggle();
return false;
});
$('div.code-tabs').hide();
$('a.gripper').show();
$('div.file-nav').show();
});
</script><h1 class="file">Gdata/Calendar/Extension/Timezone.php</h1>
<div class="file-nav"><ul id="file-nav">
<li><a href="#top">Global</a></li>
<li>
<a href="#classes"><img src="images/icons/class.png" height="14">
Classes
</a><ul><li><a href="#%5CZend_Gdata_Calendar_Extension_Timezone">\Zend_Gdata_Calendar_Extension_Timezone</a></li></ul>
</li>
</ul></div>
<a name="top"></a><div id="file-description">
<p class="short-description">Zend Framework</p>
<div class="long-description"><p>LICENSE</p>
<p>This source file is subject to the new BSD license that is bundled
with this package in the file LICENSE.txt.
It is also available through the world-wide-web at this URL:
http://framework.zend.com/license/new-bsd
If you did not receive a copy of the license and are unable to
obtain it through the world-wide-web, please send an email
to [email protected] so we can send you a copy immediately.</p>
</div>
</div>
<dl class="file-info">
<dt>category</dt>
<dd>Zend
</dd>
<dt>copyright</dt>
<dd>Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
</dd>
<dt>license</dt>
<dd>
<a href="http://framework.zend.com/license/new-bsd">New BSD License</a>
</dd>
<dt>package</dt>
<dd>Zend_Gdata
</dd>
<dt>subpackage</dt>
<dd>Calendar
</dd>
<dt>version</dt>
<dd>$Id: Timezone.php 23775 2011-03-01 17:25:24Z ralph $
</dd>
</dl>
<a name="classes"></a><a id="\Zend_Gdata_Calendar_Extension_Timezone"></a><h2 class="class">\Zend_Gdata_Calendar_Extension_Timezone<div class="to-top"><a href="#top">jump to top</a></div>
</h2>
<div class="class">
<p class="short-description">Represents the gCal:timezone element used by the Calendar data API</p>
<div class="long-description">
</div>
<dl class="class-info">
<dt>Extends from</dt>
<dd><a href="db_Gdata_Extension.html#%5CZend_Gdata_Extension">\Zend_Gdata_Extension</a></dd>
<dt>category</dt>
<dd>Zend
</dd>
<dt>copyright</dt>
<dd>Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
</dd>
<dt>license</dt>
<dd>
<a href="http://framework.zend.com/license/new-bsd">New BSD License</a>
</dd>
<dt>package</dt>
<dd>Zend_Gdata
</dd>
<dt>subpackage</dt>
<dd>Calendar
</dd>
</dl>
<h3>Properties</h3>
<div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::$_rootElement"></a><div class="property">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/property.png" alt="Property"><img src="images/icons/visibility_protected.png" style="margin-right: 5px" alt="protected">string
<span class="highlight">$_rootElement</span>= 'timezone'
</code><div class="description">
<p class="short-description"></p>The XML element name, including prefix if desired</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Default value</strong><code>timezone</code><strong>Details</strong><dl class="property-info">
<dt>Type</dt>
<dd>string</dd>
</dl>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::$_rootNamespace"></a><div class="property">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/property.png" alt="Property"><img src="images/icons/visibility_protected.png" style="margin-right: 5px" alt="protected">string
<span class="highlight">$_rootNamespace</span>= 'gCal'
</code><div class="description">
<p class="short-description"></p>The XML namespace prefix</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Default value</strong><code>gCal</code><strong>Details</strong><dl class="property-info">
<dt>Type</dt>
<dd>string</dd>
</dl>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::$_value"></a><div class="property">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/property.png" alt="Property"><img src="images/icons/visibility_protected.png" style="margin-right: 5px" alt="protected">
<span class="highlight">$_value</span>= 'null'
</code><div class="description"></div>
<div class="code-tabs">
<strong>Default value</strong><code>null</code><strong>Details</strong><dl class="property-info">
<dt>Type</dt>
<dd></dd>
</dl>
</div>
<div class="clear"></div>
</div>
</div>
<h3>Methods</h3>
<div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::__construct()"></a><div class="method">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">__construct</span><span class="nb-faded-text">(
string $value
=
null
)
</span>
:
void</code><div class="description"><p class="short_description">Constructs a new Zend_Gdata_Calendar_Extension_Timezone object.</p></div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$value</th>
<td>string</td>
<td><em><p>(optional) The text content of the element.</p>
</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::__get()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">__get</span><span class="nb-faded-text">(
string $name
)
</span>
:
void</code><div class="description">
<p class="short_description">Magic getter to allow access like $entry->foo to call $entry->getFoo()
Alternatively, if no getFoo() is defined, but a $_foo protected variable
is defined, this is returned.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::__get()">\Zend_Gdata_App_Base::__get()</a></small>
</div>
<div class="code-tabs">
<div class="long-description"><p>TODO Remove ability to bypass getFoo() methods??</p>
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$name</th>
<td>string</td>
<td><em>The variable name sought</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::__toString()"></a><div class="method">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">__toString</span><span class="nb-faded-text">(
)
</span>
:
            void</code><div class="description"><p class="short_description">Magic toString method allows using this directly via echo.
Works best in PHP >= 4.2.0</p></div>
<div class="code-tabs"><div class="long-description">
</div></div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::encode()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">encode</span><span class="nb-faded-text">(
)
</span>
:
string</code><div class="description">
<p class="short_description">Alias for saveXML()</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::encode()">\Zend_Gdata_App_Base::encode()</a></small>
</div>
<div class="code-tabs">
<div class="long-description"><p>Can be overridden by children to provide more complex representations
of entries.</p>
</div>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>string</td>
<td>Encoded string content</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::flushNamespaceLookupCache()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">flushNamespaceLookupCache</span><span class="nb-faded-text">(
)
</span>
:
void</code><div class="description">
<span class="attribute">static</span><p class="short_description">Flush namespace lookup cache.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::flushNamespaceLookupCache()">\Zend_Gdata_App_Base::flushNamespaceLookupCache()</a></small>
</div>
<div class="code-tabs"><div class="long-description"><p>Empties the namespace lookup cache. Call this function if you have
added data to the namespace lookup table that contradicts values that
may have been cached during a previous call to lookupNamespace().</p>
</div></div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::getDOM()"></a><div class="method">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">getDOM</span><span class="nb-faded-text">(
<a href="http://php.net/manual/en/class.domdocument.php">\DOMDocument</a> $doc
=
null, $majorVersion
=
1, $minorVersion
=
null
)
</span>
:
<a href="http://php.net/manual/en/class.domelement.php">\DOMElement</a></code><div class="description"><p class="short_description">Retrieves a DOMElement which corresponds to this element and all
child properties. This is used to build an entry back into a DOM
and eventually XML text for sending to the server upon updates, or
for application storage/persistence.</p></div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$doc</th>
<td><a href="http://php.net/manual/en/class.domdocument.php">\DOMDocument</a></td>
<td><em>The DOMDocument used to construct DOMElements</em></td>
</tr>
<tr>
<th>$majorVersion</th>
<td></td>
<td><em></em></td>
</tr>
<tr>
<th>$minorVersion</th>
<td></td>
<td><em></em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td><a href="http://php.net/manual/en/class.domelement.php">\DOMElement</a></td>
<td>The DOMElement representing this element and all child properties.</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::getExtensionAttributes()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">getExtensionAttributes</span><span class="nb-faded-text">(
)
</span>
:
array</code><div class="description">
<p class="short_description">Returns an array of all extension attributes not transformed into data
model properties during parsing of the XML. Each element of the array
is a hashed array of the format:
array('namespaceUri' => string, 'name' => string, 'value' => string);</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::getExtensionAttributes()">\Zend_Gdata_App_Base::getExtensionAttributes()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>array</td>
<td>All extension attributes</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::getExtensionElements()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">getExtensionElements</span><span class="nb-faded-text">(
)
</span>
:
array</code><div class="description">
<p class="short_description">Returns an array of all elements not matched to data model classes
during the parsing of the XML</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::getExtensionElements()">\Zend_Gdata_App_Base::getExtensionElements()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>array</td>
<td>All elements not matched to data model classes during parsing</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::getText()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">getText</span><span class="nb-faded-text">(
$trim
=
true
)
</span>
:
string</code><div class="description">
<p class="short_description">Returns the child text node of this element
This represents any raw text contained within the XML element</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::getText()">\Zend_Gdata_App_Base::getText()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$trim</th>
<td></td>
<td><em></em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>string</td>
<td>Child text node</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::getValue()"></a><div class="method">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">getValue</span><span class="nb-faded-text">(
)
</span>
:
string</code><div class="description"><p class="short_description">Get the value for this element's value attribute.</p></div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>string</td>
<td>The value associated with this attribute.</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::getXML()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">getXML</span><span class="nb-faded-text">(
)
</span>
:
string</code><div class="description">
<p class="short_description">Alias for saveXML() returns XML content for this element and all
children</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::getXML()">\Zend_Gdata_App_Base::getXML()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>string</td>
<td>XML content</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::lookupNamespace()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">lookupNamespace</span><span class="nb-faded-text">(
string $prefix, integer $majorVersion
=
1, integer $minorVersion
=
null
)
</span>
:
string</code><div class="description">
<p class="short_description">Get the full version of a namespace prefix</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::lookupNamespace()">\Zend_Gdata_App_Base::lookupNamespace()</a></small>
</div>
<div class="code-tabs">
<div class="long-description"><p>Looks up a prefix (atom:, etc.) in the list of registered
namespaces and returns the full namespace URI if
available. Returns the prefix, unmodified, if it's not
registered.</p>
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$prefix</th>
<td>string</td>
<td><em>The namespace prefix to lookup.</em></td>
</tr>
<tr>
<th>$majorVersion</th>
<td>integer</td>
<td><em><p>The major protocol version in effect. Defaults to '1'.</p>
</em></td>
</tr>
<tr>
<th>$minorVersion</th>
<td>integer</td>
<td><em><p>The minor protocol version in effect. Defaults to null (use latest).</p>
</em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>string</td>
<td></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::registerAllNamespaces()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">registerAllNamespaces</span><span class="nb-faded-text">(
array $namespaceArray
)
</span>
:
void</code><div class="description">
<p class="short_description">Add an array of namespaces to the registered list.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::registerAllNamespaces()">\Zend_Gdata_App_Base::registerAllNamespaces()</a></small>
</div>
<div class="code-tabs">
<div class="long-description"><p>Takes an array in the format of:
namespace prefix, namespace URI, major protocol version,
minor protocol version and adds them with calls to ->registerNamespace()</p>
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$namespaceArray</th>
<td>array</td>
<td><em>An array of namespaces.</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::registerNamespace()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">registerNamespace</span><span class="nb-faded-text">(
string $prefix, string $namespaceUri, integer $majorVersion
=
1, integer $minorVersion
=
0
)
</span>
:
void</code><div class="description">
<p class="short_description">Add a namespace and prefix to the registered list</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::registerNamespace()">\Zend_Gdata_App_Base::registerNamespace()</a></small>
</div>
<div class="code-tabs">
<div class="long-description"><p>Takes a prefix and a full namespace URI and adds them to the
list of registered namespaces for use by
$this->lookupNamespace().</p>
<p>WARNING: Currently, registering a namespace will NOT invalidate any
memoized data stored in $_namespaceLookupCache. Under normal
use, this behavior is acceptable. If you are adding
contradictory data to the namespace lookup table, you must
call flushNamespaceLookupCache().</p>
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$prefix</th>
<td>string</td>
<td><em>The namespace prefix</em></td>
</tr>
<tr>
<th>$namespaceUri</th>
<td>string</td>
<td><em>The full namespace URI</em></td>
</tr>
<tr>
<th>$majorVersion</th>
<td>integer</td>
<td><em><p>The major protocol version in effect. Defaults to '1'.</p>
</em></td>
</tr>
<tr>
<th>$minorVersion</th>
<td>integer</td>
<td><em><p>The minor protocol version in effect. Defaults to null (use latest).</p>
</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::saveXML()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">saveXML</span><span class="nb-faded-text">(
)
</span>
:
string</code><div class="description">
<p class="short_description">Converts this element and all children into XML text using getDOM()</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::saveXML()">\Zend_Gdata_App_Base::saveXML()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td>string</td>
<td>XML content</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::setExtensionAttributes()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">setExtensionAttributes</span><span class="nb-faded-text">(
array $value
)
</span>
:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base">\Zend_Gdata_App_Base</a></code><div class="description">
<p class="short_description">Sets an array of all extension attributes not transformed into data
model properties during parsing of the XML. Each element of the array
is a hashed array of the format:
array('namespaceUri' => string, 'name' => string, 'value' => string);
This can be used to add arbitrary attributes to any data model element</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::setExtensionAttributes()">\Zend_Gdata_App_Base::setExtensionAttributes()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$value</th>
<td>array</td>
<td><em>All extension attributes</em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td><a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base">\Zend_Gdata_App_Base</a></td>
<td>Returns an object of the same type as 'this' to provide a fluent interface.</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::setExtensionElements()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">setExtensionElements</span><span class="nb-faded-text">(
array $value
)
</span>
:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base">\Zend_Gdata_App_Base</a></code><div class="description">
<p class="short_description">Sets an array of all elements not matched to data model classes
during the parsing of the XML. This method can be used to add arbitrary
child XML elements to any data model class.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::setExtensionElements()">\Zend_Gdata_App_Base::setExtensionElements()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$value</th>
<td>array</td>
<td><em>All extension elements</em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td><a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base">\Zend_Gdata_App_Base</a></td>
<td>Returns an object of the same type as 'this' to provide a fluent interface.</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::setText()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">setText</span><span class="nb-faded-text">(
string $value
)
</span>
:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base">\Zend_Gdata_App_Base</a></code><div class="description">
<p class="short_description">Sets the child text node of this element
This represents any raw text contained within the XML element</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::setText()">\Zend_Gdata_App_Base::setText()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$value</th>
<td>string</td>
<td><em>Child text node</em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td><a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base">\Zend_Gdata_App_Base</a></td>
<td>Returns an object of the same type as 'this' to provide a fluent interface.</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::setValue()"></a><div class="method">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">setValue</span><span class="nb-faded-text">(
string $value
)
</span>
:
<a href="db_Gdata_Calendar_Extension_Timezone.html#%5CZend_Gdata_Calendar_Extension_Timezone">\Zend_Gdata_Calendar_Extension_Timezone</a></code><div class="description"><p class="short_description">Set the value for this element's value attribute.</p></div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$value</th>
<td>string</td>
<td><em>The desired value for this attribute.</em></td>
</tr>
</table>
<strong>Returns</strong><table class="argument-info">
<thead><tr>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<td><a href="db_Gdata_Calendar_Extension_Timezone.html#%5CZend_Gdata_Calendar_Extension_Timezone">\Zend_Gdata_Calendar_Extension_Timezone</a></td>
<td>The element being modified.</td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::takeAttributeFromDOM()"></a><div class="method">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_protected.png" style="margin-right: 5px" alt="protected"><span class="highlight">takeAttributeFromDOM</span><span class="nb-faded-text">(
<a href="http://php.net/manual/en/class.domnode.php">\DOMNode</a> $attribute
)
</span>
:
void</code><div class="description"><p class="short_description">Given a DOMNode representing an attribute, tries to map the data into
instance members. If no mapping is defined, the name and value are
stored in an array.</p></div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$attribute</th>
<td><a href="http://php.net/manual/en/class.domnode.php">\DOMNode</a></td>
<td><em>The DOMNode attribute needed to be handled</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::takeChildFromDOM()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_protected.png" style="margin-right: 5px" alt="protected"><span class="highlight">takeChildFromDOM</span><span class="nb-faded-text">(
<a href="http://php.net/manual/en/class.domnode.php">\DOMNode</a> $child
)
</span>
:
void</code><div class="description">
<p class="short_description">Given a child DOMNode, tries to determine how to map the data into
object instance members. If no mapping is defined, Extension_Element
objects are created and stored in an array.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::takeChildFromDOM()">\Zend_Gdata_App_Base::takeChildFromDOM()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$child</th>
<td><a href="http://php.net/manual/en/class.domnode.php">\DOMNode</a></td>
<td><em>The DOMNode needed to be handled</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::transferFromDOM()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">transferFromDOM</span><span class="nb-faded-text">(
<a href="http://php.net/manual/en/class.domnode.php">\DOMNode</a> $node
)
</span>
:
void</code><div class="description">
<p class="short_description">Transfers each child and attribute into member variables.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::transferFromDOM()">\Zend_Gdata_App_Base::transferFromDOM()</a></small>
</div>
<div class="code-tabs">
<div class="long-description"><p>This is called when XML is received over the wire and the data
model needs to be built to represent this XML.</p>
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$node</th>
<td><a href="http://php.net/manual/en/class.domnode.php">\DOMNode</a></td>
<td><em><p>The DOMNode that represents this object's data</p>
</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
<a id="\Zend_Gdata_Calendar_Extension_Timezone::transferFromXML()"></a><div class="method inherited_from ">
<a href="#" class="gripper"><img src="images/icons/arrow_right.png"><img src="images/icons/arrow_down.png" style="display: none;"></a><code><img src="images/icons/method.png" alt="method"><img src="images/icons/visibility_public.png" style="margin-right: 5px" alt="public"><span class="highlight">transferFromXML</span><span class="nb-faded-text">(
string $xml
)
</span>
:
void</code><div class="description">
<p class="short_description">Parses the provided XML text and generates data model classes for
each known element by turning the XML text into a DOM tree and calling
transferFromDOM($element). The first data model element with the same
name as $this->_rootElement is used and the child elements are
recursively parsed.</p>
<small>Inherited from:
<a href="db_Gdata_App_Base.html#%5CZend_Gdata_App_Base::transferFromXML()">\Zend_Gdata_App_Base::transferFromXML()</a></small>
</div>
<div class="code-tabs">
<div class="long-description">
</div>
<strong>Parameters</strong><table class="argument-info">
<thead><tr>
<th>Name</th>
<th>Type</th>
<th>Description</th>
</tr></thead>
<tr>
<th>$xml</th>
<td>string</td>
<td><em>The XML text to parse</em></td>
</tr>
</table>
</div>
<div class="clear"></div>
</div>
</div>
</div>
</div>
<small xmlns="" class="footer">Documentation was generated by <a href="http://docblox-project.org">DocBlox 0.13.3</a>.
</small></body></html>
| {'content_hash': 'ecd3f128ec7303c44e39dfa23b50a4d2', 'timestamp': '', 'source': 'github', 'line_count': 939, 'max_line_length': 717, 'avg_line_length': 40.674121405750796, 'alnum_prop': 0.6611420941010133, 'repo_name': 'Amasy/Airtel_Klub254', 'id': 'bb2bfa2bbeb9bfbf78adea079ddb2dfa48a19c05', 'size': '38229', 'binary': False, 'copies': '5', 'ref': 'refs/heads/master', 'path': 'documentation/api/core/db_Gdata_Calendar_Extension_Timezone.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'JavaScript', 'bytes': '83719'}, {'name': 'PHP', 'bytes': '2932818'}]} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO.BACnet;
using System.IO;
namespace BaCSharp
{
public class BacnetFile:BaCSharpObject
{
[BaCSharpType(BacnetApplicationTags.BACNET_APPLICATION_TAG_CHARACTER_STRING)]
public virtual String PROP_FILE_TYPE
{
get { return "Binary"; }
}
public bool m_PROP_READ_ONLY;
[BaCSharpType(BacnetApplicationTags.BACNET_APPLICATION_TAG_BOOLEAN)]
public virtual bool PROP_READ_ONLY
{
get { return m_PROP_READ_ONLY; }
}
[BaCSharpType(BacnetApplicationTags.BACNET_APPLICATION_TAG_BOOLEAN)]
public virtual bool PROP_ARCHIVE
{
get { return false; }
}
[BaCSharpType(BacnetApplicationTags.BACNET_APPLICATION_TAG_ENUMERATED)]
public virtual uint PROP_FILE_ACCESS_METHOD
{
get { return 1; } // FILE_STREAM_ACCESS
}
[BaCSharpType(BacnetApplicationTags.BACNET_APPLICATION_TAG_UNSIGNED_INT)]
public virtual uint PROP_FILE_SIZE
{
get {
try
{
FileInfo fi = new FileInfo(FilePath);
return (uint)fi.Length;
}
catch { return 0; } // no way to return -1 or other
}
}
public String FilePath;
public BacnetFile(int ObjId, String ObjName, String Description, String FilePath, bool ReadOnly)
: base(new BacnetObjectId(BacnetObjectTypes.OBJECT_FILE, (uint)ObjId), ObjName, Description)
{
m_PROP_READ_ONLY = ReadOnly;
this.FilePath = FilePath;
}
public BacnetFile() { }
        public virtual byte[] ReadFileBlock(int position, int quantity)
        {
            try
            {
                byte[] b = new byte[quantity];
                // 'using' guarantees the stream is disposed even if Seek/Read throws
                using (FileStream fs = new FileStream(FilePath, FileMode.Open, FileAccess.Read))
                {
                    fs.Seek(position, SeekOrigin.Begin);
                    fs.Read(b, 0, quantity);
                }
                return b;
            }
            catch { }
            return null;
        }
        public virtual bool WriteFileBlock(byte[] block, int position, int quantity)
        {
            try
            {
                // Position 0 starts a new file, any other position appends;
                // 'using' guarantees the stream is disposed even if Write throws.
                using (FileStream fs = new FileStream(FilePath,
                    position == 0 ? FileMode.Create : FileMode.Append))
                {
                    fs.Write(block, 0, quantity);
                }
                return true;
            }
            catch { }
            return false;
        }
}
}
| {'content_hash': '0188d728963d877f385c363a5cd984cc', 'timestamp': '', 'source': 'github', 'line_count': 93, 'max_line_length': 105, 'avg_line_length': 29.731182795698924, 'alnum_prop': 0.5312839059674502, 'repo_name': 'temcocontrols/T3000_Building_Automation_System', 'id': 'd1dae3dba9345e23e0592d4e5d3fbd8e47b1d580', 'size': '4072', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'BacnetExplore/CodeExamples/AnotherStorageImplementation/BacnetObjects/BacnetFile.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Batchfile', 'bytes': '555'}, {'name': 'C', 'bytes': '4704332'}, {'name': 'C#', 'bytes': '9337521'}, {'name': 'C++', 'bytes': '15665753'}, {'name': 'CMake', 'bytes': '169395'}, {'name': 'CSS', 'bytes': '111688'}, {'name': 'Dockerfile', 'bytes': '210'}, {'name': 'HTML', 'bytes': '125316'}, {'name': 'Inno Setup', 'bytes': '5879'}, {'name': 'JavaScript', 'bytes': '1789138'}, {'name': 'Makefile', 'bytes': '6851'}, {'name': 'Meson', 'bytes': '7623'}, {'name': 'NASL', 'bytes': '14427'}, {'name': 'Objective-C', 'bytes': '7094'}, {'name': 'Perl', 'bytes': '40922'}, {'name': 'PowerShell', 'bytes': '4726'}, {'name': 'Python', 'bytes': '1992'}, {'name': 'Shell', 'bytes': '143'}]} |
#pragma once
#include <aws/iam/IAM_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace IAM
{
namespace Model
{
enum class EncodingType
{
NOT_SET,
SSH,
PEM
};
namespace EncodingTypeMapper
{
AWS_IAM_API EncodingType GetEncodingTypeForName(const Aws::String& name);
AWS_IAM_API Aws::String GetNameForEncodingType(EncodingType value);
} // namespace EncodingTypeMapper
} // namespace Model
} // namespace IAM
} // namespace Aws
| {'content_hash': '70450efd5be747ee2f339368a0cc4e3a', 'timestamp': '', 'source': 'github', 'line_count': 27, 'max_line_length': 73, 'avg_line_length': 17.77777777777778, 'alnum_prop': 0.7354166666666667, 'repo_name': 'bizzarri/aws-sdk-cpp', 'id': 'd6904c15a557ba8200324819b72afa483903293a', 'size': '1051', 'binary': False, 'copies': '9', 'ref': 'refs/heads/master', 'path': 'aws-cpp-sdk-iam/include/aws/iam/model/encodingType.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '7365'}, {'name': 'C++', 'bytes': '30634548'}, {'name': 'CMake', 'bytes': '226208'}, {'name': 'Java', 'bytes': '3886'}, {'name': 'Python', 'bytes': '21069'}]} |
package org.apache.ignite.internal.jdbc;
import org.apache.ignite.internal.client.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import java.io.*;
import java.math.*;
import java.net.*;
import java.sql.*;
import java.sql.Date;
import java.util.*;
/**
* JDBC result set implementation.
*/
public class JdbcResultSet implements ResultSet {
/** Task name. */
private static final String TASK_NAME =
"org.apache.ignite.internal.processors.cache.query.jdbc.GridCacheQueryJdbcTask";
/** Statement. */
private final JdbcStatement stmt;
/** Node ID. */
private final UUID nodeId;
/** Future ID. */
private final UUID futId;
/** Table names. */
private final List<String> tbls;
/** Column names. */
private final List<String> cols;
/** Class names. */
private final List<String> types;
/** Fields iterator. */
private Iterator<List<Object>> fields;
/** Finished flag. */
private boolean finished;
/** Current position. */
private int pos;
/** Current. */
private List<Object> curr;
/** Closed flag. */
private boolean closed;
/** Was {@code NULL} flag. */
private boolean wasNull;
/** Fetch size. */
private int fetchSize;
/**
* Creates new result set.
*
* @param stmt Statement.
* @param nodeId Node ID.
* @param futId Future ID.
* @param tbls Table names.
* @param cols Column names.
* @param types Types.
* @param fields Fields.
* @param finished Finished flag.
* @param fetchSize Fetch size.
*/
JdbcResultSet(JdbcStatement stmt, UUID nodeId, UUID futId,
List<String> tbls, List<String> cols, List<String> types,
Collection<List<Object>> fields, boolean finished, int fetchSize) {
assert stmt != null;
assert nodeId != null;
assert futId != null;
assert tbls != null;
assert cols != null;
assert types != null;
assert fields != null;
assert fetchSize > 0;
this.stmt = stmt;
this.nodeId = nodeId;
this.futId = futId;
this.tbls = tbls;
this.cols = cols;
this.types = types;
this.fetchSize = fetchSize;
this.fields = fields.iterator();
this.finished = finished;
}
/**
* Creates new result set with predefined fields.
* Result set created with this constructor will
* never execute remote tasks.
*
* @param stmt Statement.
* @param tbls Table names.
* @param cols Column names.
* @param types Types.
* @param fields Fields.
*/
JdbcResultSet(JdbcStatement stmt, List<String> tbls, List<String> cols,
List<String> types, Collection<List<Object>> fields) {
assert stmt != null;
assert tbls != null;
assert cols != null;
assert types != null;
assert fields != null;
this.stmt = stmt;
this.tbls = tbls;
this.cols = cols;
this.types = types;
this.fields = fields.iterator();
nodeId = null;
futId = null;
// Prevent task execution.
finished = true;
}
/** {@inheritDoc} */
@Override public boolean next() throws SQLException {
ensureNotClosed();
if (fields == null && !finished) {
assert nodeId != null;
assert futId != null;
try {
GridClientCompute compute = stmt.connection().client().compute();
GridClientCompute prj = compute.projection(compute.node(nodeId));
byte[] packet = prj.execute(TASK_NAME, JdbcUtils.marshalArgument(
JdbcUtils.taskArgument(nodeId, futId, fetchSize, stmt.getMaxRows())));
byte status = packet[0];
byte[] data = new byte[packet.length - 1];
U.arrayCopy(packet, 1, data, 0, data.length);
if (status == 1)
throw JdbcUtils.unmarshalError(data);
else {
List<?> msg = JdbcUtils.unmarshal(data);
assert msg.size() == 2;
fields = ((Collection<List<Object>>)msg.get(0)).iterator();
finished = (Boolean)msg.get(1);
}
}
catch (GridClientException e) {
throw new SQLException("Failed to query Ignite.", e);
}
}
if (fields != null && fields.hasNext()) {
curr = fields.next();
if (!fields.hasNext())
fields = null;
pos++;
return true;
}
else {
curr = null;
return false;
}
}
/** {@inheritDoc} */
@Override public void close() throws SQLException {
closed = true;
}
/** {@inheritDoc} */
@Override public boolean wasNull() throws SQLException {
return wasNull;
}
/** {@inheritDoc} */
@Override public String getString(int colIdx) throws SQLException {
return getTypedValue(colIdx, String.class);
}
/** {@inheritDoc} */
@Override public boolean getBoolean(int colIdx) throws SQLException {
Boolean val = getTypedValue(colIdx, Boolean.class);
return val != null ? val : false;
}
/** {@inheritDoc} */
@Override public byte getByte(int colIdx) throws SQLException {
Byte val = getTypedValue(colIdx, Byte.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public short getShort(int colIdx) throws SQLException {
Short val = getTypedValue(colIdx, Short.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public int getInt(int colIdx) throws SQLException {
Integer val = getTypedValue(colIdx, Integer.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public long getLong(int colIdx) throws SQLException {
Long val = getTypedValue(colIdx, Long.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public float getFloat(int colIdx) throws SQLException {
Float val = getTypedValue(colIdx, Float.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public double getDouble(int colIdx) throws SQLException {
Double val = getTypedValue(colIdx, Double.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public BigDecimal getBigDecimal(int colIdx, int scale) throws SQLException {
return getTypedValue(colIdx, BigDecimal.class);
}
/** {@inheritDoc} */
@Override public byte[] getBytes(int colIdx) throws SQLException {
return getTypedValue(colIdx, byte[].class);
}
/** {@inheritDoc} */
@Override public Date getDate(int colIdx) throws SQLException {
return getTypedValue(colIdx, Date.class);
}
/** {@inheritDoc} */
@Override public Time getTime(int colIdx) throws SQLException {
return getTypedValue(colIdx, Time.class);
}
/** {@inheritDoc} */
@Override public Timestamp getTimestamp(int colIdx) throws SQLException {
return getTypedValue(colIdx, Timestamp.class);
}
/** {@inheritDoc} */
@Override public InputStream getAsciiStream(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public InputStream getUnicodeStream(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public InputStream getBinaryStream(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Stream are not supported.");
}
/** {@inheritDoc} */
@Override public String getString(String colLb) throws SQLException {
return getTypedValue(colLb, String.class);
}
/** {@inheritDoc} */
@Override public boolean getBoolean(String colLb) throws SQLException {
Boolean val = getTypedValue(colLb, Boolean.class);
return val != null ? val : false;
}
/** {@inheritDoc} */
@Override public byte getByte(String colLb) throws SQLException {
Byte val = getTypedValue(colLb, Byte.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public short getShort(String colLb) throws SQLException {
Short val = getTypedValue(colLb, Short.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public int getInt(String colLb) throws SQLException {
Integer val = getTypedValue(colLb, Integer.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public long getLong(String colLb) throws SQLException {
Long val = getTypedValue(colLb, Long.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public float getFloat(String colLb) throws SQLException {
Float val = getTypedValue(colLb, Float.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public double getDouble(String colLb) throws SQLException {
Double val = getTypedValue(colLb, Double.class);
return val != null ? val : 0;
}
/** {@inheritDoc} */
@Override public BigDecimal getBigDecimal(String colLb, int scale) throws SQLException {
return getTypedValue(colLb, BigDecimal.class);
}
/** {@inheritDoc} */
@Override public byte[] getBytes(String colLb) throws SQLException {
return getTypedValue(colLb, byte[].class);
}
/** {@inheritDoc} */
@Override public Date getDate(String colLb) throws SQLException {
return getTypedValue(colLb, Date.class);
}
/** {@inheritDoc} */
@Override public Time getTime(String colLb) throws SQLException {
return getTypedValue(colLb, Time.class);
}
/** {@inheritDoc} */
@Override public Timestamp getTimestamp(String colLb) throws SQLException {
return getTypedValue(colLb, Timestamp.class);
}
/** {@inheritDoc} */
@Override public InputStream getAsciiStream(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public InputStream getUnicodeStream(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public InputStream getBinaryStream(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public SQLWarning getWarnings() throws SQLException {
ensureNotClosed();
return null;
}
/** {@inheritDoc} */
@Override public void clearWarnings() throws SQLException {
ensureNotClosed();
}
/** {@inheritDoc} */
@Override public String getCursorName() throws SQLException {
ensureNotClosed();
return null;
}
/** {@inheritDoc} */
@Override public ResultSetMetaData getMetaData() throws SQLException {
ensureNotClosed();
return new JdbcResultSetMetadata(tbls, cols, types);
}
/** {@inheritDoc} */
@Override public Object getObject(int colIdx) throws SQLException {
return getTypedValue(colIdx, Object.class);
}
/** {@inheritDoc} */
@Override public Object getObject(String colLb) throws SQLException {
return getTypedValue(colLb, Object.class);
}
/** {@inheritDoc} */
@Override public int findColumn(String colLb) throws SQLException {
ensureNotClosed();
int idx = cols.indexOf(colLb.toUpperCase());
if (idx == -1)
throw new SQLException("Column not found: " + colLb);
return idx + 1;
}
/** {@inheritDoc} */
@Override public Reader getCharacterStream(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public Reader getCharacterStream(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Streams are not supported.");
}
/** {@inheritDoc} */
@Override public BigDecimal getBigDecimal(int colIdx) throws SQLException {
return getTypedValue(colIdx, BigDecimal.class);
}
/** {@inheritDoc} */
@Override public BigDecimal getBigDecimal(String colLb) throws SQLException {
return getTypedValue(colLb, BigDecimal.class);
}
/** {@inheritDoc} */
@Override public boolean isBeforeFirst() throws SQLException {
ensureNotClosed();
return pos < 1;
}
/** {@inheritDoc} */
@Override public boolean isAfterLast() throws SQLException {
ensureNotClosed();
return finished && fields == null && curr == null;
}
/** {@inheritDoc} */
@Override public boolean isFirst() throws SQLException {
ensureNotClosed();
return pos == 1;
}
/** {@inheritDoc} */
@Override public boolean isLast() throws SQLException {
ensureNotClosed();
return finished && fields == null && curr != null;
}
/** {@inheritDoc} */
@Override public void beforeFirst() throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public void afterLast() throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public boolean first() throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public boolean last() throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public int getRow() throws SQLException {
ensureNotClosed();
return isAfterLast() ? 0 : pos;
}
/** {@inheritDoc} */
@Override public boolean absolute(int row) throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public boolean relative(int rows) throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public boolean previous() throws SQLException {
ensureNotClosed();
throw new SQLException("Result set is forward-only.");
}
/** {@inheritDoc} */
@Override public void setFetchDirection(int direction) throws SQLException {
ensureNotClosed();
if (direction != FETCH_FORWARD)
throw new SQLFeatureNotSupportedException("Only forward direction is supported");
}
/** {@inheritDoc} */
@Override public int getFetchDirection() throws SQLException {
ensureNotClosed();
return FETCH_FORWARD;
}
/** {@inheritDoc} */
@Override public void setFetchSize(int fetchSize) throws SQLException {
ensureNotClosed();
if (fetchSize <= 0)
throw new SQLException("Fetch size must be greater than zero.");
this.fetchSize = fetchSize;
}
/** {@inheritDoc} */
@Override public int getFetchSize() throws SQLException {
ensureNotClosed();
return fetchSize;
}
/** {@inheritDoc} */
@Override public int getType() throws SQLException {
ensureNotClosed();
return stmt.getResultSetType();
}
/** {@inheritDoc} */
@Override public int getConcurrency() throws SQLException {
ensureNotClosed();
return CONCUR_READ_ONLY;
}
/** {@inheritDoc} */
@Override public boolean rowUpdated() throws SQLException {
ensureNotClosed();
return false;
}
/** {@inheritDoc} */
@Override public boolean rowInserted() throws SQLException {
ensureNotClosed();
return false;
}
/** {@inheritDoc} */
@Override public boolean rowDeleted() throws SQLException {
ensureNotClosed();
return false;
}
/** {@inheritDoc} */
@Override public void updateNull(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBoolean(int colIdx, boolean x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateByte(int colIdx, byte x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateShort(int colIdx, short x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateInt(int colIdx, int x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateLong(int colIdx, long x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateFloat(int colIdx, float x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateDouble(int colIdx, double x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBigDecimal(int colIdx, BigDecimal x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateString(int colIdx, String x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBytes(int colIdx, byte[] x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateDate(int colIdx, Date x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateTime(int colIdx, Time x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateTimestamp(int colIdx, Timestamp x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateAsciiStream(int colIdx, InputStream x, int len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBinaryStream(int colIdx, InputStream x, int len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateCharacterStream(int colIdx, Reader x, int len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateObject(int colIdx, Object x, int scaleOrLen) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateObject(int colIdx, Object x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNull(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBoolean(String colLb, boolean x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateByte(String colLb, byte x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateShort(String colLb, short x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateInt(String colLb, int x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateLong(String colLb, long x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateFloat(String colLb, float x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateDouble(String colLb, double x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBigDecimal(String colLb, BigDecimal x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateString(String colLb, String x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBytes(String colLb, byte[] x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateDate(String colLb, Date x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateTime(String colLb, Time x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateTimestamp(String colLb, Timestamp x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateAsciiStream(String colLb, InputStream x, int len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBinaryStream(String colLb, InputStream x, int len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateCharacterStream(String colLb, Reader reader, int len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateObject(String colLb, Object x, int scaleOrLen) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateObject(String colLb, Object x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void insertRow() throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateRow() throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void deleteRow() throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void refreshRow() throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Row refreshing is not supported.");
}
/** {@inheritDoc} */
@Override public void cancelRowUpdates() throws SQLException {
ensureNotClosed();
}
/** {@inheritDoc} */
@Override public void moveToInsertRow() throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void moveToCurrentRow() throws SQLException {
ensureNotClosed();
}
/** {@inheritDoc} */
@Override public Statement getStatement() throws SQLException {
ensureNotClosed();
return stmt;
}
/** {@inheritDoc} */
@Override public Object getObject(int colIdx, Map<String, Class<?>> map) throws SQLException {
return getTypedValue(colIdx, Object.class);
}
/** {@inheritDoc} */
@Override public Ref getRef(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Blob getBlob(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Clob getClob(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Array getArray(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Object getObject(String colLb, Map<String, Class<?>> map) throws SQLException {
return getTypedValue(colLb, Object.class);
}
/** {@inheritDoc} */
@Override public Ref getRef(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Blob getBlob(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Clob getClob(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Array getArray(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Date getDate(int colIdx, Calendar cal) throws SQLException {
return getTypedValue(colIdx, Date.class);
}
/** {@inheritDoc} */
@Override public Date getDate(String colLb, Calendar cal) throws SQLException {
return getTypedValue(colLb, Date.class);
}
/** {@inheritDoc} */
@Override public Time getTime(int colIdx, Calendar cal) throws SQLException {
return getTypedValue(colIdx, Time.class);
}
/** {@inheritDoc} */
@Override public Time getTime(String colLb, Calendar cal) throws SQLException {
return getTypedValue(colLb, Time.class);
}
/** {@inheritDoc} */
@Override public Timestamp getTimestamp(int colIdx, Calendar cal) throws SQLException {
return getTypedValue(colIdx, Timestamp.class);
}
/** {@inheritDoc} */
@Override public Timestamp getTimestamp(String colLb, Calendar cal) throws SQLException {
return getTypedValue(colLb, Timestamp.class);
}
/** {@inheritDoc} */
@Override public URL getURL(int colIdx) throws SQLException {
return getTypedValue(colIdx, URL.class);
}
/** {@inheritDoc} */
@Override public URL getURL(String colLb) throws SQLException {
return getTypedValue(colLb, URL.class);
}
/** {@inheritDoc} */
@Override public void updateRef(int colIdx, Ref x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateRef(String colLb, Ref x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBlob(int colIdx, Blob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBlob(String colLb, Blob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateClob(int colIdx, Clob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateClob(String colLb, Clob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateArray(int colIdx, Array x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateArray(String colLb, Array x) throws SQLException {
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public RowId getRowId(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public RowId getRowId(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void updateRowId(int colIdx, RowId x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateRowId(String colLb, RowId x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public int getHoldability() throws SQLException {
ensureNotClosed();
return HOLD_CURSORS_OVER_COMMIT;
}
/** {@inheritDoc} */
@Override public boolean isClosed() throws SQLException {
return closed;
}
/** {@inheritDoc} */
@Override public void updateNString(int colIdx, String nStr) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNString(String colLb, String nStr) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNClob(int colIdx, NClob nClob) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNClob(String colLb, NClob nClob) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public NClob getNClob(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public NClob getNClob(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public SQLXML getSQLXML(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public SQLXML getSQLXML(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void updateSQLXML(int colIdx, SQLXML xmlObj) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateSQLXML(String colLb, SQLXML xmlObj) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public String getNString(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public String getNString(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Reader getNCharacterStream(int colIdx) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public Reader getNCharacterStream(String colLb) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("SQL-specific types are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNCharacterStream(int colIdx, Reader x, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNCharacterStream(String colLb, Reader reader, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateAsciiStream(int colIdx, InputStream x, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBinaryStream(int colIdx, InputStream x, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateCharacterStream(int colIdx, Reader x, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateAsciiStream(String colLb, InputStream x, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBinaryStream(String colLb, InputStream x, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateCharacterStream(String colLb, Reader reader, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBlob(int colIdx, InputStream inputStream, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBlob(String colLb, InputStream inputStream, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateClob(int colIdx, Reader reader, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateClob(String colLb, Reader reader, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNClob(int colIdx, Reader reader, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNClob(String colLb, Reader reader, long len) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNCharacterStream(int colIdx, Reader x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNCharacterStream(String colLb, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateAsciiStream(int colIdx, InputStream x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBinaryStream(int colIdx, InputStream x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateCharacterStream(int colIdx, Reader x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateAsciiStream(String colLb, InputStream x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBinaryStream(String colLb, InputStream x) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateCharacterStream(String colLb, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBlob(int colIdx, InputStream inputStream) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateBlob(String colLb, InputStream inputStream) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateClob(int colIdx, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateClob(String colLb, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNClob(int colIdx, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public void updateNClob(String colLb, Reader reader) throws SQLException {
ensureNotClosed();
throw new SQLFeatureNotSupportedException("Updates are not supported.");
}
/** {@inheritDoc} */
@Override public <T> T unwrap(Class<T> iface) throws SQLException {
if (!isWrapperFor(iface))
throw new SQLException("Result set is not a wrapper for " + iface.getName());
return (T)this;
}
/** {@inheritDoc} */
@Override public boolean isWrapperFor(Class<?> iface) throws SQLException {
return iface != null && iface == ResultSet.class;
}
/** {@inheritDoc} */
@Override public <T> T getObject(int colIdx, Class<T> type) throws SQLException {
return getTypedValue(colIdx, type);
}
/** {@inheritDoc} */
@Override public <T> T getObject(String colLb, Class<T> type) throws SQLException {
return getTypedValue(colLb, type);
}
/**
     * Gets the field value by column label, cast to the requested class.
     *
     * @param colLb Column label.
     * @param cls Value class.
     * @return Field value cast to the requested class.
* @throws SQLException In case of error.
*/
private <T> T getTypedValue(String colLb, Class<T> cls) throws SQLException {
ensureNotClosed();
ensureHasCurrentRow();
int colIdx = cols.indexOf(colLb.toUpperCase()) + 1;
if (colIdx <= 0)
throw new SQLException("Invalid column label: " + colLb);
return getTypedValue(colIdx, cls);
}
/**
     * Gets the field value by column index, cast to the requested class.
     *
     * @param colIdx Column index.
     * @param cls Value class.
     * @return Field value cast to the requested class.
* @throws SQLException In case of error.
*/
private <T> T getTypedValue(int colIdx, Class<T> cls) throws SQLException {
ensureNotClosed();
ensureHasCurrentRow();
try {
T val = cls == String.class ? (T)String.valueOf(curr.get(colIdx - 1)) : (T)curr.get(colIdx - 1);
wasNull = val == null;
return val;
}
catch (IndexOutOfBoundsException ignored) {
throw new SQLException("Invalid column index: " + colIdx);
}
catch (ClassCastException ignored) {
throw new SQLException("Value is an not instance of " + cls.getName());
}
}
/**
* Ensures that result set is not closed.
*
* @throws SQLException If result set is closed.
*/
private void ensureNotClosed() throws SQLException {
if (closed)
throw new SQLException("Result set is closed.");
}
/**
* Ensures that result set is positioned on a row.
*
* @throws SQLException If result set is not positioned on a row.
*/
private void ensureHasCurrentRow() throws SQLException {
if (curr == null)
throw new SQLException("Result set is not positioned on a row.");
}
}
| {'content_hash': 'bb783667e17bcf01c69bb586094c73ad', 'timestamp': '', 'source': 'github', 'line_count': 1504, 'max_line_length': 109, 'avg_line_length': 30.33377659574468, 'alnum_prop': 0.6427819911446232, 'repo_name': 'akuznetsov-gridgain/ignite', 'id': 'ba09e1d4890698f2d3d4309819fda26dff515bcc', 'size': '46424', 'binary': False, 'copies': '11', 'ref': 'refs/heads/master', 'path': 'modules/core/src/main/java/org/apache/ignite/internal/jdbc/JdbcResultSet.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '31291'}, {'name': 'C++', 'bytes': '28098'}, {'name': 'CSS', 'bytes': '17517'}, {'name': 'Groovy', 'bytes': '15102'}, {'name': 'HTML', 'bytes': '4669'}, {'name': 'Java', 'bytes': '18401553'}, {'name': 'JavaScript', 'bytes': '1085'}, {'name': 'PHP', 'bytes': '11079'}, {'name': 'Scala', 'bytes': '653258'}, {'name': 'Shell', 'bytes': '392571'}]} |
<?php
/**
* Created by PhpStorm.
* User: exodus4d
* Date: 28.04.15
* Time: 21:27
*/
namespace Exodus4D\Pathfinder\Controller;
use Exodus4D\Pathfinder\Controller\Ccp as Ccp;
use Exodus4D\Pathfinder\Lib\Config;
use Exodus4D\Pathfinder\Lib\Resource;
class AppController extends Controller {
/**
* @param \Base $f3
* @param $params
* @return bool
*/
public function beforeroute(\Base $f3, $params) : bool {
// page title
$f3->set('tplPageTitle', Config::getPathfinderData('name'));
// main page content
$f3->set('tplPageContent', Config::getPathfinderData('view.login'));
// body element class
$f3->set('tplBodyClass', 'pf-landing');
// JS main file
$f3->set('tplJsView', 'login');
if($return = parent::beforeroute($f3, $params)){
// href for SSO Auth
$f3->set('tplAuthType', $f3->get('BASE') . $f3->alias( 'sso', ['action' => 'requestAuthorization'] ));
// characters from cookies
$f3->set('cookieCharacters', $this->getCookieByName(self::COOKIE_PREFIX_CHARACTER, true));
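            // column span per character panel on a 12-column grid, never narrower than 3 columns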
$f3->set('getCharacterGrid', function($characters){
return ( ((12 / count($characters)) <= 3) ? 3 : (12 / count($characters)) );
});
}
return $return;
}
/**
* event handler after routing
* @param \Base $f3
*/
public function afterroute(\Base $f3){
parent::afterroute($f3);
// clear all SSO related temp data
if($f3->exists(Ccp\Sso::SESSION_KEY_SSO)){
$f3->clear(Ccp\Sso::SESSION_KEY_SSO);
}
}
/**
* show main login (index) page
* @param \Base $f3
*/
public function init(\Base $f3){
$resource = Resource::instance();
$resource->register('script', 'app/login');
$resource->register('script', 'app/mappage', 'prefetch');
$resource->register('image', 'sso/signature.png');
$resource->register('image', 'sso/gameplay.png');
}
} | {'content_hash': 'ea7303e4c34c965b81e4fa76c9517c8e', 'timestamp': '', 'source': 'github', 'line_count': 75, 'max_line_length': 114, 'avg_line_length': 27.466666666666665, 'alnum_prop': 0.5645631067961165, 'repo_name': 'exodus4d/pathfinder', 'id': 'b9880f4cda252eeb721e4f4e86bcf1bb13c73053', 'size': '2060', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app/Controller/AppController.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '378365'}, {'name': 'HTML', 'bytes': '453443'}, {'name': 'JavaScript', 'bytes': '2609425'}, {'name': 'PHP', 'bytes': '997459'}]} |
<div class="viewport">
<div class="title-bar bg">
</div>
<div class="horizontal-layout">
<div class="project-container"></div>
<div class="code-container bg" outlet="@codeContainerEl">
<div class="expand-collapse-button" onclick="@expandCollapseCodeContainer"></div>
</div>
<div class="view-container"></div>
</div>
</div> | {'content_hash': '2e6a6b35e0ca18b8466aad1cd9705f61', 'timestamp': '', 'source': 'github', 'line_count': 12, 'max_line_length': 84, 'avg_line_length': 29.25, 'alnum_prop': 0.6552706552706553, 'repo_name': 'eladyarkoni/stackjs', 'id': '4eb6c0a2b73542335b761f575b7f46ddf92758d3', 'size': '351', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'app/templates/application_view.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '34641'}, {'name': 'HTML', 'bytes': '32538'}, {'name': 'JavaScript', 'bytes': '713295'}]} |
var selftest = require('../tool-testing/selftest.js');
var Sandbox = selftest.Sandbox;
var files = require('../fs/files.js');
var utils = require('../utils/utils.js');
var archinfo = require('../utils/archinfo.js');
var _ = require('underscore');
selftest.define("wipe all packages", function () {
var s = new Sandbox({
warehouse: {
v1: { tool: "[email protected]", recommended: true },
v2: { tool: "[email protected]", recommended: true },
v3: { tool: "[email protected]", recommended: true }
}
});
var meteorToolVersion = function (v) {
return {
_id: 'VID' + v.replace(/\./g, ''),
packageName: 'meteor-tool',
testName: null,
version: v,
publishedBy: null,
description: 'The Meteor command-line tool',
git: undefined,
dependencies: { meteor: { constraint: null, references: [{ arch: 'os' }, { arch: 'web.browser' }, { arch: 'web.cordova' }] } },
source: null,
lastUpdated: null,
published: null,
isTest: false,
debugOnly: false,
prodOnly: false,
containsPlugins: false
};
};
var meteorToolBuild = function (v) {
return {
buildArchitectures: archinfo.host(),
versionId: 'VID' + v.replace(/\./g, ''),
_id: utils.randomToken()
};
};
// insert the new tool versions into the catalog
s.warehouseOfficialCatalog.insertData({
syncToken: {},
formatVersion: "1.0",
collections: {
packages: [],
versions: [meteorToolVersion('33.0.1'), meteorToolVersion('33.0.2'), meteorToolVersion('33.0.3')],
builds: [meteorToolBuild('33.0.1'), meteorToolBuild('33.0.2'), meteorToolBuild('33.0.3')],
releaseTracks: [],
releaseVersions: []
}
});
// help warehouse faking by copying the meteor-tool 3 times and introducing 3
// fake versions (identical in code to the one we are running)
var latestMeteorToolVersion =
files.readLinkToMeteorScript(files.pathJoin(s.warehouse, 'meteor')).split('/');
latestMeteorToolVersion = latestMeteorToolVersion[latestMeteorToolVersion.length - 3];
var prefix = files.pathJoin(s.warehouse, 'packages', 'meteor-tool');
var copyTool = function (srcVersion, dstVersion) {
if (process.platform === 'win32') {
// just copy the files
files.cp_r(
files.pathJoin(prefix, srcVersion),
files.pathJoin(prefix, dstVersion), {
preserveSymlinks: true
});
} else {
// figure out what the symlink links to and copy the folder *and* the
// symlink
var srcFullVersion = files.readlink(files.pathJoin(prefix, srcVersion));
var dstFullVersion = srcFullVersion.replace(srcVersion, dstVersion);
// copy the hidden folder
files.cp_r(
files.pathJoin(prefix, srcFullVersion),
files.pathJoin(prefix, dstFullVersion), {
preserveSymlinks: true
});
// link to it
files.symlink(
dstFullVersion,
files.pathJoin(prefix, dstVersion));
}
var replaceVersionInFile = function (filename) {
var filePath = files.pathJoin(prefix, dstVersion, filename);
files.writeFile(
filePath,
files.readFile(filePath, 'utf8')
.replace(new RegExp(srcVersion, 'g'), dstVersion));
};
// "fix" the isopack.json and unibuild.json files (they contain the versions)
replaceVersionInFile('isopack.json');
replaceVersionInFile('unipackage.json');
};
copyTool(latestMeteorToolVersion, '33.0.3');
copyTool(latestMeteorToolVersion, '33.0.2');
copyTool(latestMeteorToolVersion, '33.0.1');
// since the warehouse faking system is weak and under-developed, add more
// faking, such as making the v3 the latest version
files.linkToMeteorScript(
files.pathJoin('packages', 'meteor-tool', '33.0.3', 'mt-' + archinfo.host(), 'meteor'),
files.pathJoin(s.warehouse, 'meteor'));
var run;
run = s.run('--release', 'v1', 'admin', 'wipe-all-packages');
run.waitSecs(15);
run.expectExit(0);
// OK, wiped all packages, now let's go and check that everything is removed
// except for the tool we are running right now and the latest tool. i.e. v1
// and v3
var notHidden = function (f) { return f[0] !== '.'; };
var meteorToolDirs = _.filter(files.readdir(prefix), notHidden);
selftest.expectTrue(meteorToolDirs.length === 2);
_.each(meteorToolDirs, function (f) {
var fPath = files.pathJoin(prefix, f);
if (process.platform === 'win32') {
// this is a dir
selftest.expectTrue(files.lstat(fPath).isDirectory());
} else {
// this is a symlink to a dir and this dir exists
selftest.expectTrue(files.lstat(fPath).isSymbolicLink());
selftest.expectTrue(files.exists(files.pathJoin(prefix, files.readlink(fPath))));
}
// check that the version is either the running one, or the latest one
selftest.expectTrue(_.contains(['33.0.1', '33.0.3'], f));
});
// Check that all other packages are wiped
_.each(files.readdir(files.pathJoin(s.warehouse, 'packages')), function (p) {
if (p[0] === '.') {
return;
}
if (p === 'meteor-tool') {
return;
}
var contents = files.readdir(files.pathJoin(s.warehouse, 'packages', p));
contents = _.filter(contents, notHidden);
selftest.expectTrue(contents.length === 0);
});
});
| {'content_hash': '164fc59cf4a92878121ca30b53db58c7', 'timestamp': '', 'source': 'github', 'line_count': 153, 'max_line_length': 133, 'avg_line_length': 34.908496732026144, 'alnum_prop': 0.63639767833739, 'repo_name': 'benstoltz/meteor', 'id': 'ae7993ff195c75f343a1fdc562113348f76dbedd', 'size': '5341', 'binary': False, 'copies': '7', 'ref': 'refs/heads/devel', 'path': 'tools/tests/wipe-all-packages.js', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Batchfile', 'bytes': '6618'}, {'name': 'C', 'bytes': '235849'}, {'name': 'C#', 'bytes': '21958'}, {'name': 'C++', 'bytes': '203156'}, {'name': 'CSS', 'bytes': '8703'}, {'name': 'CoffeeScript', 'bytes': '33946'}, {'name': 'HTML', 'bytes': '65111'}, {'name': 'JavaScript', 'bytes': '5409417'}, {'name': 'PowerShell', 'bytes': '7720'}, {'name': 'Ruby', 'bytes': '2477'}, {'name': 'Shell', 'bytes': '36863'}]} |
package org.codehaus.griffon.runtime.scaffolding;
import griffon.plugins.scaffolding.CommandObject;
import org.codehaus.griffon.runtime.validation.AbstractValidateable;
/**
* @author Andres Almiray
*/
public abstract class AbstractCommandObject extends AbstractValidateable implements CommandObject {
}
| {'content_hash': '9771f3bbe72dc1917d37d45be01ccaa0', 'timestamp': '', 'source': 'github', 'line_count': 12, 'max_line_length': 99, 'avg_line_length': 25.75, 'alnum_prop': 0.8317152103559871, 'repo_name': 'griffon/griffon-scaffolding-plugin', 'id': '1e1269953f58d7f2f2cce35b3b1163a19ebef926', 'size': '929', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/main/org/codehaus/griffon/runtime/scaffolding/AbstractCommandObject.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Groovy', 'bytes': '4746'}, {'name': 'Java', 'bytes': '124929'}]} |
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.particles import ParticleEffect, Particles, ForceGroup
from PooledEffect import PooledEffect
from EffectController import EffectController
class SparksTrail(PooledEffect, EffectController):
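    """Pooled particle effect that emits a brief trail of star/spark sprites."""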
def __init__(self):
PooledEffect.__init__(self)
EffectController.__init__(self)
model = loader.loadModel('phase_4/models/props/tt_m_efx_ext_particleCards')
self.card = model.find('**/tt_t_efx_ext_particleStars')
self.cardScale = 64.0
self.effectColor = Vec4(1, 1, 1, 1)
self.effectScale = 1.0
self.lifespan = 1.0
if not SparksTrail.particleDummy:
SparksTrail.particleDummy = render.attachNewNode(ModelNode('SparksTrailParticleDummy'))
SparksTrail.particleDummy.setDepthWrite(0)
SparksTrail.particleDummy.setLightOff()
SparksTrail.particleDummy.setFogOff()
self.f = ParticleEffect.ParticleEffect('SparksTrail')
self.f.reparentTo(self)
self.p0 = Particles.Particles('particles-1')
self.p0.setFactory('ZSpinParticleFactory')
self.p0.setRenderer('SpriteParticleRenderer')
self.p0.setEmitter('PointEmitter')
self.f.addParticles(self.p0)
self.p0.setPoolSize(64)
self.p0.setBirthRate(0.02)
self.p0.setLitterSize(1)
self.p0.setLitterSpread(0)
self.p0.setSystemLifespan(0.0)
self.p0.setLocalVelocityFlag(0)
self.p0.setSystemGrowsOlderFlag(0)
self.p0.factory.setLifespanBase(0.5)
self.p0.factory.setLifespanSpread(0.1)
self.p0.factory.setMassBase(1.0)
self.p0.factory.setMassSpread(0.0)
self.p0.factory.setTerminalVelocityBase(400.0)
self.p0.factory.setTerminalVelocitySpread(0.0)
self.p0.factory.setInitialAngle(0.0)
self.p0.factory.setInitialAngleSpread(90.0)
self.p0.factory.enableAngularVelocity(1)
self.p0.factory.setAngularVelocity(0.0)
self.p0.factory.setAngularVelocitySpread(25.0)
self.p0.renderer.setAlphaMode(BaseParticleRenderer.PRALPHAOUT)
self.p0.renderer.setUserAlpha(1.0)
self.p0.renderer.setFromNode(self.card)
self.p0.renderer.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
self.p0.renderer.setXScaleFlag(1)
self.p0.renderer.setYScaleFlag(1)
self.p0.renderer.setAnimAngleFlag(1)
self.p0.renderer.setNonanimatedTheta(0.0)
self.p0.renderer.setAlphaBlendMethod(BaseParticleRenderer.PPBLENDLINEAR)
self.p0.renderer.setAlphaDisable(0)
self.p0.renderer.setColorBlendMode(ColorBlendAttrib.MAdd, ColorBlendAttrib.OIncomingAlpha, ColorBlendAttrib.OOne)
self.p0.emitter.setEmissionType(BaseParticleEmitter.ETRADIATE)
self.p0.emitter.setAmplitudeSpread(0.0)
self.p0.emitter.setOffsetForce(Vec3(0.0, 0.0, -2.0))
self.p0.emitter.setExplicitLaunchVector(Vec3(1.0, 0.0, 0.0))
self.p0.emitter.setRadiateOrigin(Point3(0.0, 0.0, 0.0))
self.setEffectScale(self.effectScale)
def createTrack(self):
self.startEffect = Sequence(Func(self.p0.setBirthRate, 0.01), Func(self.p0.clearToInitial), Func(self.f.start, self, self.particleDummy))
self.endEffect = Sequence(Func(self.p0.setBirthRate, 100.0), Wait(1.0), Func(self.cleanUpEffect))
self.track = Sequence(self.startEffect, Wait(1.0), self.endEffect)
def setEffectColor(self, color):
self.effectColor = color
self.p0.renderer.setColor(self.effectColor)
def setEffectScale(self, scale):
self.effectScale = scale
self.p0.renderer.setInitialXScale(0.1 * self.cardScale * scale)
self.p0.renderer.setFinalXScale(0.2 * self.cardScale * scale)
self.p0.renderer.setInitialYScale(0.1 * self.cardScale * scale)
self.p0.renderer.setFinalYScale(0.2 * self.cardScale * scale)
self.p0.emitter.setAmplitude(20.0 * scale)
def cleanUpEffect(self):
EffectController.cleanUpEffect(self)
if self.pool and self.pool.isUsed(self):
self.pool.checkin(self)
def destroy(self):
EffectController.destroy(self)
PooledEffect.destroy(self)
| {'content_hash': 'fb5f2d6e044de03be89940303d5ce329', 'timestamp': '', 'source': 'github', 'line_count': 90, 'max_line_length': 145, 'avg_line_length': 47.06666666666667, 'alnum_prop': 0.690982058545798, 'repo_name': 'ToonTownInfiniteRepo/ToontownInfinite', 'id': 'f1bf0e88b05b3a386e0bff615bc281513ce8c1a5', 'size': '4236', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'toontown/effects/SparksTrail.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '1703277'}, {'name': 'C#', 'bytes': '9892'}, {'name': 'C++', 'bytes': '5468044'}, {'name': 'Emacs Lisp', 'bytes': '210083'}, {'name': 'F#', 'bytes': '4611'}, {'name': 'JavaScript', 'bytes': '7003'}, {'name': 'Objective-C', 'bytes': '23212'}, {'name': 'Puppet', 'bytes': '5245'}, {'name': 'Python', 'bytes': '34010215'}, {'name': 'Shell', 'bytes': '11192'}, {'name': 'Tcl', 'bytes': '1981257'}]} |
THREAD_ANNOTATION_ATTRIBUTE__(acquired_after(__VA_ARGS__))
#define ACQUIRED_BEFORE(...) \
THREAD_ANNOTATION_ATTRIBUTE__(acquired_before(__VA_ARGS__))
// Document a function that expects a mutex to be held prior to entry.
// The mutex is expected to be held both on entry to and exit from the
// function.
#define EXCLUSIVE_LOCKS_REQUIRED(...) \
THREAD_ANNOTATION_ATTRIBUTE__(exclusive_locks_required(__VA_ARGS__))
#define SHARED_LOCKS_REQUIRED(...) \
THREAD_ANNOTATION_ATTRIBUTE__(shared_locks_required(__VA_ARGS__))
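// Example (sketch, assuming a Mutex class and the GUARDED_BY annotation from
// the full header): a caller-must-hold-the-lock contract looks like
//
//   Mutex mu_;
//   int counter_ GUARDED_BY(mu_);
//
//   void IncrementCounter() EXCLUSIVE_LOCKS_REQUIRED(mu_) {
//     ++counter_;  // the analysis knows mu_ is held here
//   }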
// Document the locks acquired in the body of the function. These locks
// cannot be held when calling this function (as the Mutex implementation is
// non-reentrant).
#define LOCKS_EXCLUDED(...) \
THREAD_ANNOTATION_ATTRIBUTE__(locks_excluded(__VA_ARGS__))
// Document a function that returns a mutex without acquiring it. For example,
// a public getter method that returns a pointer to a private mutex should
// be annotated with LOCK_RETURNED.
#define LOCK_RETURNED(x) \
THREAD_ANNOTATION_ATTRIBUTE__(lock_returned(x))
// Document if a class/type is a lockable type (such as the Mutex class).
#define LOCKABLE \
THREAD_ANNOTATION_ATTRIBUTE__(lockable)
// Document if a class does RAII locking (such as the MutexLock class).
// The constructor should use LOCK_FUNCTION to specify the mutex that is
// acquired, and the destructor should use UNLOCK_FUNCTION with no arguments;
// the analysis will assume that the destructor unlocks whatever the
// constructor locked.
#define SCOPED_LOCKABLE \
THREAD_ANNOTATION_ATTRIBUTE__(scoped_lockable)
// Document functions that acquire a lock in the body of a function, and do
// not release it.
#define EXCLUSIVE_LOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(exclusive_lock_function(__VA_ARGS__))
#define SHARED_LOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(shared_lock_function(__VA_ARGS__))
// Document functions that expect a lock to be held on entry to the function,
// and release it in the body of the function.
#define UNLOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(unlock_function(__VA_ARGS__))
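// Example (sketch, assuming a Mutex class with Lock()/Unlock() methods): a
// minimal RAII guard combining SCOPED_LOCKABLE with the annotations above:
//
//   class SCOPED_LOCKABLE MutexLock {
//    public:
//     explicit MutexLock(Mutex* mu) EXCLUSIVE_LOCK_FUNCTION(mu) : mu_(mu) {
//       mu_->Lock();
//     }
//     ~MutexLock() UNLOCK_FUNCTION() { mu_->Unlock(); }
//
//    private:
//     Mutex* mu_;
//   };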
// Document functions that try to acquire a lock, and return success or failure.
// The first argument should be true, for functions that return true on success,
// or false, for functions that return false on success.
#define EXCLUSIVE_TRYLOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(exclusive_trylock_function(__VA_ARGS__))
#define SHARED_TRYLOCK_FUNCTION(...) \
THREAD_ANNOTATION_ATTRIBUTE__(shared_trylock_function(__VA_ARGS__))
// Document functions that dynamically check to see if a lock is held, and fail
// if it is not held.
#define ASSERT_EXCLUSIVE_LOCK(...) \
THREAD_ANNOTATION_ATTRIBUTE__(assert_exclusive_lock(__VA_ARGS__))
#define ASSERT_SHARED_LOCK(...) \
THREAD_ANNOTATION_ATTRIBUTE__(assert_shared_lock(__VA_ARGS__))
// Turns off thread safety checking within the body of a particular function.
// This is used as an escape hatch for cases where either (a) the function
// is correct, but the locking is more complicated than the analyzer can handle,
// or (b) the function contains race conditions that are known to be benign.
#define NO_THREAD_SAFETY_ANALYSIS \
THREAD_ANNOTATION_ATTRIBUTE__(no_thread_safety_analysis)
// Deprecated.
// NO_THREAD_SAFETY_ANALYSIS_OPT is a workaround for bugs in gcc annotalysis.
#define NO_THREAD_SAFETY_ANALYSIS_OPT
// TS_UNCHECKED should be placed around lock expressions that are not valid
// C++ syntax, but which are present for documentation purposes. These
// annotations will be ignored by the analysis.
#define TS_UNCHECKED(x) ""
// Deprecated.
// This is used to pass different annotations to gcc and clang, in cases where
// gcc would reject a lock expression (e.g. &MyClass::mu_) that is accepted
// by clang. This is seldom needed, since GCC usually ignores invalid lock
// expressions except in certain cases, such as LOCK_RETURNED.
// TODO(user): remove all uses of this macro from google.
#define TS_CLANG_ONLY(CLANG_EXPR, GCC_EXPR) CLANG_EXPR
// TS_FIXME is used to mark lock expressions that are not valid C++ syntax.
// It is used by automated tools to mark and disable invalid expressions.
// The annotation should either be fixed, or changed to TS_UNCHECKED.
#define TS_FIXME(x) ""
// Like NO_THREAD_SAFETY_ANALYSIS, this turns off checking within the body of
// a particular function. However, this attribute is used to mark functions
// that are incorrect and need to be fixed. It is used by automated tools to
// avoid breaking the build when the analysis is updated.
// Code owners are expected to eventually fix the routine.
#define NO_THREAD_SAFETY_ANALYSIS_FIXME NO_THREAD_SAFETY_ANALYSIS
// Similar to NO_THREAD_SAFETY_ANALYSIS_FIXME, this macro marks a GUARDED_BY
// annotation that needs to be fixed, because it is producing thread safety
// warning. It disables the GUARDED_BY.
#define GUARDED_BY_FIXME(x)
// Disables warnings for a single read operation. This can be used to do racy
// reads of guarded data members, in cases where the race is benign.
#define TS_UNCHECKED_READ(x) thread_safety_analysis::ts_unchecked_read(x)
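// Example (sketch, reusing the hypothetical mu_/counter_ members from the
// example above): a deliberately racy but benign read can be written as
//
//   int ApproximateCount() const { return TS_UNCHECKED_READ(counter_); }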
namespace thread_safety_analysis {
// Takes a reference to a guarded data member, and returns an unguarded
// reference.
template <class T>
inline const T& ts_unchecked_read(const T& v) NO_THREAD_SAFETY_ANALYSIS {
return v;
}
template <class T>
inline T& ts_unchecked_read(T& v) NO_THREAD_SAFETY_ANALYSIS {
return v;
}
} // namespace thread_safety_analysis
#endif // BASE_THREAD_ANNOTATIONS_H_
| {'content_hash': '13268aa23bce66cf218e0eda309ba5cd', 'timestamp': '', 'source': 'github', 'line_count': 130, 'max_line_length': 80, 'avg_line_length': 42.2, 'alnum_prop': 0.7468100619759388, 'repo_name': 'BradJensen/supersonic', 'id': 'b1bd2e16e0a321e5d081f133b666cdf9cc4cab73', 'size': '9233', 'binary': False, 'copies': '20', 'ref': 'refs/heads/master', 'path': 'supersonic/utils/thread_annotations.h', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '256274'}, {'name': 'C++', 'bytes': '4141906'}, {'name': 'Makefile', 'bytes': '2914'}, {'name': 'Protocol Buffer', 'bytes': '23907'}, {'name': 'Shell', 'bytes': '51264'}]} |
import sys
import os
from datetime import datetime
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
# 'sphinxcontrib.spelling',
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
# 'sphinxarg.ext',
]
# spelling options
spelling_lang = 'en_US'
spelling_show_suggestions = True
spelling_word_list_filename = 'spelling_wordlist.txt'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ArgParseInator'
copyright = u'2014-{} ellethee'.format(datetime.today().year)
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
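# __version__ is pulled in by executing the package's __init__.py directly
# (note: execfile is Python 2 only).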
execfile("../../argparseinator/__init__.py")
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'default'
try:
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
except ImportError:
pass
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ArgParseInatordoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
'preamble': "".join((
'\DeclareUnicodeCharacter{00A0}{ }', # NO-BREAK SPACE
'\DeclareUnicodeCharacter{251C}{+}', # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\DeclareUnicodeCharacter{2514}{+}', # BOX DRAWINGS LIGHT UP AND RIGHT
)),
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ArgParseInator.tex', u'ArgParseInator Documentation',
u'ellethee <[email protected]>', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'argparseinator', u'ArgParseInator Documentation',
[u'ellethee <[email protected]>'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'ArgParseInator', u'ArgParseInator Documentation',
u'ellethee <[email protected]>', 'ArgParseInator', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/2': None}
| {'content_hash': '2c5597185c762d78ef53bc35090e4762', 'timestamp': '', 'source': 'github', 'line_count': 271, 'max_line_length': 89, 'avg_line_length': 32.20664206642066, 'alnum_prop': 0.706576535288726, 'repo_name': 'ellethee/argparseinator', 'id': '01db5c54b1bd7619249e82e65032649c87453339', 'size': '9155', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'docs/source/conf.py', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Python', 'bytes': '52165'}]} |
/* eslint max-nested-callbacks:[1] */
'use strict';
var gStreamify = require('../');
var Stream = require('stream');
var streamtest = require('streamtest');
var gutil = require('gulp-util');
var assert = require('assert');
describe('gulp-streamify', function() {
  // Simple plugin that appends 'test' to file contents, for test purposes
function pluginFunction() {
var pluginStream = new Stream.Transform({ objectMode: true });
pluginStream._transform = function(file, unused, cb) {
assert(file.contents instanceof Buffer);
file.contents = Buffer.concat([file.contents, new Buffer('test')]);
pluginStream.push(file);
cb();
};
return pluginStream;
}
streamtest.versions.forEach(function(version) {
describe('for ' + version + ' streams', function() {
it('should pass null files through', function(done) {
gStreamify(streamtest[version].fromObjects([
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file.js',
contents: null,
}),
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file2.js',
contents: null,
}),
])).pipe(streamtest[version].toObjects(function(err, objs) {
if(err) {
return done(err);
}
assert.equal(objs.length, 2);
done();
}));
});
it('should reemit errors', function(done) {
var passStream = new Stream.PassThrough({ objectMode: true });
var stream = gStreamify(passStream);
var inputError = new Error('ich bin ein error');
stream.on('error', function(error) {
assert.equal(error, inputError);
done();
});
passStream.emit('error', inputError);
});
describe('in stream mode', function() {
it('should work with sync streams and sync contents', function(done) {
var pluginStream = pluginFunction();
var inputStream = new Stream.PassThrough({ objectMode: true });
var fakeFile = new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file.js',
contents: new Stream.PassThrough(),
});
var fakeFile2 = new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file2.js',
contents: new Stream.PassThrough(),
});
inputStream
.pipe(gStreamify(pluginStream))
.pipe(streamtest[version].toObjects(function(err, files) {
if(err) {
return done(err);
}
assert.equal(files.length, 2);
assert.equal(files[0].cwd, '/home/nfroidure/');
assert.equal(files[0].base, '/home/nfroidure/test');
assert.equal(files[0].path, '/home/nfroidure/test/file.js');
assert.equal(files[1].cwd, '/home/nfroidure/');
assert.equal(files[1].base, '/home/nfroidure/test');
assert.equal(files[1].path, '/home/nfroidure/test/file2.js');
files[0].pipe(streamtest[version].toText(function(err2, text) {
if(err2) {
return done(err2);
}
assert.equal(text, 'plipplaptest');
files[1].pipe(streamtest[version].toText(function(err3, text2) {
if(err3) {
return done(err3);
}
assert.equal(text2, 'ploppluptest');
done();
}));
}));
}));
inputStream.write(fakeFile);
inputStream.write(fakeFile2);
inputStream.end();
fakeFile.contents.write('plip');
fakeFile.contents.write('plap');
fakeFile.contents.end();
fakeFile2.contents.write('plop');
fakeFile2.contents.write('plup');
fakeFile2.contents.end();
});
it('should work with sync streams and async contents', function(done) {
var pluginStream = pluginFunction();
var inputStream = new Stream.PassThrough({ objectMode: true });
var fakeFile = new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file.js',
contents: streamtest.v2.fromChunks(['plip', 'plap']),
});
var fakeFile2 = new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file2.js',
contents: streamtest.v2.fromChunks(['plop', 'plup']),
});
inputStream
.pipe(gStreamify(pluginStream))
.pipe(streamtest[version].toObjects(function(err, files) {
if(err) {
return done(err);
}
assert.equal(files.length, 2);
assert.equal(files[0].cwd, '/home/nfroidure/');
assert.equal(files[0].base, '/home/nfroidure/test');
assert.equal(files[0].path, '/home/nfroidure/test/file.js');
assert.equal(files[1].cwd, '/home/nfroidure/');
assert.equal(files[1].base, '/home/nfroidure/test');
assert.equal(files[1].path, '/home/nfroidure/test/file2.js');
files[0].pipe(streamtest[version].toText(function(err2, text) {
if(err2) {
return done(err2);
}
assert.equal(text, 'plipplaptest');
files[1].pipe(streamtest[version].toText(function(err3, text2) {
if(err3) {
return done(err3);
}
assert.equal(text2, 'ploppluptest');
done();
}));
}));
}));
inputStream.write(fakeFile);
inputStream.write(fakeFile2);
inputStream.end();
});
it('should work with async streams and async contents', function(done) {
var pluginStream = pluginFunction();
streamtest[version].fromObjects([
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file.js',
contents: streamtest.v2.fromChunks(['plip', 'plap']),
}),
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file2.js',
contents: streamtest.v2.fromChunks(['plip', 'plup']),
}),
])
.pipe(gStreamify(pluginStream))
.pipe(streamtest[version].toObjects(function(err, files) {
if(err) {
return done(err);
}
assert.equal(files.length, 2);
assert.equal(files[0].cwd, '/home/nfroidure/');
assert.equal(files[0].base, '/home/nfroidure/test');
assert.equal(files[0].path, '/home/nfroidure/test/file.js');
assert.equal(files[1].cwd, '/home/nfroidure/');
assert.equal(files[1].base, '/home/nfroidure/test');
assert.equal(files[1].path, '/home/nfroidure/test/file2.js');
files[0].pipe(streamtest[version].toText(function(err2, text) {
if(err2) {
return done(err2);
}
assert.equal(text, 'plipplaptest');
files[1].pipe(streamtest[version].toText(function(err3, text2) {
if(err3) {
return done(err3);
}
assert.equal(text2, 'plippluptest');
done();
}));
}));
}));
});
      it('should work with plugin function providing async files streams', function(done) {
streamtest[version].fromObjects([
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file.js',
contents: streamtest.v2.fromChunks(['plip', 'plap']),
}),
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file2.js',
contents: streamtest.v2.fromChunks(['plip', 'plup']),
}),
])
.pipe(gStreamify(pluginFunction))
.pipe(streamtest[version].toObjects(function(err, files) {
if(err) {
return done(err);
}
assert.equal(files.length, 2);
assert.equal(files[0].cwd, '/home/nfroidure/');
assert.equal(files[0].base, '/home/nfroidure/test');
assert.equal(files[0].path, '/home/nfroidure/test/file.js');
assert.equal(files[1].cwd, '/home/nfroidure/');
assert.equal(files[1].base, '/home/nfroidure/test');
assert.equal(files[1].path, '/home/nfroidure/test/file2.js');
files[0].pipe(streamtest[version].toText(function(err2, text) {
if(err2) {
return done(err2);
}
assert.equal(text, 'plipplaptest');
files[1].pipe(streamtest[version].toText(function(err3, text2) {
if(err3) {
return done(err3);
}
assert.equal(text2, 'plippluptest');
done();
}));
}));
}));
});
});
describe('in buffer mode', function() {
it('should work', function(done) {
var pluginStream = pluginFunction();
streamtest[version].fromObjects([
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file.js',
contents: new Buffer('plipplap'),
}),
new gutil.File({
cwd: '/home/nfroidure/',
base: '/home/nfroidure/test',
path: '/home/nfroidure/test/file2.js',
contents: new Buffer('plipplup'),
}),
])
.pipe(gStreamify(pluginStream))
.pipe(streamtest[version].toObjects(function(err, files) {
if(err) {
return done(err);
}
assert.equal(files.length, 2);
assert.equal(files[0].cwd, '/home/nfroidure/');
assert.equal(files[0].base, '/home/nfroidure/test');
assert(files[0].contents instanceof Buffer);
assert.equal(files[0].path, '/home/nfroidure/test/file.js');
assert.equal(files[0].contents.toString(), 'plipplaptest');
assert.equal(files[1].cwd, '/home/nfroidure/');
assert.equal(files[1].base, '/home/nfroidure/test');
assert(files[1].contents instanceof Buffer);
assert.equal(files[1].path, '/home/nfroidure/test/file2.js');
assert.equal(files[1].contents.toString(), 'plippluptest');
done();
}));
});
});
});
});
});
[source: infinito84/frontend-arch, node_modules/gulp-streamify/tests/index.mocha.js, license: mit]
#include <stdlib.h>
#include <math.h>
#include <stdbool.h>
#include <string.h>
#include "assignmentoptimal.h"
#define cell double
#define cell_max INFINITY
void buildassignmentvector_dbl(long*, bool*, long , long );
void computeassignmentcost_dbl(long*, cell*, cell*, long );
void step2a_dbl(long*, cell*, bool*, bool*, bool*, bool*, bool*, long, long, long);
void step2b_dbl(long*, cell*, bool*, bool*, bool*, bool*, bool*, long, long, long);
void step3_dbl (long*, cell*, bool*, bool*, bool*, bool*, bool*, long, long, long);
void step4_dbl (long*, cell*, bool*, bool*, bool*, bool*, bool*, long, long, long, long, long);
void step5_dbl (long*, cell*, bool*, bool*, bool*, bool*, bool*, long, long, long);
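/*
 * This file implements the classic Munkres (Hungarian) algorithm for the
 * linear assignment problem. assignmentoptimal_dbl() first subtracts the
 * row (or column) minima from the distance matrix and stars an initial set
 * of zeros, then iterates steps 2a-5 below until every row or column
 * (whichever dimension is smaller) carries exactly one starred zero.
 */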
void assignmentoptimal_dbl(long *assignment, cell *cost, cell *distMatrixIn, long nOfRows, long nOfColumns)
{
cell *distMatrix, *distMatrixTemp, *distMatrixEnd, *columnEnd, value, minValue;
bool *coveredColumns, *coveredRows, *starMatrix, *newStarMatrix, *primeMatrix;
long nOfElements, minDim, row, col;
/* initialization */
*cost = 0;
for(row=0; row<nOfRows; row++)
assignment[row] = -1;
/* create working copy of distance Matrix */
nOfElements = nOfRows * nOfColumns;
distMatrix = (cell *)malloc(nOfElements * sizeof(cell));
distMatrixEnd = distMatrix + nOfElements;
memcpy(distMatrix, distMatrixIn, nOfElements * sizeof(cell));
/* memory allocation */
coveredColumns = (bool *)calloc(nOfColumns, sizeof(bool));
coveredRows = (bool *)calloc(nOfRows, sizeof(bool));
starMatrix = (bool *)calloc(nOfElements, sizeof(bool));
primeMatrix = (bool *)calloc(nOfElements, sizeof(bool));
newStarMatrix = (bool *)calloc(nOfElements, sizeof(bool)); /* used in step4_dbl */
/* preliminary steps */
if(nOfRows <= nOfColumns)
{
minDim = nOfRows;
for(row=0; row<nOfRows; row++)
{
/* find the smallest element in the row */
distMatrixTemp = distMatrix + row;
minValue = *distMatrixTemp;
distMatrixTemp += nOfRows;
while(distMatrixTemp < distMatrixEnd)
{
value = *distMatrixTemp;
if(value < minValue)
minValue = value;
distMatrixTemp += nOfRows;
}
/* subtract the smallest element from each element of the row */
distMatrixTemp = distMatrix + row;
while(distMatrixTemp < distMatrixEnd)
{
*distMatrixTemp -= minValue;
distMatrixTemp += nOfRows;
}
}
/* Steps 1 and 2a */
for(row=0; row<nOfRows; row++)
for(col=0; col<nOfColumns; col++)
if(distMatrix[row + nOfRows*col] == 0)
if(!coveredColumns[col])
{
starMatrix[row + nOfRows*col] = true;
coveredColumns[col] = true;
break;
}
}
else /* if(nOfRows > nOfColumns) */
{
minDim = nOfColumns;
for(col=0; col<nOfColumns; col++)
{
/* find the smallest element in the column */
distMatrixTemp = distMatrix + nOfRows*col;
columnEnd = distMatrixTemp + nOfRows;
minValue = *distMatrixTemp++;
while(distMatrixTemp < columnEnd)
{
value = *distMatrixTemp++;
if(value < minValue)
minValue = value;
}
/* subtract the smallest element from each element of the column */
distMatrixTemp = distMatrix + nOfRows*col;
while(distMatrixTemp < columnEnd)
*distMatrixTemp++ -= minValue;
}
/* Steps 1 and 2a */
for(col=0; col<nOfColumns; col++)
for(row=0; row<nOfRows; row++)
if(distMatrix[row + nOfRows*col] == 0)
if(!coveredRows[row])
{
starMatrix[row + nOfRows*col] = true;
coveredColumns[col] = true;
coveredRows[row] = true;
break;
}
for(row=0; row<nOfRows; row++)
coveredRows[row] = false;
}
/* move to step 2b */
step2b_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim);
/* compute cost and remove invalid assignments */
computeassignmentcost_dbl(assignment, cost, distMatrixIn, nOfRows);
/* free allocated memory */
free(distMatrix);
free(coveredColumns);
free(coveredRows);
free(starMatrix);
free(primeMatrix);
free(newStarMatrix);
return;
}
/********************************************************/
void buildassignmentvector_dbl(long *assignment, bool *starMatrix, long nOfRows, long nOfColumns)
{
long row, col;
for(row=0; row<nOfRows; row++)
for(col=0; col<nOfColumns; col++)
if(starMatrix[row + nOfRows*col])
{
assignment[row] = col;
break;
}
}
/********************************************************/
void computeassignmentcost_dbl(long *assignment, cell *cost, cell *distMatrix, long nOfRows)
{
long row, col;
for(row=0; row<nOfRows; row++)
{
col = assignment[row];
if(col >= 0)
{
*cost += distMatrix[row + nOfRows*col];
}
}
}
/********************************************************/
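/* Step 2a: cover every column that contains a starred zero, then re-check for
   completion in step 2b. */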
void step2a_dbl(long *assignment, cell *distMatrix, bool *starMatrix, bool *newStarMatrix, bool *primeMatrix, bool *coveredColumns, bool *coveredRows, long nOfRows, long nOfColumns, long minDim)
{
bool *starMatrixTemp, *columnEnd;
long col;
/* cover every column containing a starred zero */
for(col=0; col<nOfColumns; col++)
{
starMatrixTemp = starMatrix + nOfRows*col;
columnEnd = starMatrixTemp + nOfRows;
while(starMatrixTemp < columnEnd){
if(*starMatrixTemp++)
{
coveredColumns[col] = true;
break;
}
}
}
	/* move to step 2b */
step2b_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim);
}
/********************************************************/
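/* Step 2b: if all minDim columns are covered, the assignment is complete;
   otherwise continue with step 3. */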
void step2b_dbl(long *assignment, cell *distMatrix, bool *starMatrix, bool *newStarMatrix, bool *primeMatrix, bool *coveredColumns, bool *coveredRows, long nOfRows, long nOfColumns, long minDim)
{
long col, nOfCoveredColumns;
/* count covered columns */
nOfCoveredColumns = 0;
for(col=0; col<nOfColumns; col++)
if(coveredColumns[col])
nOfCoveredColumns++;
if(nOfCoveredColumns == minDim)
{
/* algorithm finished */
buildassignmentvector_dbl(assignment, starMatrix, nOfRows, nOfColumns);
}
else
{
/* move to step 3 */
step3_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim);
}
}
/********************************************************/
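/* Step 3: prime uncovered zeros. A primed zero with no starred zero in its row
   starts an augmenting path (step 4); otherwise cover its row and uncover the
   starred zero's column. When no uncovered zeros remain, go to step 5. */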
void step3_dbl(long *assignment, cell *distMatrix, bool *starMatrix, bool *newStarMatrix, bool *primeMatrix, bool *coveredColumns, bool *coveredRows, long nOfRows, long nOfColumns, long minDim)
{
bool zerosFound;
long row, col, starCol;
zerosFound = true;
while(zerosFound)
{
zerosFound = false;
for(col=0; col<nOfColumns; col++)
if(!coveredColumns[col])
for(row=0; row<nOfRows; row++)
if((!coveredRows[row]) && (distMatrix[row + nOfRows*col] == 0))
{
/* prime zero */
primeMatrix[row + nOfRows*col] = true;
/* find starred zero in current row */
for(starCol=0; starCol<nOfColumns; starCol++)
if(starMatrix[row + nOfRows*starCol])
break;
if(starCol == nOfColumns) /* no starred zero found */
{
/* move to step 4 */
step4_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim, row, col);
return;
}
else
{
coveredRows[row] = true;
coveredColumns[starCol] = false;
zerosFound = true;
break;
}
}
}
/* move to step 5 */
step5_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim);
}
/********************************************************/
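/* Step 4: follow the alternating path of starred and primed zeros starting at
   (row, col), swap stars along it, clear all primes and row covers, then
   return to step 2a. */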
void step4_dbl(long *assignment, cell *distMatrix, bool *starMatrix, bool *newStarMatrix, bool *primeMatrix, bool *coveredColumns, bool *coveredRows, long nOfRows, long nOfColumns, long minDim, long row, long col)
{
long n, starRow, starCol, primeRow, primeCol;
long nOfElements = nOfRows*nOfColumns;
/* create a temporary copy of starMatrix */
memcpy(newStarMatrix, starMatrix, nOfElements * sizeof(bool));
/* star current zero */
newStarMatrix[row + nOfRows*col] = true;
/* find starred zero in current column */
starCol = col;
for(starRow=0; starRow<nOfRows; starRow++)
if(starMatrix[starRow + nOfRows*starCol])
break;
while(starRow<nOfRows)
{
/* unstar the starred zero */
newStarMatrix[starRow + nOfRows*starCol] = false;
/* find primed zero in current row */
primeRow = starRow;
for(primeCol=0; primeCol<nOfColumns; primeCol++)
if(primeMatrix[primeRow + nOfRows*primeCol])
break;
/* star the primed zero */
newStarMatrix[primeRow + nOfRows*primeCol] = true;
/* find starred zero in current column */
starCol = primeCol;
for(starRow=0; starRow<nOfRows; starRow++)
if(starMatrix[starRow + nOfRows*starCol])
break;
}
/* use temporary copy as new starMatrix */
/* delete all primes, uncover all rows */
for(n=0; n<nOfElements; n++)
{
primeMatrix[n] = false;
starMatrix[n] = newStarMatrix[n];
}
for(n=0; n<nOfRows; n++)
coveredRows[n] = false;
/* move to step 2a */
step2a_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim);
}
/********************************************************/
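/* Step 5: find the smallest uncovered value h, add it to every covered row and
   subtract it from every uncovered column, then return to step 3. */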
void step5_dbl(long *assignment, cell *distMatrix, bool *starMatrix, bool *newStarMatrix, bool *primeMatrix, bool *coveredColumns, bool *coveredRows, long nOfRows, long nOfColumns, long minDim)
{
cell h, value;
long row, col;
/* find smallest uncovered element h */
h = cell_max;
for(row=0; row<nOfRows; row++)
if(!coveredRows[row])
for(col=0; col<nOfColumns; col++)
if(!coveredColumns[col])
{
value = distMatrix[row + nOfRows*col];
if(value < h)
h = value;
}
/* add h to each covered row */
for(row=0; row<nOfRows; row++)
if(coveredRows[row])
for(col=0; col<nOfColumns; col++)
distMatrix[row + nOfRows*col] += h;
/* subtract h from each uncovered column */
for(col=0; col<nOfColumns; col++)
if(!coveredColumns[col])
for(row=0; row<nOfRows; row++)
distMatrix[row + nOfRows*col] -= h;
/* move to step 3 */
step3_dbl(assignment, distMatrix, starMatrix, newStarMatrix, primeMatrix, coveredColumns, coveredRows, nOfRows, nOfColumns, minDim);
}
[source: escherba/lsh-hdc, lsh_hdc/assignmentoptimal_dbl.c, license: bsd-3-clause]
JOB_POOL_LINK
-------------
Ninja only: Pool used for linking.
The number of parallel link processes can be limited by defining
pools with the global :prop_gbl:`JOB_POOLS`
property and then specifying the pool name here.
For instance:
.. code-block:: cmake
set_property(TARGET myexe PROPERTY JOB_POOL_LINK two_jobs)
This property is initialized by the value of :variable:`CMAKE_JOB_POOL_LINK`.
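For completeness, a minimal sketch of how the pool itself might be declared
(the pool name ``two_jobs`` and its size are illustrative, not taken from this
document):

.. code-block:: cmake

  set_property(GLOBAL PROPERTY JOB_POOLS two_jobs=2)
  set(CMAKE_JOB_POOL_LINK two_jobs)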
[source: Enseed/Reflect_build, build-clang/msvc/cmake/share/cmake-3.3/Help/prop_tgt/JOB_POOL_LINK.rst, license: apache-2.0]
Start page
====================================
Yes indeed!

[source: djeppe/anax_mvc, app/content/index.md, license: mit]
<?php
/**
 * Chaos allows a programmer to assign random configuration values
 * at runtime.
*
* @category Ash
* @package Ash_Chaos
* @copyright Copyright (c) 2015 August Ash, Inc. (http://www.augustash.com)
*/
/**
* Values model
*
* @category Ash
* @package Ash_Chaos
* @author August Ash Team <[email protected]>
*/
class Ash_Chaos_Model_Values
{
/**
     * Returns a random boolean-like value ("0" or "1") as a string
     *
     * @return string
*/
public function getRandomBoolean()
{
return (string) rand(0,1);
}
}
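<?php
// Usage sketch (not part of the original module), assuming the class is
// autoloaded. In a Magento install it would normally be obtained through
// Mage::getModel() under the alias declared in the module's config.xml,
// which is not shown here.
$values = new Ash_Chaos_Model_Values();
$randomFlag = $values->getRandomBoolean(); // returns "0" or "1"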
[source: augustash/ash_chaos, app/code/community/Ash/Chaos/Model/Values.php, license: mit]
class ApplicationController < ActionController::Base
protect_from_forgery
after_filter :set_csrf_cookie_for_ng
before_filter :set_shopping_cart
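  # AngularJS convention: $http reads the XSRF-TOKEN cookie and sends it back
  # in the X-XSRF-TOKEN request header, which verified_request? checks below.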
def set_csrf_cookie_for_ng
cookies['XSRF-TOKEN'] = form_authenticity_token if verified_request?
end
protected
def verified_request?
super || form_authenticity_token == request.headers['X_XSRF_TOKEN']
end
def set_shopping_cart
@cart = Cart.find(session[:cart]) rescue Cart.create
session[:cart] = @cart.id
end
end
[source: kalyanallampalli/eshop, app/controllers/application_controller.rb, license: mit]
using System.Diagnostics;
namespace System.IO
{
/// <summary>
/// Provides a string parser that may be used instead of String.Split
/// to avoid unnecessary string and array allocations.
/// </summary>
internal struct StringParser
{
/// <summary>The string being parsed.</summary>
private readonly string _buffer;
/// <summary>The separator character used to separate subcomponents of the larger string.</summary>
private readonly char _separator;
/// <summary>true if empty subcomponents should be skipped; false to treat them as valid entries.</summary>
private readonly bool _skipEmpty;
/// <summary>The starting index from which to parse the current entry.</summary>
private int _startIndex;
/// <summary>The ending index that represents the next index after the last character that's part of the current entry.</summary>
private int _endIndex;
/// <summary>Initialize the StringParser.</summary>
/// <param name="buffer">The string to parse.</param>
/// <param name="separator">The separator character used to separate subcomponents of <paramref name="buffer"/>.</param>
/// <param name="skipEmpty">true if empty subcomponents should be skipped; false to treat them as valid entries. Defaults to false.</param>
public StringParser(string buffer, char separator, bool skipEmpty = false)
{
if (buffer == null)
{
throw new ArgumentNullException(nameof(buffer));
}
_buffer = buffer;
_separator = separator;
_skipEmpty = skipEmpty;
_startIndex = -1;
_endIndex = -1;
}
/// <summary>Moves to the next component of the string.</summary>
/// <returns>true if there is a next component to be parsed; otherwise, false.</returns>
public bool MoveNext()
{
if (_buffer == null)
{
throw new InvalidOperationException();
}
while (true)
{
if (_endIndex >= _buffer.Length)
{
_startIndex = _endIndex;
return false;
}
int nextSeparator = _buffer.IndexOf(_separator, _endIndex + 1);
_startIndex = _endIndex + 1;
_endIndex = nextSeparator >= 0 ? nextSeparator : _buffer.Length;
if (!_skipEmpty || _endIndex >= _startIndex + 1)
{
return true;
}
}
}
/// <summary>
/// Moves to the next component of the string. If there isn't one, it throws an exception.
/// </summary>
public void MoveNextOrFail()
{
if (!MoveNext())
{
ThrowForInvalidData();
}
}
/// <summary>
/// Moves to the next component of the string and returns it as a string.
/// </summary>
/// <returns></returns>
public string MoveAndExtractNext()
{
MoveNextOrFail();
return _buffer.Substring(_startIndex, _endIndex - _startIndex);
}
/// <summary>
/// Moves to the next component of the string, which must be enclosed in the only set of top-level parentheses
/// in the string. The extracted value will be everything between (not including) those parentheses.
/// </summary>
/// <returns></returns>
public string MoveAndExtractNextInOuterParens()
{
// Move to the next position
MoveNextOrFail();
// After doing so, we should be sitting at a the opening paren.
if (_buffer[_startIndex] != '(')
{
ThrowForInvalidData();
}
// Since we only allow for one top-level set of parentheses, find the last
// parenthesis in the string; it's paired with the opening one we just found.
int lastParen = _buffer.LastIndexOf(')');
if (lastParen == -1 || lastParen < _startIndex)
{
ThrowForInvalidData();
}
// Extract the contents of the parens, then move our ending position to be after the paren
string result = _buffer.Substring(_startIndex + 1, lastParen - _startIndex - 1);
_endIndex = lastParen + 1;
return result;
}
/// <summary>
/// Gets the current subcomponent of the string as a string.
/// </summary>
public string ExtractCurrent()
{
if (_buffer == null || _startIndex == -1)
{
throw new InvalidOperationException();
}
return _buffer.Substring(_startIndex, _endIndex - _startIndex);
}
/// <summary>Moves to the next component and parses it as an Int32.</summary>
public unsafe int ParseNextInt32()
{
MoveNextOrFail();
bool negative = false;
int result = 0;
fixed (char* bufferPtr = _buffer)
{
char* p = bufferPtr + _startIndex;
char* end = bufferPtr + _endIndex;
if (p == end)
{
ThrowForInvalidData();
}
if (*p == '-')
{
negative = true;
p++;
if (p == end)
{
ThrowForInvalidData();
}
}
while (p != end)
{
int d = *p - '0';
if (d < 0 || d > 9)
{
ThrowForInvalidData();
}
result = negative ? checked((result * 10) - d) : checked((result * 10) + d);
p++;
}
}
Debug.Assert(result == int.Parse(ExtractCurrent()), "Expected manually parsed result to match Parse result");
return result;
}
/// <summary>Moves to the next component and parses it as an Int64.</summary>
public unsafe long ParseNextInt64()
{
MoveNextOrFail();
bool negative = false;
long result = 0;
fixed (char* bufferPtr = _buffer)
{
char* p = bufferPtr + _startIndex;
char* end = bufferPtr + _endIndex;
if (p == end)
{
ThrowForInvalidData();
}
if (*p == '-')
{
negative = true;
p++;
if (p == end)
{
ThrowForInvalidData();
}
}
while (p != end)
{
int d = *p - '0';
if (d < 0 || d > 9)
{
ThrowForInvalidData();
}
result = negative ? checked((result * 10) - d) : checked((result * 10) + d);
p++;
}
}
Debug.Assert(result == long.Parse(ExtractCurrent()), "Expected manually parsed result to match Parse result");
return result;
}
/// <summary>Moves to the next component and parses it as a UInt32.</summary>
public unsafe uint ParseNextUInt32()
{
MoveNextOrFail();
if (_startIndex == _endIndex)
{
ThrowForInvalidData();
}
uint result = 0;
fixed (char* bufferPtr = _buffer)
{
char* p = bufferPtr + _startIndex;
char* end = bufferPtr + _endIndex;
while (p != end)
{
int d = *p - '0';
if (d < 0 || d > 9)
{
ThrowForInvalidData();
}
result = (uint)checked((result * 10) + d);
p++;
}
}
Debug.Assert(result == uint.Parse(ExtractCurrent()), "Expected manually parsed result to match Parse result");
return result;
}
/// <summary>Moves to the next component and parses it as a UInt64.</summary>
public unsafe ulong ParseNextUInt64()
{
MoveNextOrFail();
ulong result = 0;
fixed (char* bufferPtr = _buffer)
{
char* p = bufferPtr + _startIndex;
char* end = bufferPtr + _endIndex;
while (p != end)
{
int d = *p - '0';
if (d < 0 || d > 9)
{
ThrowForInvalidData();
}
result = checked((result * 10ul) + (ulong)d);
p++;
}
}
Debug.Assert(result == ulong.Parse(ExtractCurrent()), "Expected manually parsed result to match Parse result");
return result;
}
/// <summary>Moves to the next component and parses it as a Char.</summary>
public char ParseNextChar()
{
MoveNextOrFail();
if (_endIndex - _startIndex != 1)
{
ThrowForInvalidData();
}
char result = _buffer[_startIndex];
Debug.Assert(result == char.Parse(ExtractCurrent()), "Expected manually parsed result to match Parse result");
return result;
}
internal delegate T ParseRawFunc<T>(string buffer, ref int startIndex, ref int endIndex);
/// <summary>
/// Moves to the next component and hands the raw buffer and indexing data to a selector function
/// that can validate and return the appropriate data from the component.
/// </summary>
internal T ParseRaw<T>(ParseRawFunc<T> selector)
{
MoveNextOrFail();
return selector(_buffer, ref _startIndex, ref _endIndex);
}
/// <summary>
/// Gets the current subcomponent and all remaining components of the string as a string.
/// </summary>
public string ExtractCurrentToEnd()
{
if (_buffer == null || _startIndex == -1)
{
throw new InvalidOperationException();
}
return _buffer.Substring(_startIndex);
}
/// <summary>Throws unconditionally for invalid data.</summary>
private static void ThrowForInvalidData()
{
throw new InvalidDataException();
}
}
}
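// Usage sketch (not part of the original file; the input string, separator and
// field layout below are illustrative). StringParser walks delimited fields in
// place instead of allocating the array that string.Split would create.
namespace System.IO
{
    internal static class StringParserUsageSketch
    {
        internal static void Run()
        {
            var parser = new StringParser("cpu0:1234:5678", ':');
            string name = parser.MoveAndExtractNext(); // "cpu0"
            int user = parser.ParseNextInt32();        // 1234
            long total = parser.ParseNextInt64();      // 5678
            System.Diagnostics.Debug.Assert(name == "cpu0" && user == 1234 && total == 5678L);
        }
    }
}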
[source: fgreinacher/corefx, src/Common/src/System/IO/StringParser.cs, license: mit]
package org.hisp.dhis.webapi.filter;
import java.io.IOException;
import java.util.regex.Pattern;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebFilter;
import javax.servlet.annotation.WebInitParam;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.Assert;
import org.springframework.web.filter.ShallowEtagHeaderFilter;
/**
* <p>
* Subclass of {@link org.springframework.web.filter.ShallowEtagHeaderFilter}
* which allows exclusion of URIs matching a regex.
*
* <p>
* The regex is given as the init-param named 'excludeUriRegex' in the filter
* configuration.
*
* <p>
* Example configuration:
*
* <pre>
* {@code
* <filter>
* <filter-name>ShallowEtagHeaderFilter</filter-name>
* <filter-class>org.hisp.dhis.webapi.filter.ExcludableShallowEtagHeaderFilter</filter-class>
* <init-param>
* <param-name>excludeUriRegex</param-name>
* <param-value>/api/dataValues|/api/dataValues/files</param-value>
* </init-param>
* </filter>
* }
* </pre>
*
* <p>
 * The example exactly matches requests to '/api/dataValues' and
 * '/api/dataValues/files' and excludes them from the filter.
*
* @author Lars Helge Overland
* @author Halvdan Hoem Grelland
*/
@Slf4j
@WebFilter( urlPatterns = {
"/api/*"
}, initParams = {
@WebInitParam( name = "excludeUriRegex", value = "/api/(\\d{2}/)?dataValueSets|/api/(\\d{2}/)?dataValues|/api/(\\d{2}/)?fileResources" )
} )
public class ExcludableShallowEtagHeaderFilter
extends ShallowEtagHeaderFilter
{
private static final String EXCLUDE_URI_REGEX_VAR_NAME = "excludeUriRegex";
private Pattern pattern = null;
@Override
protected void initFilterBean()
throws ServletException
{
FilterConfig filterConfig = getFilterConfig();
String excludeRegex = filterConfig != null ? filterConfig.getInitParameter( EXCLUDE_URI_REGEX_VAR_NAME ) : null;
        Assert.notNull( excludeRegex, String.format(
            "Parameter '%s' must be specified for ExcludableShallowEtagHeaderFilter", EXCLUDE_URI_REGEX_VAR_NAME ) );
pattern = Pattern.compile( excludeRegex );
log.debug( String.format( "ExcludableShallowEtagHeaderFilter initialized with %s: '%s'",
EXCLUDE_URI_REGEX_VAR_NAME, excludeRegex ) );
}
@Override
protected void doFilterInternal( HttpServletRequest request, HttpServletResponse response, FilterChain filterChain )
throws ServletException,
IOException
{
String uri = request.getRequestURI();
boolean match = pattern.matcher( uri ).find();
if ( match )
{
            // Proceed without invoking this filter
            filterChain.doFilter( request, response );
        }
        else
        {
            // Invoke this filter
            super.doFilterInternal( request, response, filterChain );
}
}
}
[source: hispindia/dhis2-Core, dhis-2/dhis-web-api/src/main/java/org/hisp/dhis/webapi/filter/ExcludableShallowEtagHeaderFilter.java, license: bsd-3-clause]
#ifndef JSString_h
#define JSString_h
#include "CallFrame.h"
#include "CommonIdentifiers.h"
#include "Identifier.h"
#include "PropertyDescriptor.h"
#include "PropertySlot.h"
#include "RopeImpl.h"
#include "Structure.h"
namespace JSC {
class JSString;
JSString* jsEmptyString(JSGlobalData*);
JSString* jsEmptyString(ExecState*);
JSString* jsString(JSGlobalData*, const UString&); // returns empty string if passed null string
JSString* jsString(ExecState*, const UString&); // returns empty string if passed null string
JSString* jsSingleCharacterString(JSGlobalData*, UChar);
JSString* jsSingleCharacterString(ExecState*, UChar);
JSString* jsSingleCharacterSubstring(ExecState*, const UString&, unsigned offset);
JSString* jsSubstring(JSGlobalData*, const UString&, unsigned offset, unsigned length);
JSString* jsSubstring(ExecState*, const UString&, unsigned offset, unsigned length);
// Non-trivial strings are two or more characters long.
// These functions are faster than just calling jsString.
JSString* jsNontrivialString(JSGlobalData*, const UString&);
JSString* jsNontrivialString(ExecState*, const UString&);
JSString* jsNontrivialString(JSGlobalData*, const char*);
JSString* jsNontrivialString(ExecState*, const char*);
// Should be used for strings that are owned by an object that will
// likely outlive the JSValue this makes, such as the parse tree or a
// DOM object that contains a UString
JSString* jsOwnedString(JSGlobalData*, const UString&);
JSString* jsOwnedString(ExecState*, const UString&);
class JS_EXPORTCLASS JSString : public JSCell {
public:
friend class JIT;
friend class JSGlobalData;
friend class SpecializedThunkJIT;
friend struct ThunkHelpers;
class RopeBuilder {
public:
RopeBuilder(unsigned fiberCount)
: m_index(0)
, m_rope(RopeImpl::tryCreateUninitialized(fiberCount))
{
}
bool isOutOfMemory() { return !m_rope; }
void append(RopeImpl::Fiber& fiber)
{
ASSERT(m_rope);
m_rope->initializeFiber(m_index, fiber);
}
void append(const UString& string)
{
ASSERT(m_rope);
m_rope->initializeFiber(m_index, string.impl());
}
void append(JSString* jsString)
{
if (jsString->isRope()) {
for (unsigned i = 0; i < jsString->m_fiberCount; ++i)
append(jsString->m_fibers[i]);
} else
append(jsString->string());
}
PassRefPtr<RopeImpl> release()
{
ASSERT(m_index == m_rope->fiberCount());
return m_rope.release();
}
unsigned length() { return m_rope->length(); }
private:
unsigned m_index;
RefPtr<RopeImpl> m_rope;
};
class RopeIterator {
public:
RopeIterator() { }
RopeIterator(RopeImpl::Fiber* fibers, size_t fiberCount)
{
ASSERT(fiberCount);
m_workQueue.append(WorkItem(fibers, fiberCount));
skipRopes();
}
RopeIterator& operator++()
{
WorkItem& item = m_workQueue.last();
ASSERT(!RopeImpl::isRope(item.fibers[item.i]));
if (++item.i == item.fiberCount)
m_workQueue.removeLast();
skipRopes();
return *this;
}
StringImpl* operator*()
{
WorkItem& item = m_workQueue.last();
RopeImpl::Fiber fiber = item.fibers[item.i];
ASSERT(!RopeImpl::isRope(fiber));
return static_cast<StringImpl*>(fiber);
}
bool operator!=(const RopeIterator& other) const
{
return m_workQueue != other.m_workQueue;
}
private:
struct WorkItem {
WorkItem(RopeImpl::Fiber* fibers, size_t fiberCount)
: fibers(fibers)
, fiberCount(fiberCount)
, i(0)
{
}
bool operator!=(const WorkItem& other) const
{
return fibers != other.fibers || fiberCount != other.fiberCount || i != other.i;
}
RopeImpl::Fiber* fibers;
size_t fiberCount;
size_t i;
};
void skipRopes()
{
if (m_workQueue.isEmpty())
return;
while (1) {
WorkItem& item = m_workQueue.last();
RopeImpl::Fiber fiber = item.fibers[item.i];
if (!RopeImpl::isRope(fiber))
break;
RopeImpl* rope = static_cast<RopeImpl*>(fiber);
if (++item.i == item.fiberCount)
m_workQueue.removeLast();
m_workQueue.append(WorkItem(rope->fibers(), rope->fiberCount()));
}
}
Vector<WorkItem, 16> m_workQueue;
};
ALWAYS_INLINE JSString(JSGlobalData* globalData, const UString& value)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(value.length())
, m_value(value)
, m_fiberCount(0)
{
ASSERT(!m_value.isNull());
Heap::heap(this)->reportExtraMemoryCost(value.impl()->cost());
}
enum HasOtherOwnerType { HasOtherOwner };
JSString(JSGlobalData* globalData, const UString& value, HasOtherOwnerType)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(value.length())
, m_value(value)
, m_fiberCount(0)
{
ASSERT(!m_value.isNull());
}
JSString(JSGlobalData* globalData, PassRefPtr<StringImpl> value, HasOtherOwnerType)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(value->length())
, m_value(value)
, m_fiberCount(0)
{
ASSERT(!m_value.isNull());
}
JSString(JSGlobalData* globalData, PassRefPtr<RopeImpl> rope)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(rope->length())
, m_fiberCount(1)
{
m_fibers[0] = rope.leakRef();
}
// This constructor constructs a new string by concatenating s1 & s2.
// This should only be called with fiberCount <= 3.
JSString(JSGlobalData* globalData, unsigned fiberCount, JSString* s1, JSString* s2)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(s1->length() + s2->length())
, m_fiberCount(fiberCount)
{
ASSERT(fiberCount <= s_maxInternalRopeLength);
unsigned index = 0;
appendStringInConstruct(index, s1);
appendStringInConstruct(index, s2);
ASSERT(fiberCount == index);
}
// This constructor constructs a new string by concatenating s1 & s2.
// This should only be called with fiberCount <= 3.
JSString(JSGlobalData* globalData, unsigned fiberCount, JSString* s1, const UString& u2)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(s1->length() + u2.length())
, m_fiberCount(fiberCount)
{
ASSERT(fiberCount <= s_maxInternalRopeLength);
unsigned index = 0;
appendStringInConstruct(index, s1);
appendStringInConstruct(index, u2);
ASSERT(fiberCount == index);
}
// This constructor constructs a new string by concatenating s1 & s2.
// This should only be called with fiberCount <= 3.
JSString(JSGlobalData* globalData, unsigned fiberCount, const UString& u1, JSString* s2)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(u1.length() + s2->length())
, m_fiberCount(fiberCount)
{
ASSERT(fiberCount <= s_maxInternalRopeLength);
unsigned index = 0;
appendStringInConstruct(index, u1);
appendStringInConstruct(index, s2);
ASSERT(fiberCount == index);
}
// This constructor constructs a new string by concatenating v1, v2 & v3.
// This should only be called with fiberCount <= 3 ... which since every
// value must require a fiberCount of at least one implies that the length
// for each value must be exactly 1!
JSString(ExecState* exec, JSValue v1, JSValue v2, JSValue v3)
: JSCell(exec->globalData(), exec->globalData().stringStructure.get())
, m_length(0)
, m_fiberCount(s_maxInternalRopeLength)
{
unsigned index = 0;
appendValueInConstructAndIncrementLength(exec, index, v1);
appendValueInConstructAndIncrementLength(exec, index, v2);
appendValueInConstructAndIncrementLength(exec, index, v3);
ASSERT(index == s_maxInternalRopeLength);
}
// This constructor constructs a new string by concatenating u1 & u2.
JSString(JSGlobalData* globalData, const UString& u1, const UString& u2)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(u1.length() + u2.length())
, m_fiberCount(2)
{
unsigned index = 0;
appendStringInConstruct(index, u1);
appendStringInConstruct(index, u2);
ASSERT(index <= s_maxInternalRopeLength);
}
// This constructor constructs a new string by concatenating u1, u2 & u3.
JSString(JSGlobalData* globalData, const UString& u1, const UString& u2, const UString& u3)
: JSCell(*globalData, globalData->stringStructure.get())
, m_length(u1.length() + u2.length() + u3.length())
, m_fiberCount(s_maxInternalRopeLength)
{
unsigned index = 0;
appendStringInConstruct(index, u1);
appendStringInConstruct(index, u2);
appendStringInConstruct(index, u3);
ASSERT(index <= s_maxInternalRopeLength);
}
~JSString()
{
ASSERT(vptr() == JSGlobalData::jsStringVPtr);
for (unsigned i = 0; i < m_fiberCount; ++i)
RopeImpl::deref(m_fibers[i]);
}
const UString& value(ExecState* exec) const
{
if (isRope())
resolveRope(exec);
return m_value;
}
const UString& tryGetValue() const
{
if (isRope())
resolveRope(0);
return m_value;
}
unsigned length() { return m_length; }
bool getStringPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
bool getStringPropertySlot(ExecState*, unsigned propertyName, PropertySlot&);
bool getStringPropertyDescriptor(ExecState*, const Identifier& propertyName, PropertyDescriptor&);
bool canGetIndex(unsigned i) { return i < m_length; }
JSString* getIndex(ExecState*, unsigned);
JSString* getIndexSlowCase(ExecState*, unsigned);
JSValue replaceCharacter(ExecState*, UChar, const UString& replacement);
static Structure* createStructure(JSGlobalData& globalData, JSValue proto)
{
return Structure::create(globalData, proto, TypeInfo(StringType, OverridesGetOwnPropertySlot | NeedsThisConversion), AnonymousSlotCount, &s_info);
}
private:
JSString(VPtrStealingHackType)
: JSCell(VPtrStealingHack)
, m_fiberCount(0)
{
}
static const ClassInfo s_info;
void resolveRope(ExecState*) const;
void resolveRopeSlowCase(ExecState*, UChar*) const;
void outOfMemory(ExecState*) const;
JSString* substringFromRope(ExecState*, unsigned offset, unsigned length);
void appendStringInConstruct(unsigned& index, const UString& string)
{
StringImpl* impl = string.impl();
impl->ref();
m_fibers[index++] = impl;
}
void appendStringInConstruct(unsigned& index, JSString* jsString)
{
if (jsString->isRope()) {
for (unsigned i = 0; i < jsString->m_fiberCount; ++i) {
RopeImpl::Fiber fiber = jsString->m_fibers[i];
fiber->ref();
m_fibers[index++] = fiber;
}
} else
appendStringInConstruct(index, jsString->string());
}
void appendValueInConstructAndIncrementLength(ExecState* exec, unsigned& index, JSValue v)
{
if (v.isString()) {
ASSERT(v.asCell()->isString());
JSString* s = static_cast<JSString*>(v.asCell());
ASSERT(s->fiberCount() == 1);
appendStringInConstruct(index, s);
m_length += s->length();
} else {
UString u(v.toString(exec));
StringImpl* impl = u.impl();
impl->ref();
m_fibers[index++] = impl;
m_length += u.length();
}
}
virtual JSValue toPrimitive(ExecState*, PreferredPrimitiveType) const;
virtual bool getPrimitiveNumber(ExecState*, double& number, JSValue& value);
virtual bool toBoolean(ExecState*) const;
virtual double toNumber(ExecState*) const;
virtual JSObject* toObject(ExecState*, JSGlobalObject*) const;
virtual UString toString(ExecState*) const;
virtual JSObject* toThisObject(ExecState*) const;
// Actually getPropertySlot, not getOwnPropertySlot (see JSCell).
virtual bool getOwnPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
virtual bool getOwnPropertySlot(ExecState*, unsigned propertyName, PropertySlot&);
virtual bool getOwnPropertyDescriptor(ExecState*, const Identifier&, PropertyDescriptor&);
static const unsigned s_maxInternalRopeLength = 3;
// A string is represented either by a UString or a RopeImpl.
unsigned m_length;
mutable UString m_value;
mutable unsigned m_fiberCount;
mutable FixedArray<RopeImpl::Fiber, s_maxInternalRopeLength> m_fibers;
bool isRope() const { return m_fiberCount; }
UString& string() { ASSERT(!isRope()); return m_value; }
unsigned fiberCount() { return m_fiberCount ? m_fiberCount : 1; }
friend JSValue jsString(ExecState* exec, JSString* s1, JSString* s2);
friend JSValue jsString(ExecState* exec, const UString& u1, JSString* s2);
friend JSValue jsString(ExecState* exec, JSString* s1, const UString& u2);
friend JSValue jsString(ExecState* exec, Register* strings, unsigned count);
friend JSValue jsString(ExecState* exec, JSValue thisValue);
friend JSString* jsSubstring(ExecState* exec, JSString* s, unsigned offset, unsigned length);
};
JSString* asString(JSValue);
// When an object is created from a different DLL, MSVC changes vptr to a "local" one right after invoking a constructor,
// see <http://groups.google.com/group/microsoft.public.vc.language/msg/55cdcefeaf770212>.
// This breaks isJSString(), and we don't need that hack anyway, so we change vptr back to primary one.
// The below function must be called by any inline function that invokes a JSString constructor.
#if COMPILER(MSVC) && !defined(BUILDING_JavaScriptCore)
inline JSString* fixupVPtr(JSGlobalData* globalData, JSString* string) { string->setVPtr(globalData->jsStringVPtr); return string; }
#else
inline JSString* fixupVPtr(JSGlobalData*, JSString* string) { return string; }
#endif
inline JSString* asString(JSValue value)
{
ASSERT(value.asCell()->isString());
return static_cast<JSString*>(value.asCell());
}
inline JSString* jsEmptyString(JSGlobalData* globalData)
{
return globalData->smallStrings.emptyString(globalData);
}
inline JSString* jsSingleCharacterString(JSGlobalData* globalData, UChar c)
{
if (c <= maxSingleCharacterString)
return globalData->smallStrings.singleCharacterString(globalData, c);
return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(&c, 1)));
}
inline JSString* jsSingleCharacterSubstring(ExecState* exec, const UString& s, unsigned offset)
{
JSGlobalData* globalData = &exec->globalData();
ASSERT(offset < static_cast<unsigned>(s.length()));
UChar c = s.characters()[offset];
if (c <= maxSingleCharacterString)
return globalData->smallStrings.singleCharacterString(globalData, c);
return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(StringImpl::create(s.impl(), offset, 1))));
}
inline JSString* jsNontrivialString(JSGlobalData* globalData, const char* s)
{
ASSERT(s);
ASSERT(s[0]);
ASSERT(s[1]);
return fixupVPtr(globalData, new (globalData) JSString(globalData, s));
}
inline JSString* jsNontrivialString(JSGlobalData* globalData, const UString& s)
{
ASSERT(s.length() > 1);
return fixupVPtr(globalData, new (globalData) JSString(globalData, s));
}
inline JSString* JSString::getIndex(ExecState* exec, unsigned i)
{
ASSERT(canGetIndex(i));
if (isRope())
return getIndexSlowCase(exec, i);
ASSERT(i < m_value.length());
return jsSingleCharacterSubstring(exec, m_value, i);
}
inline JSString* jsString(JSGlobalData* globalData, const UString& s)
{
int size = s.length();
if (!size)
return globalData->smallStrings.emptyString(globalData);
if (size == 1) {
UChar c = s.characters()[0];
if (c <= maxSingleCharacterString)
return globalData->smallStrings.singleCharacterString(globalData, c);
}
return fixupVPtr(globalData, new (globalData) JSString(globalData, s));
}
inline JSString* jsSubstring(ExecState* exec, JSString* s, unsigned offset, unsigned length)
{
ASSERT(offset <= static_cast<unsigned>(s->length()));
ASSERT(length <= static_cast<unsigned>(s->length()));
ASSERT(offset + length <= static_cast<unsigned>(s->length()));
JSGlobalData* globalData = &exec->globalData();
if (!length)
return globalData->smallStrings.emptyString(globalData);
if (s->isRope())
return s->substringFromRope(exec, offset, length);
return jsSubstring(globalData, s->m_value, offset, length);
}
inline JSString* jsSubstring(JSGlobalData* globalData, const UString& s, unsigned offset, unsigned length)
{
ASSERT(offset <= static_cast<unsigned>(s.length()));
ASSERT(length <= static_cast<unsigned>(s.length()));
ASSERT(offset + length <= static_cast<unsigned>(s.length()));
if (!length)
return globalData->smallStrings.emptyString(globalData);
if (length == 1) {
UChar c = s.characters()[offset];
if (c <= maxSingleCharacterString)
return globalData->smallStrings.singleCharacterString(globalData, c);
}
return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(StringImpl::create(s.impl(), offset, length)), JSString::HasOtherOwner));
}
inline JSString* jsOwnedString(JSGlobalData* globalData, const UString& s)
{
int size = s.length();
if (!size)
return globalData->smallStrings.emptyString(globalData);
if (size == 1) {
UChar c = s.characters()[0];
if (c <= maxSingleCharacterString)
return globalData->smallStrings.singleCharacterString(globalData, c);
}
return fixupVPtr(globalData, new (globalData) JSString(globalData, s, JSString::HasOtherOwner));
}
inline JSString* jsEmptyString(ExecState* exec) { return jsEmptyString(&exec->globalData()); }
inline JSString* jsString(ExecState* exec, const UString& s) { return jsString(&exec->globalData(), s); }
inline JSString* jsSingleCharacterString(ExecState* exec, UChar c) { return jsSingleCharacterString(&exec->globalData(), c); }
inline JSString* jsSubstring(ExecState* exec, const UString& s, unsigned offset, unsigned length) { return jsSubstring(&exec->globalData(), s, offset, length); }
inline JSString* jsNontrivialString(ExecState* exec, const UString& s) { return jsNontrivialString(&exec->globalData(), s); }
inline JSString* jsNontrivialString(ExecState* exec, const char* s) { return jsNontrivialString(&exec->globalData(), s); }
inline JSString* jsOwnedString(ExecState* exec, const UString& s) { return jsOwnedString(&exec->globalData(), s); }
ALWAYS_INLINE bool JSString::getStringPropertySlot(ExecState* exec, const Identifier& propertyName, PropertySlot& slot)
{
if (propertyName == exec->propertyNames().length) {
slot.setValue(jsNumber(m_length));
return true;
}
bool isStrictUInt32;
unsigned i = propertyName.toUInt32(isStrictUInt32);
if (isStrictUInt32 && i < m_length) {
slot.setValue(getIndex(exec, i));
return true;
}
return false;
}
ALWAYS_INLINE bool JSString::getStringPropertySlot(ExecState* exec, unsigned propertyName, PropertySlot& slot)
{
if (propertyName < m_length) {
slot.setValue(getIndex(exec, propertyName));
return true;
}
return false;
}
inline bool isJSString(JSGlobalData* globalData, JSValue v) { return v.isCell() && v.asCell()->vptr() == globalData->jsStringVPtr; }
// --- JSValue inlines ----------------------------
inline UString JSValue::toString(ExecState* exec) const
{
if (isString())
return static_cast<JSString*>(asCell())->value(exec);
if (isInt32())
return exec->globalData().numericStrings.add(asInt32());
if (isDouble())
return exec->globalData().numericStrings.add(asDouble());
if (isTrue())
return "true";
if (isFalse())
return "false";
if (isNull())
return "null";
if (isUndefined())
return "undefined";
ASSERT(isCell());
return asCell()->toString(exec);
}
inline UString JSValue::toPrimitiveString(ExecState* exec) const
{
ASSERT(!isString());
if (isInt32())
return exec->globalData().numericStrings.add(asInt32());
if (isDouble())
return exec->globalData().numericStrings.add(asDouble());
if (isTrue())
return "true";
if (isFalse())
return "false";
if (isNull())
return "null";
if (isUndefined())
return "undefined";
ASSERT(isCell());
return asCell()->toPrimitive(exec, NoPreference).toString(exec);
}
} // namespace JSC
#endif // JSString_h
[source: mediathread/mdtprint, app/bower_components/phantom/src/qt/src/3rdparty/webkit/Source/JavaScriptCore/runtime/JSString.h, license: mit]
package logic;
import java.io.*;
import javax.swing.JOptionPane;
import javax.xml.transform.*;
import javax.xml.transform.stream.*;
/**
* Class Convert
*
*@author JkmAS Mejstrik Jakub
*@version 1.1.1
*@created 05/2014
*/
public class Convert {
/**
* Constructor.
*/
public Convert(){
}
/**
     * Converts an XML file to HTML using the devices_list.xsl stylesheet
     * @param file the XML file to convert
*/
public void convertXMLtoHTML(File file){
try{
FileLoader fileLoader = new FileLoader();
TransformerFactory transformerFactory = TransformerFactory.newInstance();
Source XSLDoc = new StreamSource(fileLoader.fileLoader("data/convert","devices_list.xsl"));
Source XMLDoc = new StreamSource(file);
String outputFileName = fileLoader.fileLoader("data/convert","devices_list.html");
OutputStream HTMLFile = new FileOutputStream(outputFileName);
Transformer transformer = transformerFactory.newTransformer(XSLDoc);
transformer.transform(XMLDoc, new StreamResult(HTMLFile));
} catch(FileNotFoundException | TransformerException e) {
JOptionPane.showMessageDialog(null,"Can not convert XML to HTML",
"Error during converting" ,JOptionPane.ERROR_MESSAGE);
}
}
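    // Usage sketch (not part of the original class; the input path is illustrative):
    //   new Convert().convertXMLtoHTML(new File("data/devices_list.xml"));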
}

[source: JkmAS/wifiguard, src/logic/Convert.java, license: mit]
"""Tests for tensorflow.learning.training_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework.test_util import TensorFlowTestCase
# Import resource_variable_ops for the variables-to-tensor implicit conversion.
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import training_ops
class TrainingOpsTest(TensorFlowTestCase):
def _toType(self, dtype):
if dtype == np.float16:
return dtypes.float16
elif dtype == np.float32:
return dtypes.float32
elif dtype == np.float64:
return dtypes.float64
elif dtype == np.int32:
return dtypes.int32
elif dtype == np.int64:
return dtypes.int64
else:
assert False, (dtype)
def _testTypes(self, x, alpha, delta, use_gpu=None):
self.setUp()
with self.test_session(use_gpu=use_gpu):
var = variables.Variable(x)
variables.global_variables_initializer().run()
self.assertAllCloseAccordingToType(x, var.eval())
apply_sgd = training_ops.apply_gradient_descent(var, alpha, delta)
out = apply_sgd.eval()
self.assertShapeEqual(out, apply_sgd)
self.assertAllCloseAccordingToType(x - alpha * delta, out)
def testApplyGradientDescent(self):
for (dtype, use_gpu) in itertools.product(
[np.float16, np.float32, np.float64], [False, True]):
x = np.arange(100).astype(dtype)
alpha = np.array(2.0).astype(dtype)
delta = np.arange(100).astype(dtype)
self._testTypes(x, alpha, delta, use_gpu)
def _testTypesForAdagrad(self, x, y, lr, grad, use_gpu=None):
self.setUp()
with self.test_session(use_gpu=use_gpu):
var = variables.Variable(x)
accum = variables.Variable(y)
variables.global_variables_initializer().run()
self.assertAllCloseAccordingToType(x, var.eval())
apply_adagrad = training_ops.apply_adagrad(var, accum, lr, grad)
out = apply_adagrad.eval()
self.assertShapeEqual(out, apply_adagrad)
self.assertAllCloseAccordingToType(x - lr * grad * (y + grad * grad)**
(-0.5), out)
self.assertAllCloseAccordingToType(y + grad * grad, accum.eval())
def _testTypesForFtrl(self,
x,
y,
z,
lr,
grad,
use_gpu=None,
l1=0.0,
l2=0.0,
lr_power=-0.5):
self.setUp()
with self.test_session(use_gpu=use_gpu):
var = variables.Variable(x)
accum = variables.Variable(y)
linear = variables.Variable(z)
variables.global_variables_initializer().run()
self.assertAllCloseAccordingToType(x, var.eval())
apply_ftrl = training_ops.apply_ftrl(var, accum, linear, grad, lr, l1, l2,
lr_power)
out = apply_ftrl.eval()
self.assertShapeEqual(out, apply_ftrl)
accum_update = y + grad * grad
linear_update = z + grad - (accum_update**(-lr_power) - y**
(-lr_power)) / lr * x
quadratic = 1.0 / (accum_update**(lr_power) * lr) + 2 * l2
expected_out = np.array([(
np.sign(linear_update[i]) * l1 - linear_update[i]) / (quadratic[i]) if
np.abs(linear_update[i]) > l1 else 0.0
for i in range(linear_update.size)])
self.assertAllCloseAccordingToType(accum_update, accum.eval())
if x.dtype == np.float16:
# The calculations here really are not very precise in float16.
self.assertAllClose(linear_update, linear.eval(), rtol=2e-2, atol=2e-2)
self.assertAllClose(expected_out, out, rtol=2e-2, atol=2e-2)
elif x.dtype == np.float32:
        # The calculations here are not sufficiently precise in float32.
self.assertAllClose(linear_update, linear.eval(), rtol=1e-5, atol=1e-5)
self.assertAllClose(expected_out, out, rtol=1e-5, atol=1e-5)
else:
self.assertAllClose(linear_update, linear.eval())
self.assertAllClose(expected_out, out)
def testApplyAdagrad(self):
for (dtype, use_gpu) in itertools.product(
[np.float16, np.float32, np.float64], [False, True]):
x = np.arange(100).astype(dtype)
y = np.arange(1, 101).astype(dtype)
lr = np.array(2.0).astype(dtype)
grad = np.arange(100).astype(dtype)
self._testTypesForAdagrad(x, y, lr, grad, use_gpu)
def testApplyFtrl(self):
for dtype in [np.float16, np.float32, np.float64]:
x = np.arange(100).astype(dtype)
y = np.arange(1, 101).astype(dtype)
z = np.arange(102, 202).astype(dtype)
lr = np.array(2.0).astype(dtype)
l1 = np.array(3.0).astype(dtype)
l2 = np.array(4.0).astype(dtype)
grad = np.arange(100).astype(dtype)
self._testTypesForFtrl(x, y, z, lr, grad, use_gpu=False, l1=l1, l2=l2)
def _testTypesForSparseAdagrad(self, x, y, lr, grad, indices):
self.setUp()
with self.test_session(use_gpu=False):
var = variables.Variable(x)
accum = variables.Variable(y)
variables.global_variables_initializer().run()
self.assertAllCloseAccordingToType(x, var.eval())
sparse_apply_adagrad = training_ops.sparse_apply_adagrad(
var, accum, lr, grad,
constant_op.constant(indices, self._toType(indices.dtype)))
out = sparse_apply_adagrad.eval()
self.assertShapeEqual(out, sparse_apply_adagrad)
for (i, index) in enumerate(indices):
self.assertAllCloseAccordingToType(
x[index] - lr * grad[i] * (y[index] + grad[i] * grad[i])**(-0.5),
var.eval()[index])
self.assertAllCloseAccordingToType(y[index] + grad[i] * grad[i],
accum.eval()[index])
def _testTypesForSparseFtrl(self,
x,
y,
z,
lr,
grad,
indices,
l1=0.0,
l2=0.0,
lr_power=-0.5):
self.setUp()
with self.test_session(use_gpu=False):
var = variables.Variable(x)
accum = variables.Variable(y)
linear = variables.Variable(z)
variables.global_variables_initializer().run()
self.assertAllCloseAccordingToType(x, var.eval())
sparse_apply_ftrl = training_ops.sparse_apply_ftrl(
var,
accum,
linear,
grad,
constant_op.constant(indices, self._toType(indices.dtype)),
lr,
l1,
l2,
lr_power=lr_power)
out = sparse_apply_ftrl.eval()
self.assertShapeEqual(out, sparse_apply_ftrl)
for (i, index) in enumerate(indices):
self.assertAllCloseAccordingToType(x[index] - lr * grad[i] *
(y[index] + grad[i] * grad[i])**
(lr_power), var.eval()[index])
self.assertAllCloseAccordingToType(y[index] + grad[i] * grad[i],
accum.eval()[index])
def testSparseApplyAdagrad(self):
for (dtype, index_type) in itertools.product(
[np.float16, np.float32, np.float64], [np.int32, np.int64]):
x_val = [np.arange(10), np.arange(10, 20), np.arange(20, 30)]
y_val = [np.arange(1, 11), np.arange(11, 21), np.arange(21, 31)]
x = np.array(x_val).astype(dtype)
y = np.array(y_val).astype(dtype)
lr = np.array(2.0).astype(dtype)
grad_val = [np.arange(10), np.arange(10)]
grad = np.array(grad_val).astype(dtype)
indices = np.array([0, 2]).astype(index_type)
self._testTypesForSparseAdagrad(x, y, lr, grad, indices)
def testSparseApplyAdagradDim1(self):
for (dtype, index_type) in itertools.product(
[np.float16, np.float32, np.float64], [np.int32, np.int64]):
x_val = [[1.0], [2.0], [3.0]]
y_val = [[4.0], [5.0], [6.0]]
x = np.array(x_val).astype(dtype)
y = np.array(y_val).astype(dtype)
lr = np.array(2.0).astype(dtype)
grad_val = [[1.5], [2.5]]
grad = np.array(grad_val).astype(dtype)
indices = np.array([0, 2]).astype(index_type)
self._testTypesForSparseAdagrad(x, y, lr, grad, indices)
def testSparseApplyFtrlDim1(self):
for (dtype, index_type) in itertools.product(
[np.float16, np.float32, np.float64], [np.int32, np.int64]):
x_val = [[0.0], [0.0], [0.0]]
y_val = [[4.0], [5.0], [6.0]]
z_val = [[0.0], [0.0], [0.0]]
x = np.array(x_val).astype(dtype)
y = np.array(y_val).astype(dtype)
z = np.array(z_val).astype(dtype)
lr = np.array(2.0).astype(dtype)
grad_val = [[1.5], [2.5]]
grad = np.array(grad_val).astype(dtype)
indices = np.array([0, 2]).astype(index_type)
self._testTypesForSparseFtrl(x, y, z, lr, grad, indices)
def testApplyAdam(self):
for dtype, use_gpu in itertools.product(
[np.float16, np.float32, np.float64], [False, True]):
var = np.arange(100).astype(dtype)
m = np.arange(1, 101).astype(dtype)
v = np.arange(101, 201).astype(dtype)
grad = np.arange(100).astype(dtype)
self._testTypesForAdam(var, m, v, grad, use_gpu)
def _testTypesForAdam(self, var, m, v, grad, use_gpu):
self.setUp()
with self.test_session(use_gpu=use_gpu):
var_t = variables.Variable(var)
m_t = variables.Variable(m)
v_t = variables.Variable(v)
t = 1
beta1 = np.array(0.9, dtype=var.dtype)
beta2 = np.array(0.999, dtype=var.dtype)
beta1_power = beta1**t
beta2_power = beta2**t
lr = np.array(0.001, dtype=var.dtype)
epsilon = np.array(1e-8, dtype=var.dtype)
beta1_t = constant_op.constant(beta1, self._toType(var.dtype), [])
beta2_t = constant_op.constant(beta2, self._toType(var.dtype), [])
beta1_power_t = variables.Variable(beta1_power)
beta2_power_t = variables.Variable(beta2_power)
lr_t = constant_op.constant(lr, self._toType(var.dtype), [])
epsilon_t = constant_op.constant(epsilon, self._toType(var.dtype), [])
variables.global_variables_initializer().run()
self.assertAllCloseAccordingToType(var, var_t.eval())
new_var, _, _ = self._adamUpdateNumpy(var, grad, t, m, v, lr, beta1,
beta2, epsilon)
apply_adam = training_ops.apply_adam(var_t, m_t, v_t, beta1_power_t,
beta2_power_t, lr_t, beta1_t,
beta2_t, epsilon_t, grad)
out = apply_adam.eval()
self.assertShapeEqual(out, apply_adam)
self.assertAllCloseAccordingToType(new_var, out)
def _adamUpdateNumpy(self, param, g_t, t, m, v, alpha, beta1, beta2, epsilon):
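    # NumPy reference for the standard Adam update at timestep t:
    #   alpha_t = alpha * sqrt(1 - beta2^t) / (1 - beta1^t)
    #   m_t     = beta1 * m + (1 - beta1) * g_t
    #   v_t     = beta2 * v + (1 - beta2) * g_t^2
    #   param_t = param - alpha_t * m_t / (sqrt(v_t) + epsilon)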
alpha_t = alpha * np.sqrt(1 - beta2**t) / (1 - beta1**t)
m_t = beta1 * m + (1 - beta1) * g_t
v_t = beta2 * v + (1 - beta2) * g_t * g_t
param_t = param - alpha_t * m_t / (np.sqrt(v_t) + epsilon)
return param_t, m_t, v_t
if __name__ == '__main__':
googletest.main()
| {'content_hash': '9411523374be33ea26929a8a7db489e1', 'timestamp': '', 'source': 'github', 'line_count': 286, 'max_line_length': 88, 'avg_line_length': 40.52097902097902, 'alnum_prop': 0.5825351626542411, 'repo_name': 'zasdfgbnm/tensorflow', 'id': 'd131a1106759d7c2f47be4b28382e96d15d0b622', 'size': '12278', 'binary': False, 'copies': '57', 'ref': 'refs/heads/master', 'path': 'tensorflow/python/training/training_ops_test.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '9096'}, {'name': 'C', 'bytes': '341181'}, {'name': 'C++', 'bytes': '37811513'}, {'name': 'CMake', 'bytes': '193934'}, {'name': 'Go', 'bytes': '1061098'}, {'name': 'HTML', 'bytes': '4680032'}, {'name': 'Java', 'bytes': '551109'}, {'name': 'Jupyter Notebook', 'bytes': '1940883'}, {'name': 'LLVM', 'bytes': '6536'}, {'name': 'Makefile', 'bytes': '48122'}, {'name': 'Objective-C', 'bytes': '12456'}, {'name': 'Objective-C++', 'bytes': '94385'}, {'name': 'PHP', 'bytes': '1556'}, {'name': 'Perl', 'bytes': '6179'}, {'name': 'Perl 6', 'bytes': '1357'}, {'name': 'PureBasic', 'bytes': '25356'}, {'name': 'Python', 'bytes': '32936295'}, {'name': 'Ruby', 'bytes': '533'}, {'name': 'Shell', 'bytes': '425164'}]} |
<input type="hidden"
<?php echo $view['form']->attributes() ?>
name="<?php echo $view->escape($full_name) ?>"
value="<?php echo $view->escape($value) ?>"
<?php if ($read_only): ?>disabled="disabled"<?php endif ?>
/> | {'content_hash': '063678e4746a8cf7ef6fce1a75e262aa', 'timestamp': '', 'source': 'github', 'line_count': 6, 'max_line_length': 62, 'avg_line_length': 38.5, 'alnum_prop': 0.5757575757575758, 'repo_name': 'VelvetMirror/doctrine', 'id': '4bc2154f101d47a0601db2ade1c1fef8f8b031af', 'size': '231', 'binary': False, 'copies': '15', 'ref': 'refs/heads/master', 'path': 'vendor/symfony/src/Symfony/Bundle/FrameworkBundle/Resources/views/Form/hidden_widget.html.php', 'mode': '33188', 'license': 'mit', 'language': []} |
module Asana
class HttpClient
# Internal: Represents a response from the Asana API.
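    #
    # Examples (illustrative; values are placeholders)
    #
    #   response = Response.new(faraday_response)
    #   response.status # => 200
    #   response.body   # => { "data" => [...] }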
class Response
# Public:
# Returns a [Faraday::Env] object for debugging.
attr_reader :faraday_env
# Public:
# Returns the [Integer] status code of the response.
attr_reader :status
# Public:
# Returns the [Hash] representing the parsed JSON body.
attr_reader :body
# Public: Wraps a Faraday response.
#
# faraday_response - [Faraday::Response] the Faraday response to wrap.
def initialize(faraday_response)
@faraday_env = faraday_response.env
@status = faraday_env.status
@body = faraday_env.body
end
# Public:
# Returns a [String] representation of the response.
def to_s
"#<Asana::HttpClient::Response status=#{@status} body=#{@body}>"
end
alias_method :inspect, :to_s
end
end
end
| {'content_hash': 'a0a74182f191ac871f1b64b1a821e462', 'timestamp': '', 'source': 'github', 'line_count': 32, 'max_line_length': 76, 'avg_line_length': 29.5, 'alnum_prop': 0.611228813559322, 'repo_name': 'cocktail-io/ruby-asana', 'id': '1d7c235ded1758e78a203bd99c26bfc0d0eecb76', 'size': '944', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'lib/asana/http_client/response.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'JavaScript', 'bytes': '1232'}, {'name': 'Ruby', 'bytes': '137988'}, {'name': 'Shell', 'bytes': '126'}]} |
package org.apache.hadoop.conf;
/** Base class for things that may be configured with a {@link Configuration}. */
public class Configured implements Configurable {
private Configuration conf;
/** Construct a Configured. */
public Configured(Configuration conf) {
setConf(conf);
}
// inherit javadoc
public void setConf(Configuration conf) {
this.conf = conf;
}
// inherit javadoc
public Configuration getConf() {
return conf;
}
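  // Illustrative usage (not part of this class; Configuration's no-arg constructor is assumed):
  //   Configuration conf = new Configuration();
  //   Configured configured = new Configured(conf);
  //   Configuration same = configured.getConf();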
}
| {'content_hash': 'e857a8fa814a516f62e9708b91915b72', 'timestamp': '', 'source': 'github', 'line_count': 25, 'max_line_length': 81, 'avg_line_length': 18.8, 'alnum_prop': 0.6936170212765957, 'repo_name': 'moreus/hadoop', 'id': '609c6d1fbe59a4cf7db1fa1689ed45d2583cd474', 'size': '1276', 'binary': False, 'copies': '5', 'ref': 'refs/heads/master', 'path': 'hadoop-0.11.1/src/java/org/apache/hadoop/conf/Configured.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'AspectJ', 'bytes': '31146'}, {'name': 'C', 'bytes': '1067911'}, {'name': 'C++', 'bytes': '521803'}, {'name': 'CSS', 'bytes': '157107'}, {'name': 'Erlang', 'bytes': '232'}, {'name': 'Java', 'bytes': '43984644'}, {'name': 'JavaScript', 'bytes': '87848'}, {'name': 'Perl', 'bytes': '18992'}, {'name': 'Python', 'bytes': '32767'}, {'name': 'Shell', 'bytes': '1369281'}, {'name': 'TeX', 'bytes': '19322'}, {'name': 'XSLT', 'bytes': '185841'}]} |
The 10 GB of Dropbox space or $10 of Skype credit is limited to K-12 faculty, educators of after-school clubs, and education organizations. The 10 GB of free space must be added to an existing Dropbox account. Limit of one per organizer.
Every organizer needs to register for the Hour of Code in order to qualify for the Dropbox space or Skype credit. If your whole school is participating in the event, then every educator needs to sign up as an organizer in order to qualify for the prizes.
Code.org will contact organizers after the Hour of Code (Dec. 8-14) to provide instructions for claiming the Dropbox space or Skype credit.
<% if @country == 'us' %>
## Video chat with a guest speaker:
Prize limited to K-12 classrooms in the U.S. and Canada only. To qualify, sign up your classroom by November 14, 2014. Code.org will select winning classrooms, provide a time slot for the web chat, and work with the appropriate teacher to set up the technology details. Your whole school does not need to apply to qualify for this prize. Both public and private schools are eligible to win.
## Class-set of laptops (or $10,000 for other technology):
Prize limited to public K-12 U.S. schools only. To qualify, your entire school must register for the Hour of Code by November 14, 2014. One school in every U.S. state will receive a class-set of computers. Code.org will select and notify winners via email by December 1, 2014.
To clarify, this is not a sweepstakes or a contest involving pure chance.
1) There is no financial stake or risk involved in applying - any school or classroom may participate, without any payment to Code.org or any other organization
2) Winners will only be selected among schools where the entire classroom (or school) participates in an Hour of Code, which involves a test of the students' and teachers' collective skill.
<% end %> | {'content_hash': '81268f9cdf72190f6dac403f01095b09', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 390, 'avg_line_length': 82.6086956521739, 'alnum_prop': 0.7952631578947369, 'repo_name': 'cloud3edu/code-dot-org-old', 'id': '4efca94766a4fb4ef3daf4266f823267c6e28887', 'size': '2028', 'binary': False, 'copies': '9', 'ref': 'refs/heads/staging', 'path': 'pegasus/sites.v3/hourofcode.com/public/lv/prizes-terms.md', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'ApacheConf', 'bytes': '93'}, {'name': 'CSS', 'bytes': '2350914'}, {'name': 'Cucumber', 'bytes': '51437'}, {'name': 'Diff', 'bytes': '10815'}, {'name': 'Emacs Lisp', 'bytes': '2410'}, {'name': 'HTML', 'bytes': '9420250'}, {'name': 'JavaScript', 'bytes': '53168309'}, {'name': 'PHP', 'bytes': '259591'}, {'name': 'Perl', 'bytes': '555'}, {'name': 'Processing', 'bytes': '11068'}, {'name': 'Prolog', 'bytes': '679'}, {'name': 'Python', 'bytes': '124866'}, {'name': 'Racket', 'bytes': '131852'}, {'name': 'Ruby', 'bytes': '1166799'}, {'name': 'Shell', 'bytes': '16440'}]} |
extern long __CFConstantStringClassReference[];
#elif defined(DEFN)
long __CFConstantStringClassReference[32];
#endif
#else
#if defined(EXTERN)
extern long __CFConstantStringClassReference[];
#else
long __CFConstantStringClassReference[];
#endif
#endif
typedef struct __CFString *CFStringRef;
const CFStringRef string = (CFStringRef)__builtin___CFStringMakeConstantString("string");
// CHECK-CF-IN-CF-DECL: @__CFConstantStringClassReference = external global [0 x i32]
// CHECK-CF-IN-CF-DEFN: @__CFConstantStringClassReference = common global [32 x i64] zeroinitializer, align 16
// CHECK-CF: @__CFConstantStringClassReference = common global [1 x i64] zeroinitializer, align 8
// CHECK-CF-EXTERN: @__CFConstantStringClassReference = external global [0 x i32]
// CHECK-CF-EXTERN: @.str = private unnamed_addr constant [7 x i8] c"string\00", section ".rodata", align 1
| {'content_hash': '7066aef303448343d29197c6b31ec970', 'timestamp': '', 'source': 'github', 'line_count': 21, 'max_line_length': 110, 'avg_line_length': 41.476190476190474, 'alnum_prop': 0.7657864523536165, 'repo_name': 'epiqc/ScaffCC', 'id': '8b90d7c0c1d80f2e42e6ac1375e68e31c6490549', 'size': '1928', 'binary': False, 'copies': '10', 'ref': 'refs/heads/master', 'path': 'clang/test/CodeGen/cfstring-elf-cfbuild-x86_64.c', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'Assembly', 'bytes': '3493637'}, {'name': 'Batchfile', 'bytes': '753'}, {'name': 'C', 'bytes': '3272269'}, {'name': 'C++', 'bytes': '56117969'}, {'name': 'CMake', 'bytes': '204481'}, {'name': 'CSS', 'bytes': '55547'}, {'name': 'Cuda', 'bytes': '5785'}, {'name': 'Emacs Lisp', 'bytes': '20994'}, {'name': 'HTML', 'bytes': '3200864'}, {'name': 'JavaScript', 'bytes': '17391'}, {'name': 'LLVM', 'bytes': '10223782'}, {'name': 'M', 'bytes': '578'}, {'name': 'M4', 'bytes': '189436'}, {'name': 'MATLAB', 'bytes': '22305'}, {'name': 'Makefile', 'bytes': '413012'}, {'name': 'Mercury', 'bytes': '1195'}, {'name': 'OCaml', 'bytes': '343061'}, {'name': 'Objective-C', 'bytes': '18301489'}, {'name': 'Objective-C++', 'bytes': '317800'}, {'name': 'PHP', 'bytes': '1128'}, {'name': 'Perl', 'bytes': '200404'}, {'name': 'Python', 'bytes': '1043548'}, {'name': 'Roff', 'bytes': '18799'}, {'name': 'Shell', 'bytes': '566849'}, {'name': 'Vim script', 'bytes': '27002'}]} |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.1008
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace TestApp.Properties {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "10.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default {
get {
return defaultInstance;
}
}
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Configuration.SettingsProviderAttribute(typeof(Palaso.Settings.CrossPlatformSettingsProvider))]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("True")]
public bool NeedsUpgrade {
get {
return ((bool)(this["NeedsUpgrade"]));
}
set {
this["NeedsUpgrade"] = value;
}
}
[global::System.Configuration.UserScopedSettingAttribute()]
[global::System.Configuration.SettingsProviderAttribute(typeof(Palaso.Settings.CrossPlatformSettingsProvider))]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public global::Palaso.Reporting.ReportingSettings ReportingSettings {
get {
return ((global::Palaso.Reporting.ReportingSettings)(this["ReportingSettings"]));
}
set {
this["ReportingSettings"] = value;
}
}
}
}
| {'content_hash': 'd8b6495ea3e6d54ae3941ba5938d8e52', 'timestamp': '', 'source': 'github', 'line_count': 51, 'max_line_length': 148, 'avg_line_length': 37.09803921568628, 'alnum_prop': 0.6908033826638478, 'repo_name': 'phillip-hopper/libpalaso', 'id': 'b5bf5a3d9ce56ded12943d83dfcd50067cb2ac42', 'size': '1894', 'binary': False, 'copies': '6', 'ref': 'refs/heads/master', 'path': 'TestApps/ReportingTest/Properties/Settings1.Designer.cs', 'mode': '33188', 'license': 'mit', 'language': []} |
/// Copyright (c) 2012 Ecma International. All rights reserved.
/// Ecma International makes this code available under the terms and conditions set
/// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
/// "Use Terms"). Any redistribution of this code must retain the above
/// copyright and this notice and otherwise comply with the Use Terms.
/**
* @path ch15/15.2/15.2.3/15.2.3.6/15.2.3.6-4-383.js
* @description ES5 Attributes - [[Value]] attribute of data property is a string
*/
function testcase() {
var obj = {};
Object.defineProperty(obj, "prop", {
value: "ThisIsAString"
});
var desc = Object.getOwnPropertyDescriptor(obj, "prop");
return obj.prop === "ThisIsAString" && desc.value === "ThisIsAString";
}
runTestCase(testcase);
| {'content_hash': 'dddc7e86b696c9cb8c884cf1ed582890', 'timestamp': '', 'source': 'github', 'line_count': 23, 'max_line_length': 83, 'avg_line_length': 37.17391304347826, 'alnum_prop': 0.6456140350877193, 'repo_name': 'fdecampredon/jsx-typescript-old-version', 'id': '5df8b370c0895dc0e224aff3d4bec1597a65b8cc', 'size': '855', 'binary': False, 'copies': '5', 'ref': 'refs/heads/jsx', 'path': 'tests/Fidelity/test262/suite/ch15/15.2/15.2.3/15.2.3.6/15.2.3.6-4-383.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Elixir', 'bytes': '3294'}, {'name': 'JavaScript', 'bytes': '23705444'}, {'name': 'Shell', 'bytes': '386'}, {'name': 'TypeScript', 'bytes': '18404576'}]} |
/*global jQuery */
/*!
* Lettering.JS 0.6.1
*
* Copyright 2010, Dave Rupert http://daverupert.com
* Released under the WTFPL license
* http://sam.zoy.org/wtfpl/
*
* Thanks to Paul Irish - http://paulirish.com - for the feedback.
*
* Date: Mon Sep 20 17:14:00 2010 -0600
*/
(function($){
function injector(t, splitter, klass, after) {
var a = t.text().split(splitter), inject = '';
if (a.length) {
$(a).each(function(i, item) {
inject += '<span class="'+klass+(i+1)+'">'+item+'</span>'+after;
});
t.empty().append(inject);
}
}
var methods = {
init : function() {
return this.each(function() {
injector($(this), '', 'char', '');
});
},
words : function() {
return this.each(function() {
injector($(this), ' ', 'word', ' ');
});
},
lines : function() {
return this.each(function() {
var r = "eefec303079ad17405c889e092e105b0";
// Because it's hard to split a <br/> tag consistently across browsers,
				// (*ahem* IE *ahem*), we replace all <br/> instances with an md5 hash
// (of the word "split"). If you're trying to use this plugin on that
// md5 hash string, it will fail because you're being ridiculous.
injector($(this).children("br").replaceWith(r).end(), r, 'line', '');
});
}
};
$.fn.lettering = function( method ) {
// Method calling logic
if ( method && methods[method] ) {
return methods[ method ].apply( this, [].slice.call( arguments, 1 ));
} else if ( method === 'letters' || ! method ) {
return methods.init.apply( this, [].slice.call( arguments, 0 ) ); // always pass an array
}
$.error( 'Method ' + method + ' does not exist on jQuery.lettering' );
return this;
};
})(jQuery);
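// Illustrative usage of the plugin above (selectors are placeholders):
//   $("h1").lettering();          // wrap each character in <span class="char1">, <span class="char2">, ...
//   $("h1").lettering("words");   // wrap each word in <span class="word1">, ...
//   $("h1").lettering("lines");   // wrap each line (split on <br/>) in <span class="line1">, ...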
| {'content_hash': '6025025ff34aa85056694319fc971b92', 'timestamp': '', 'source': 'github', 'line_count': 66, 'max_line_length': 92, 'avg_line_length': 25.87878787878788, 'alnum_prop': 0.5960187353629977, 'repo_name': 'jbbarquero/building-microservices', 'id': 'fba4aaa4349129bb4e698fda1fdaee6b1a962514', 'size': '1708', 'binary': False, 'copies': '12', 'ref': 'refs/heads/master', 'path': 'livelessons-web/livelessons-web-resources/src/main/resources/resources/js/jquery.lettering.js', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '6743'}, {'name': 'Groovy', 'bytes': '110'}, {'name': 'HTML', 'bytes': '9842'}, {'name': 'Java', 'bytes': '119699'}, {'name': 'JavaScript', 'bytes': '24986'}, {'name': 'Shell', 'bytes': '104'}]} |
<div class="tooltip-arrow"></div>
<div class="tooltip-inner" ng-bind="content"></div>
| {'content_hash': '6041b3f56f8f7c35731719f45159662d', 'timestamp': '', 'source': 'github', 'line_count': 2, 'max_line_length': 51, 'avg_line_length': 43.0, 'alnum_prop': 0.686046511627907, 'repo_name': 'msumaran/Framework-Codeigniter', 'id': 'e2dfc0873d9c7424092850df721c4f6705acee77', 'size': '86', 'binary': False, 'copies': '97', 'ref': 'refs/heads/master', 'path': 'static/bower_components/angular-ui-bootstrap/template/tooltip/tooltip-popup.html', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'CSS', 'bytes': '995167'}, {'name': 'HTML', 'bytes': '598648'}, {'name': 'Hack', 'bytes': '2'}, {'name': 'JavaScript', 'bytes': '82559'}, {'name': 'PHP', 'bytes': '1883724'}]} |
from PySide import QtCore, QtGui
from androguard.core import androconf
from androguard.gui.helpers import display2classmethod, class2func, classmethod2display, method2func
class XrefDialogClass(QtGui.QDialog):
    '''Dialog holding our Xref listview for a class.
       parent: SourceWindow that started the new XrefDialog
       win: main window, used to open a source window from a selected xref
       current_class: the class we are looking for xrefs of, used for the window title
       class_analysis: the analysis object whose xrefs fill the list view
    '''
def __init__(self, parent=None, win=None, current_class=None, class_analysis=None):
super(XrefDialogClass, self).__init__(parent)
self.current_class = current_class
self.class_analysis = class_analysis
title = "Xrefs for the class %s" % current_class
self.setWindowTitle(title)
xrefs_list = []
        ref_kind_map = {0: "Class instantiation", 1: "Class reference"}
xrefs_from = class_analysis.get_xref_from()
for ref_class in xrefs_from:
for ref_kind, ref_method in xrefs_from[ref_class]:
xrefs_list.append(('From', ref_kind_map[ref_kind], ref_method, ref_class.get_vm_class()))
xrefs_to = class_analysis.get_xref_to()
for ref_class in xrefs_to:
for ref_kind, ref_method in xrefs_to[ref_class]:
xrefs_list.append(('To', ref_kind_map[ref_kind], ref_method, ref_class.get_vm_class()))
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
xreflayout = QtGui.QGridLayout()
xrefwin = XrefListView(self, win=win, xrefs=xrefs_list, headers=["Origin", "Kind", "Method"])
xreflayout.addWidget(xrefwin, 0, 0)
buttonsLayout = QtGui.QHBoxLayout()
buttonsLayout.addStretch(1)
buttonsLayout.addWidget(closeButton)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addLayout(xreflayout)
mainLayout.addLayout(buttonsLayout)
self.setLayout(mainLayout)
class XrefDialogMethod(QtGui.QDialog):
def __init__(self, parent=None, win=None, current_class=None, class_analysis=None, method_analysis=None):
super(XrefDialogMethod, self).__init__(parent)
self.current_class = current_class
self.class_analysis = class_analysis
self.method_analysis = method_analysis
title = "Xrefs for the method %s" % self.method_analysis.method
self.setWindowTitle(title)
xrefs_list = []
xrefs_from = self.method_analysis.get_xref_from()
for ref_class, ref_method in xrefs_from:
xrefs_list.append(('From', ref_method, ref_class.get_vm_class()))
xrefs_to = self.method_analysis.get_xref_to()
for ref_class, ref_method in xrefs_to:
xrefs_list.append(('To', ref_method, ref_class.get_vm_class()))
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
xreflayout = QtGui.QGridLayout()
xrefwin = XrefListView(self, win=win, xrefs=xrefs_list)
xreflayout.addWidget(xrefwin, 0, 0)
buttonsLayout = QtGui.QHBoxLayout()
buttonsLayout.addStretch(1)
buttonsLayout.addWidget(closeButton)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addLayout(xreflayout)
mainLayout.addLayout(buttonsLayout)
self.setLayout(mainLayout)
class XrefDialogField(QtGui.QDialog):
def __init__(self, parent=None, win=None, current_class=None, class_analysis=None, field_analysis=None):
super(XrefDialogField, self).__init__(parent)
self.current_class = current_class
self.class_analysis = class_analysis
self.field_analysis = field_analysis
title = "Xrefs for the field %s" % self.field_analysis.field
self.setWindowTitle(title)
xrefs_list = []
xrefs_read = self.field_analysis.get_xref_read()
for ref_class, ref_method in xrefs_read:
xrefs_list.append(('Read', ref_method, ref_class.get_vm_class()))
xrefs_write = self.field_analysis.get_xref_write()
for ref_class, ref_method in xrefs_write:
xrefs_list.append(('Write', ref_method, ref_class.get_vm_class()))
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
xreflayout = QtGui.QGridLayout()
xrefwin = XrefListView(self, win=win, xrefs=xrefs_list)
xreflayout.addWidget(xrefwin, 0, 0)
buttonsLayout = QtGui.QHBoxLayout()
buttonsLayout.addStretch(1)
buttonsLayout.addWidget(closeButton)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addLayout(xreflayout)
mainLayout.addLayout(buttonsLayout)
self.setLayout(mainLayout)
class XrefDialogString(QtGui.QDialog):
def __init__(self, parent=None, win=None, string_analysis=None):
super(XrefDialogString, self).__init__(parent)
self.string_analysis = string_analysis
title = "Xrefs for the string %s" % self.string_analysis.value
self.setWindowTitle(title)
xrefs_list = []
xrefs_from = self.string_analysis.get_xref_from()
for ref_class, ref_method in xrefs_from:
xrefs_list.append(('From', ref_method, ref_class.get_vm_class()))
closeButton = QtGui.QPushButton("Close")
closeButton.clicked.connect(self.close)
xreflayout = QtGui.QGridLayout()
xrefwin = XrefListView(self, win=win, xrefs=xrefs_list)
xreflayout.addWidget(xrefwin, 0, 0)
buttonsLayout = QtGui.QHBoxLayout()
buttonsLayout.addStretch(1)
buttonsLayout.addWidget(closeButton)
mainLayout = QtGui.QVBoxLayout()
mainLayout.addLayout(xreflayout)
mainLayout.addLayout(buttonsLayout)
self.setLayout(mainLayout)
class XrefDialog(QtGui.QDialog):
'''Dialog holding our Xref listview.
parent: SourceWindow that started the new XrefDialog
       path: complete path of the class we are looking for xrefs from
       method (optional): method of the class we are looking for xrefs from
xrefs_list: the list of "Class -> Method" strings representing the xrefs
path/method are used for the title of the window
xrefs_list for the content of the QListView
'''
    def __init__(self, parent=None, win=None, xrefs_list=None, path="", method=""):
super(XrefDialog, self).__init__(parent)
if not isinstance(xrefs_list, list) or len(xrefs_list) == 0:
androconf.warning("Bad XrefDialog creation")
return
if not method:
title = "Xrefs to %s" % path.split("/")[-1]
else:
title = "Xrefs to %s -> %s" % (path.split("/")[-1], method)
self.setWindowTitle(title)
layout = QtGui.QGridLayout()
xrefwin = XrefListView(self, win=win, xrefs=xrefs_list)
layout.addWidget(xrefwin, 0, 0)
self.setLayout(layout)
@classmethod
def get_xrefs_list(cls, class_item, method=None):
'''Static method called before creating a XrefDialog
to check if there are xrefs to display
method (optional): method of the class we are looking xref from
'''
androconf.debug("Getting XREF for %s" % class_item)
item = class_item
if method:
item = method
return XrefDialog.get_xrefs_list_from_element(item)
@classmethod
def get_xrefs_list_from_element(cls, element):
'''Helper for get_xrefs_list
element is a ClassDefItem or MethodDefItem
           By the end of the function we have lost track of whether we were
           working on a class or a method, but we do not care for now.
'''
xref_items = element.XREFfrom.items
androconf.debug("%d XREFs found" % len(xref_items))
# print xref_items
xrefs = []
for xref_item in xref_items:
class_ = xref_item[0].get_class_name()
method_ = xref_item[0].get_name()
descriptor_ = xref_item[0].get_descriptor()
xrefs.append(classmethod2display(class_, method_, descriptor_))
# print xrefs
return xrefs
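# Illustrative flow for the xref dialog above (names are placeholders):
#   xrefs = XrefDialog.get_xrefs_list(class_item)
#   if xrefs:
#       XrefDialog(parent, win=main_window, xrefs_list=xrefs, path=class_path).show()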
class XrefListView(QtGui.QWidget):
def __init__(self, parent=None, win=None, xrefs=None, headers=["Origin", "Method"]):
super(XrefListView, self).__init__(parent)
self.parent = parent
self.mainwin = win
self.xrefs = xrefs
self.headers = headers
self.setMinimumSize(600, 400)
self.filterPatternLineEdit = QtGui.QLineEdit()
self.filterPatternLabel = QtGui.QLabel("&Filter origin pattern:")
self.filterPatternLabel.setBuddy(self.filterPatternLineEdit)
self.filterPatternLineEdit.textChanged.connect(self.filterRegExpChanged)
self.xrefwindow = XrefValueWindow(self, win, self.xrefs, self.headers)
sourceLayout = QtGui.QVBoxLayout()
sourceLayout.addWidget(self.xrefwindow)
sourceLayout.addWidget(self.filterPatternLabel)
sourceLayout.addWidget(self.filterPatternLineEdit)
self.setLayout(sourceLayout)
def filterRegExpChanged(self, value):
regExp = QtCore.QRegExp(value)
self.xrefwindow.proxyModel.setFilterRegExp(regExp)
def close(self):
self.parent.close()
class XrefValueWindow(QtGui.QTreeView):
def __init__(self, parent=None, win=None, xrefs=None, headers=None):
super(XrefValueWindow, self).__init__(parent)
self.parent = parent
self.mainwin = win
self.xrefs = xrefs
self.headers = headers
self.reverse_strings = {}
self.proxyModel = QtGui.QSortFilterProxyModel()
self.proxyModel.setDynamicSortFilter(True)
self.model = QtGui.QStandardItemModel(len(self.xrefs), len(self.headers), self)
column = 0
for header in headers:
self.model.setHeaderData(column, QtCore.Qt.Horizontal, header)
column += 1
row = 0
for ref in xrefs:
for column in range(len(self.headers)):
self.model.setData(self.model.index(row, column, QtCore.QModelIndex()), "%s" % ref[column])
row += 1
self.proxyModel.setSourceModel(self.model)
self.setRootIsDecorated(False)
self.setAlternatingRowColors(True)
self.setModel(self.proxyModel)
self.setSortingEnabled(True)
self.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.doubleClicked.connect(self.slotDoubleClicked)
def slotDoubleClicked(self, mi):
mi = self.proxyModel.mapToSource(mi)
row = mi.row()
column = mi.column()
if column == len(self.headers) - 1:
data = mi.data()
xref_method = None
xref_class = None
for xref in self.xrefs:
if str(xref[-2]) == data:
xref_method = xref[-2]
xref_class = xref[-1]
break
if xref_class and xref_method:
self.mainwin.openSourceWindow(current_class=xref_class,
method=xref_method)
self.parent.close()
return
else:
self.mainwin.showStatus("Impossible to find the xref ....")
return
| {'content_hash': '537c3b98a68882d91c22384d60902a52', 'timestamp': '', 'source': 'github', 'line_count': 318, 'max_line_length': 109, 'avg_line_length': 36.11006289308176, 'alnum_prop': 0.6320647914308107, 'repo_name': 'kiennquit2011/androguard', 'id': 'd0a92b9d70b52101c88771c1c24096e7119a6ad8', 'size': '11483', 'binary': False, 'copies': '16', 'ref': 'refs/heads/master', 'path': 'androguard/gui/xrefwindow.py', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '384130'}, {'name': 'C++', 'bytes': '57006'}, {'name': 'Makefile', 'bytes': '6008'}, {'name': 'Python', 'bytes': '27558667'}]} |
package java.net;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.nio.channels.SocketChannel;
import java.security.AccessController;
import java.security.PrivilegedExceptionAction;
import java.security.PrivilegedAction;
/**
* This class implements client sockets (also called just
* "sockets"). A socket is an endpoint for communication
* between two machines.
* <p>
* The actual work of the socket is performed by an instance of the
* <code>SocketImpl</code> class. An application, by changing
* the socket factory that creates the socket implementation,
* can configure itself to create sockets appropriate to the local
* firewall.
*
* @author unascribed
* @see java.net.Socket#setSocketImplFactory(java.net.SocketImplFactory)
* @see java.net.SocketImpl
* @see java.nio.channels.SocketChannel
* @since JDK1.0
*/
public
class Socket implements java.io.Closeable {
/**
* Various states of this socket.
*/
private boolean created = false;
private boolean bound = false;
private boolean connected = false;
private boolean closed = false;
private Object closeLock = new Object();
private boolean shutIn = false;
private boolean shutOut = false;
/**
* The implementation of this Socket.
*/
SocketImpl impl;
/**
* Are we using an older SocketImpl?
*/
private boolean oldImpl = false;
/**
* Creates an unconnected socket, with the
* system-default type of SocketImpl.
*
* @since JDK1.1
* @revised 1.4
*/
public Socket() {
setImpl();
}
/**
* Creates an unconnected socket, specifying the type of proxy, if any,
* that should be used regardless of any other settings.
* <P>
* If there is a security manager, its <code>checkConnect</code> method
* is called with the proxy host address and port number
* as its arguments. This could result in a SecurityException.
* <P>
* Examples:
* <UL> <LI><code>Socket s = new Socket(Proxy.NO_PROXY);</code> will create
* a plain socket ignoring any other proxy configuration.</LI>
* <LI><code>Socket s = new Socket(new Proxy(Proxy.Type.SOCKS, new InetSocketAddress("socks.mydom.com", 1080)));</code>
* will create a socket connecting through the specified SOCKS proxy
* server.</LI>
* </UL>
*
* @param proxy a {@link java.net.Proxy Proxy} object specifying what kind
* of proxying should be used.
* @throws IllegalArgumentException if the proxy is of an invalid type
* or <code>null</code>.
* @throws SecurityException if a security manager is present and
* permission to connect to the proxy is
* denied.
* @see java.net.ProxySelector
* @see java.net.Proxy
*
* @since 1.5
*/
public Socket(Proxy proxy) {
// Create a copy of Proxy as a security measure
if (proxy == null) {
throw new IllegalArgumentException("Invalid Proxy");
}
Proxy p = proxy == Proxy.NO_PROXY ? Proxy.NO_PROXY : sun.net.ApplicationProxy.create(proxy);
if (p.type() == Proxy.Type.SOCKS) {
SecurityManager security = System.getSecurityManager();
InetSocketAddress epoint = (InetSocketAddress) p.address();
if (epoint.getAddress() != null) {
checkAddress (epoint.getAddress(), "Socket");
}
if (security != null) {
if (epoint.isUnresolved())
epoint = new InetSocketAddress(epoint.getHostName(), epoint.getPort());
if (epoint.isUnresolved())
security.checkConnect(epoint.getHostName(), epoint.getPort());
else
security.checkConnect(epoint.getAddress().getHostAddress(),
epoint.getPort());
}
impl = new SocksSocketImpl(p);
impl.setSocket(this);
} else {
if (p == Proxy.NO_PROXY) {
if (factory == null) {
impl = new PlainSocketImpl();
impl.setSocket(this);
} else
setImpl();
} else
throw new IllegalArgumentException("Invalid Proxy");
}
}
/**
* Creates an unconnected Socket with a user-specified
* SocketImpl.
* <P>
* @param impl an instance of a <B>SocketImpl</B>
* the subclass wishes to use on the Socket.
*
* @exception SocketException if there is an error in the underlying protocol,
* such as a TCP error.
* @since JDK1.1
*/
protected Socket(SocketImpl impl) throws SocketException {
this.impl = impl;
if (impl != null) {
checkOldImpl();
this.impl.setSocket(this);
}
}
/**
* Creates a stream socket and connects it to the specified port
* number on the named host.
* <p>
* If the specified host is <tt>null</tt> it is the equivalent of
* specifying the address as <tt>{@link java.net.InetAddress#getByName InetAddress.getByName}(null)</tt>.
* In other words, it is equivalent to specifying an address of the
* loopback interface. </p>
* <p>
* If the application has specified a server socket factory, that
* factory's <code>createSocketImpl</code> method is called to create
* the actual socket implementation. Otherwise a "plain" socket is created.
* <p>
* If there is a security manager, its
* <code>checkConnect</code> method is called
* with the host address and <code>port</code>
* as its arguments. This could result in a SecurityException.
*
* @param host the host name, or <code>null</code> for the loopback address.
* @param port the port number.
*
* @exception UnknownHostException if the IP address of
* the host could not be determined.
*
* @exception IOException if an I/O error occurs when creating the socket.
* @exception SecurityException if a security manager exists and its
* <code>checkConnect</code> method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
* @see java.net.Socket#setSocketImplFactory(java.net.SocketImplFactory)
* @see java.net.SocketImpl
* @see java.net.SocketImplFactory#createSocketImpl()
* @see SecurityManager#checkConnect
*/
public Socket(String host, int port)
throws UnknownHostException, IOException
{
this(host != null ? new InetSocketAddress(host, port) :
new InetSocketAddress(InetAddress.getByName(null), port),
(SocketAddress) null, true);
}
/**
* Creates a stream socket and connects it to the specified port
* number at the specified IP address.
* <p>
* If the application has specified a socket factory, that factory's
* <code>createSocketImpl</code> method is called to create the
* actual socket implementation. Otherwise a "plain" socket is created.
* <p>
* If there is a security manager, its
* <code>checkConnect</code> method is called
* with the host address and <code>port</code>
* as its arguments. This could result in a SecurityException.
*
* @param address the IP address.
* @param port the port number.
* @exception IOException if an I/O error occurs when creating the socket.
* @exception SecurityException if a security manager exists and its
* <code>checkConnect</code> method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
* @exception NullPointerException if <code>address</code> is null.
* @see java.net.Socket#setSocketImplFactory(java.net.SocketImplFactory)
* @see java.net.SocketImpl
* @see java.net.SocketImplFactory#createSocketImpl()
* @see SecurityManager#checkConnect
*/
public Socket(InetAddress address, int port) throws IOException {
this(address != null ? new InetSocketAddress(address, port) : null,
(SocketAddress) null, true);
}
/**
* Creates a socket and connects it to the specified remote host on
* the specified remote port. The Socket will also bind() to the local
* address and port supplied.
* <p>
* If the specified host is <tt>null</tt> it is the equivalent of
* specifying the address as <tt>{@link java.net.InetAddress#getByName InetAddress.getByName}(null)</tt>.
* In other words, it is equivalent to specifying an address of the
* loopback interface. </p>
* <p>
* A local port number of <code>zero</code> will let the system pick up a
* free port in the <code>bind</code> operation.</p>
* <p>
* If there is a security manager, its
* <code>checkConnect</code> method is called
* with the host address and <code>port</code>
* as its arguments. This could result in a SecurityException.
*
* @param host the name of the remote host, or <code>null</code> for the loopback address.
* @param port the remote port
* @param localAddr the local address the socket is bound to, or
* <code>null</code> for the <code>anyLocal</code> address.
* @param localPort the local port the socket is bound to, or
* <code>zero</code> for a system selected free port.
* @exception IOException if an I/O error occurs when creating the socket.
* @exception SecurityException if a security manager exists and its
* <code>checkConnect</code> method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter or localPort
* parameter is outside the specified range of valid port values,
* which is between 0 and 65535, inclusive.
* @see SecurityManager#checkConnect
* @since JDK1.1
*/
public Socket(String host, int port, InetAddress localAddr,
int localPort) throws IOException {
this(host != null ? new InetSocketAddress(host, port) :
new InetSocketAddress(InetAddress.getByName(null), port),
new InetSocketAddress(localAddr, localPort), true);
}
/**
* Creates a socket and connects it to the specified remote address on
* the specified remote port. The Socket will also bind() to the local
* address and port supplied.
* <p>
* If the specified local address is <tt>null</tt> it is the equivalent of
* specifying the address as the AnyLocal address (see <tt>{@link java.net.InetAddress#isAnyLocalAddress InetAddress.isAnyLocalAddress}()</tt>).
* <p>
* A local port number of <code>zero</code> will let the system pick up a
* free port in the <code>bind</code> operation.</p>
* <p>
* If there is a security manager, its
* <code>checkConnect</code> method is called
* with the host address and <code>port</code>
* as its arguments. This could result in a SecurityException.
*
* @param address the remote address
* @param port the remote port
* @param localAddr the local address the socket is bound to, or
* <code>null</code> for the <code>anyLocal</code> address.
* @param localPort the local port the socket is bound to or
* <code>zero</code> for a system selected free port.
* @exception IOException if an I/O error occurs when creating the socket.
* @exception SecurityException if a security manager exists and its
* <code>checkConnect</code> method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter or localPort
* parameter is outside the specified range of valid port values,
* which is between 0 and 65535, inclusive.
* @exception NullPointerException if <code>address</code> is null.
* @see SecurityManager#checkConnect
* @since JDK1.1
*/
public Socket(InetAddress address, int port, InetAddress localAddr,
int localPort) throws IOException {
this(address != null ? new InetSocketAddress(address, port) : null,
new InetSocketAddress(localAddr, localPort), true);
}
/**
* Creates a stream socket and connects it to the specified port
* number on the named host.
* <p>
* If the specified host is <tt>null</tt> it is the equivalent of
* specifying the address as <tt>{@link java.net.InetAddress#getByName InetAddress.getByName}(null)</tt>.
* In other words, it is equivalent to specifying an address of the
* loopback interface. </p>
* <p>
* If the stream argument is <code>true</code>, this creates a
* stream socket. If the stream argument is <code>false</code>, it
* creates a datagram socket.
* <p>
* If the application has specified a server socket factory, that
* factory's <code>createSocketImpl</code> method is called to create
* the actual socket implementation. Otherwise a "plain" socket is created.
* <p>
* If there is a security manager, its
* <code>checkConnect</code> method is called
* with the host address and <code>port</code>
* as its arguments. This could result in a SecurityException.
* <p>
* If a UDP socket is used, TCP/IP related socket options will not apply.
*
* @param host the host name, or <code>null</code> for the loopback address.
* @param port the port number.
* @param stream a <code>boolean</code> indicating whether this is
* a stream socket or a datagram socket.
* @exception IOException if an I/O error occurs when creating the socket.
* @exception SecurityException if a security manager exists and its
* <code>checkConnect</code> method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
* @see java.net.Socket#setSocketImplFactory(java.net.SocketImplFactory)
* @see java.net.SocketImpl
* @see java.net.SocketImplFactory#createSocketImpl()
* @see SecurityManager#checkConnect
* @deprecated Use DatagramSocket instead for UDP transport.
*/
@Deprecated
public Socket(String host, int port, boolean stream) throws IOException {
this(host != null ? new InetSocketAddress(host, port) :
new InetSocketAddress(InetAddress.getByName(null), port),
(SocketAddress) null, stream);
}
/**
* Creates a socket and connects it to the specified port number at
* the specified IP address.
* <p>
* If the stream argument is <code>true</code>, this creates a
* stream socket. If the stream argument is <code>false</code>, it
* creates a datagram socket.
* <p>
* If the application has specified a server socket factory, that
* factory's <code>createSocketImpl</code> method is called to create
* the actual socket implementation. Otherwise a "plain" socket is created.
*
* <p>If there is a security manager, its
* <code>checkConnect</code> method is called
* with <code>host.getHostAddress()</code> and <code>port</code>
* as its arguments. This could result in a SecurityException.
* <p>
* If UDP socket is used, TCP/IP related socket options will not apply.
*
* @param host the IP address.
* @param port the port number.
* @param stream if <code>true</code>, create a stream socket;
* otherwise, create a datagram socket.
* @exception IOException if an I/O error occurs when creating the socket.
* @exception SecurityException if a security manager exists and its
* <code>checkConnect</code> method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
* @exception NullPointerException if <code>host</code> is null.
* @see java.net.Socket#setSocketImplFactory(java.net.SocketImplFactory)
* @see java.net.SocketImpl
* @see java.net.SocketImplFactory#createSocketImpl()
* @see SecurityManager#checkConnect
* @deprecated Use DatagramSocket instead for UDP transport.
*/
@Deprecated
public Socket(InetAddress host, int port, boolean stream) throws IOException {
this(host != null ? new InetSocketAddress(host, port) : null,
new InetSocketAddress(0), stream);
}
private Socket(SocketAddress address, SocketAddress localAddr,
boolean stream) throws IOException {
setImpl();
// backward compatibility
if (address == null)
throw new NullPointerException();
try {
createImpl(stream);
if (localAddr != null)
bind(localAddr);
if (address != null)
connect(address);
} catch (IOException e) {
close();
throw e;
}
}
/**
* Creates the socket implementation.
*
* @param stream a <code>boolean</code> value : <code>true</code> for a TCP socket,
* <code>false</code> for UDP.
* @throws IOException if creation fails
* @since 1.4
*/
void createImpl(boolean stream) throws SocketException {
if (impl == null)
setImpl();
try {
impl.create(stream);
created = true;
} catch (IOException e) {
throw new SocketException(e.getMessage());
}
}
private void checkOldImpl() {
if (impl == null)
return;
// SocketImpl.connect() is a protected method, therefore we need to use
// getDeclaredMethod, therefore we need permission to access the member
oldImpl = AccessController.doPrivileged
(new PrivilegedAction<Boolean>() {
public Boolean run() {
Class[] cl = new Class[2];
cl[0] = SocketAddress.class;
cl[1] = Integer.TYPE;
Class clazz = impl.getClass();
while (true) {
try {
clazz.getDeclaredMethod("connect", cl);
return Boolean.FALSE;
} catch (NoSuchMethodException e) {
clazz = clazz.getSuperclass();
// java.net.SocketImpl class will always have this abstract method.
// If we have not found it by now in the hierarchy then it does not
// exist, we are an old style impl.
if (clazz.equals(java.net.SocketImpl.class)) {
return Boolean.TRUE;
}
}
}
}
});
}
/**
* Sets impl to the system-default type of SocketImpl.
* @since 1.4
*/
void setImpl() {
if (factory != null) {
impl = factory.createSocketImpl();
checkOldImpl();
} else {
// No need to do a checkOldImpl() here, we know it's an up to date
// SocketImpl!
impl = new SocksSocketImpl();
}
if (impl != null)
impl.setSocket(this);
}
/**
* Get the <code>SocketImpl</code> attached to this socket, creating
* it if necessary.
*
* @return the <code>SocketImpl</code> attached to that ServerSocket.
* @throws SocketException if creation fails
* @since 1.4
*/
SocketImpl getImpl() throws SocketException {
if (!created)
createImpl(true);
return impl;
}
/**
* Connects this socket to the server.
*
* @param endpoint the <code>SocketAddress</code>
* @throws IOException if an error occurs during the connection
* @throws java.nio.channels.IllegalBlockingModeException
* if this socket has an associated channel,
* and the channel is in non-blocking mode
* @throws IllegalArgumentException if endpoint is null or is a
* SocketAddress subclass not supported by this socket
* @since 1.4
* @spec JSR-51
*/
public void connect(SocketAddress endpoint) throws IOException {
connect(endpoint, 0);
}
/**
* Connects this socket to the server with a specified timeout value.
* A timeout of zero is interpreted as an infinite timeout. The connection
* will then block until established or an error occurs.
*
* @param endpoint the <code>SocketAddress</code>
* @param timeout the timeout value to be used in milliseconds.
* @throws IOException if an error occurs during the connection
* @throws SocketTimeoutException if timeout expires before connecting
* @throws java.nio.channels.IllegalBlockingModeException
* if this socket has an associated channel,
* and the channel is in non-blocking mode
* @throws IllegalArgumentException if endpoint is null or is a
* SocketAddress subclass not supported by this socket
* @since 1.4
* @spec JSR-51
*/
public void connect(SocketAddress endpoint, int timeout) throws IOException {
if (endpoint == null)
throw new IllegalArgumentException("connect: The address can't be null");
if (timeout < 0)
throw new IllegalArgumentException("connect: timeout can't be negative");
if (isClosed())
throw new SocketException("Socket is closed");
if (!oldImpl && isConnected())
throw new SocketException("already connected");
if (!(endpoint instanceof InetSocketAddress))
throw new IllegalArgumentException("Unsupported address type");
InetSocketAddress epoint = (InetSocketAddress) endpoint;
InetAddress addr = epoint.getAddress ();
int port = epoint.getPort();
checkAddress(addr, "connect");
SecurityManager security = System.getSecurityManager();
if (security != null) {
if (epoint.isUnresolved())
security.checkConnect(epoint.getHostName(), port);
else
security.checkConnect(addr.getHostAddress(), port);
}
if (!created)
createImpl(true);
if (!oldImpl)
impl.connect(epoint, timeout);
else if (timeout == 0) {
if (epoint.isUnresolved())
impl.connect(addr.getHostName(), port);
else
impl.connect(addr, port);
} else
throw new UnsupportedOperationException("SocketImpl.connect(addr, timeout)");
connected = true;
/*
* If the socket was not bound before the connect, it is now because
* the kernel will have picked an ephemeral port & a local address
*/
bound = true;
}
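    // Illustrative usage of connect with a timeout (host and port are placeholders):
    //
    //   Socket s = new Socket();
    //   try {
    //       s.connect(new InetSocketAddress("example.com", 80), 5000);
    //       // ... use s.getInputStream() / s.getOutputStream() ...
    //   } finally {
    //       s.close();
    //   }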
/**
* Binds the socket to a local address.
* <P>
* If the address is <code>null</code>, then the system will pick up
* an ephemeral port and a valid local address to bind the socket.
*
* @param bindpoint the <code>SocketAddress</code> to bind to
* @throws IOException if the bind operation fails, or if the socket
* is already bound.
* @throws IllegalArgumentException if bindpoint is a
* SocketAddress subclass not supported by this socket
*
* @since 1.4
* @see #isBound
*/
public void bind(SocketAddress bindpoint) throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
if (!oldImpl && isBound())
throw new SocketException("Already bound");
if (bindpoint != null && (!(bindpoint instanceof InetSocketAddress)))
throw new IllegalArgumentException("Unsupported address type");
InetSocketAddress epoint = (InetSocketAddress) bindpoint;
if (epoint != null && epoint.isUnresolved())
throw new SocketException("Unresolved address");
if (epoint == null) {
epoint = new InetSocketAddress(0);
}
InetAddress addr = epoint.getAddress();
int port = epoint.getPort();
checkAddress (addr, "bind");
getImpl().bind (addr, port);
bound = true;
}
private void checkAddress (InetAddress addr, String op) {
if (addr == null) {
return;
}
if (!(addr instanceof Inet4Address || addr instanceof Inet6Address)) {
throw new IllegalArgumentException(op + ": invalid address type");
}
}
/**
* set the flags after an accept() call.
*/
final void postAccept() {
connected = true;
created = true;
bound = true;
}
void setCreated() {
created = true;
}
void setBound() {
bound = true;
}
void setConnected() {
connected = true;
}
/**
* Returns the address to which the socket is connected.
* <p>
* If the socket was connected prior to being {@link #close closed},
* then this method will continue to return the connected address
* after the socket is closed.
*
* @return the remote IP address to which this socket is connected,
* or <code>null</code> if the socket is not connected.
*/
public InetAddress getInetAddress() {
if (!isConnected())
return null;
try {
return getImpl().getInetAddress();
} catch (SocketException e) {
}
return null;
}
/**
* Gets the local address to which the socket is bound.
*
* @return the local address to which the socket is bound, or
* the {@link InetAddress#isAnyLocalAddress wildcard} address
* if the socket is closed or not bound yet.
* @since JDK1.1
*/
public InetAddress getLocalAddress() {
// This is for backward compatibility
if (!isBound())
return InetAddress.anyLocalAddress();
InetAddress in = null;
try {
in = (InetAddress) getImpl().getOption(SocketOptions.SO_BINDADDR);
if (in.isAnyLocalAddress()) {
in = InetAddress.anyLocalAddress();
}
} catch (Exception e) {
in = InetAddress.anyLocalAddress(); // "0.0.0.0"
}
return in;
}
/**
* Returns the remote port number to which this socket is connected.
* <p>
* If the socket was connected prior to being {@link #close closed},
* then this method will continue to return the connected port number
* after the socket is closed.
*
* @return the remote port number to which this socket is connected, or
* 0 if the socket is not connected yet.
*/
public int getPort() {
if (!isConnected())
return 0;
try {
return getImpl().getPort();
} catch (SocketException e) {
// Shouldn't happen as we're connected
}
return -1;
}
/**
* Returns the local port number to which this socket is bound.
* <p>
* If the socket was bound prior to being {@link #close closed},
* then this method will continue to return the local port number
* after the socket is closed.
*
* @return the local port number to which this socket is bound or -1
* if the socket is not bound yet.
*/
public int getLocalPort() {
if (!isBound())
return -1;
try {
return getImpl().getLocalPort();
} catch(SocketException e) {
// shouldn't happen as we're bound
}
return -1;
}
/**
* Returns the address of the endpoint this socket is connected to, or
* <code>null</code> if it is unconnected.
* <p>
* If the socket was connected prior to being {@link #close closed},
* then this method will continue to return the connected address
* after the socket is closed.
*
* @return a <code>SocketAddress</code> representing the remote endpoint of this
* socket, or <code>null</code> if it is not connected yet.
* @see #getInetAddress()
* @see #getPort()
* @see #connect(SocketAddress, int)
* @see #connect(SocketAddress)
* @since 1.4
*/
public SocketAddress getRemoteSocketAddress() {
if (!isConnected())
return null;
return new InetSocketAddress(getInetAddress(), getPort());
}
/**
* Returns the address of the endpoint this socket is bound to, or
* <code>null</code> if it is not bound yet.
* <p>
* If a socket bound to an endpoint represented by an
* <code>InetSocketAddress </code> is {@link #close closed},
* then this method will continue to return an <code>InetSocketAddress</code>
* after the socket is closed. In that case the returned
* <code>InetSocketAddress</code>'s address is the
* {@link InetAddress#isAnyLocalAddress wildcard} address
* and its port is the local port that it was bound to.
*
* @return a <code>SocketAddress</code> representing the local endpoint of this
* socket, or <code>null</code> if it is not bound yet.
* @see #getLocalAddress()
* @see #getLocalPort()
* @see #bind(SocketAddress)
* @since 1.4
*/
public SocketAddress getLocalSocketAddress() {
if (!isBound())
return null;
return new InetSocketAddress(getLocalAddress(), getLocalPort());
}
/**
* Returns the unique {@link java.nio.channels.SocketChannel SocketChannel}
* object associated with this socket, if any.
*
* <p> A socket will have a channel if, and only if, the channel itself was
* created via the {@link java.nio.channels.SocketChannel#open
* SocketChannel.open} or {@link
* java.nio.channels.ServerSocketChannel#accept ServerSocketChannel.accept}
* methods.
*
* @return the socket channel associated with this socket,
* or <tt>null</tt> if this socket was not created
* for a channel
*
* @since 1.4
* @spec JSR-51
*/
public SocketChannel getChannel() {
return null;
}
/**
* Returns an input stream for this socket.
*
* <p> If this socket has an associated channel then the resulting input
* stream delegates all of its operations to the channel. If the channel
* is in non-blocking mode then the input stream's <tt>read</tt> operations
* will throw an {@link java.nio.channels.IllegalBlockingModeException}.
*
* <p>Under abnormal conditions the underlying connection may be
* broken by the remote host or the network software (for example
* a connection reset in the case of TCP connections). When a
* broken connection is detected by the network software the
     * following applies to the returned input stream:
*
* <ul>
*
* <li><p>The network software may discard bytes that are buffered
* by the socket. Bytes that aren't discarded by the network
* software can be read using {@link java.io.InputStream#read read}.
*
* <li><p>If there are no bytes buffered on the socket, or all
* buffered bytes have been consumed by
* {@link java.io.InputStream#read read}, then all subsequent
* calls to {@link java.io.InputStream#read read} will throw an
* {@link java.io.IOException IOException}.
*
* <li><p>If there are no bytes buffered on the socket, and the
* socket has not been closed using {@link #close close}, then
* {@link java.io.InputStream#available available} will
* return <code>0</code>.
*
* </ul>
*
* <p> Closing the returned {@link java.io.InputStream InputStream}
* will close the associated socket.
*
* @return an input stream for reading bytes from this socket.
* @exception IOException if an I/O error occurs when creating the
* input stream, the socket is closed, the socket is
* not connected, or the socket input has been shutdown
* using {@link #shutdownInput()}
*
* @revised 1.4
* @spec JSR-51
*/
public InputStream getInputStream() throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
if (!isConnected())
throw new SocketException("Socket is not connected");
if (isInputShutdown())
throw new SocketException("Socket input is shutdown");
final Socket s = this;
InputStream is = null;
try {
is = AccessController.doPrivileged(
new PrivilegedExceptionAction<InputStream>() {
public InputStream run() throws IOException {
return impl.getInputStream();
}
});
} catch (java.security.PrivilegedActionException e) {
throw (IOException) e.getException();
}
return is;
}
/**
* Returns an output stream for this socket.
*
* <p> If this socket has an associated channel then the resulting output
* stream delegates all of its operations to the channel. If the channel
* is in non-blocking mode then the output stream's <tt>write</tt>
* operations will throw an {@link
* java.nio.channels.IllegalBlockingModeException}.
*
* <p> Closing the returned {@link java.io.OutputStream OutputStream}
* will close the associated socket.
*
* @return an output stream for writing bytes to this socket.
* @exception IOException if an I/O error occurs when creating the
* output stream or if the socket is not connected.
* @revised 1.4
* @spec JSR-51
*/
public OutputStream getOutputStream() throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
if (!isConnected())
throw new SocketException("Socket is not connected");
if (isOutputShutdown())
throw new SocketException("Socket output is shutdown");
final Socket s = this;
OutputStream os = null;
try {
os = AccessController.doPrivileged(
new PrivilegedExceptionAction<OutputStream>() {
public OutputStream run() throws IOException {
return impl.getOutputStream();
}
});
} catch (java.security.PrivilegedActionException e) {
throw (IOException) e.getException();
}
return os;
}
/**
* Enable/disable TCP_NODELAY (disable/enable Nagle's algorithm).
*
* @param on <code>true</code> to enable TCP_NODELAY,
* <code>false</code> to disable.
*
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
*
* @since JDK1.1
*
* @see #getTcpNoDelay()
*/
public void setTcpNoDelay(boolean on) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.TCP_NODELAY, Boolean.valueOf(on));
}
/**
* Tests if TCP_NODELAY is enabled.
*
* @return a <code>boolean</code> indicating whether or not TCP_NODELAY is enabled.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since JDK1.1
* @see #setTcpNoDelay(boolean)
*/
public boolean getTcpNoDelay() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
return ((Boolean) getImpl().getOption(SocketOptions.TCP_NODELAY)).booleanValue();
}
/**
* Enable/disable SO_LINGER with the specified linger time in seconds.
* The maximum timeout value is platform specific.
*
* The setting only affects socket close.
*
* @param on whether or not to linger on.
* @param linger how long to linger for, if on is true.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @exception IllegalArgumentException if the linger value is negative.
* @since JDK1.1
* @see #getSoLinger()
*/
public void setSoLinger(boolean on, int linger) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
if (!on) {
getImpl().setOption(SocketOptions.SO_LINGER, new Boolean(on));
} else {
if (linger < 0) {
throw new IllegalArgumentException("invalid value for SO_LINGER");
}
if (linger > 65535)
linger = 65535;
getImpl().setOption(SocketOptions.SO_LINGER, new Integer(linger));
}
}
/**
     * Returns the setting for SO_LINGER. A return of -1 implies that
     * the option is disabled.
*
* The setting only affects socket close.
*
* @return the setting for SO_LINGER.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since JDK1.1
* @see #setSoLinger(boolean, int)
*/
public int getSoLinger() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
Object o = getImpl().getOption(SocketOptions.SO_LINGER);
if (o instanceof Integer) {
return ((Integer) o).intValue();
} else {
return -1;
}
}
/**
* Send one byte of urgent data on the socket. The byte to be sent is the lowest eight
* bits of the data parameter. The urgent byte is
* sent after any preceding writes to the socket OutputStream
* and before any future writes to the OutputStream.
* @param data The byte of data to send
* @exception IOException if there is an error
* sending the data.
* @since 1.4
*/
public void sendUrgentData (int data) throws IOException {
if (!getImpl().supportsUrgentData ()) {
throw new SocketException ("Urgent data not supported");
}
getImpl().sendUrgentData (data);
}
/**
* Enable/disable OOBINLINE (receipt of TCP urgent data)
*
* By default, this option is disabled and TCP urgent data received on a
* socket is silently discarded. If the user wishes to receive urgent data, then
* this option must be enabled. When enabled, urgent data is received
* inline with normal data.
* <p>
* Note, only limited support is provided for handling incoming urgent
* data. In particular, no notification of incoming urgent data is provided
* and there is no capability to distinguish between normal data and urgent
* data unless provided by a higher level protocol.
*
* @param on <code>true</code> to enable OOBINLINE,
* <code>false</code> to disable.
*
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
*
* @since 1.4
*
* @see #getOOBInline()
*/
public void setOOBInline(boolean on) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_OOBINLINE, Boolean.valueOf(on));
}
/**
* Tests if OOBINLINE is enabled.
*
* @return a <code>boolean</code> indicating whether or not OOBINLINE is enabled.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since 1.4
* @see #setOOBInline(boolean)
*/
public boolean getOOBInline() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
return ((Boolean) getImpl().getOption(SocketOptions.SO_OOBINLINE)).booleanValue();
}
/**
* Enable/disable SO_TIMEOUT with the specified timeout, in
* milliseconds. With this option set to a non-zero timeout,
* a read() call on the InputStream associated with this Socket
* will block for only this amount of time. If the timeout expires,
* a <B>java.net.SocketTimeoutException</B> is raised, though the
* Socket is still valid. The option <B>must</B> be enabled
* prior to entering the blocking operation to have effect. The
* timeout must be > 0.
* A timeout of zero is interpreted as an infinite timeout.
* @param timeout the specified timeout, in milliseconds.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since JDK 1.1
* @see #getSoTimeout()
*/
public synchronized void setSoTimeout(int timeout) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
if (timeout < 0)
throw new IllegalArgumentException("timeout can't be negative");
getImpl().setOption(SocketOptions.SO_TIMEOUT, new Integer(timeout));
}
/**
     * Returns the setting for SO_TIMEOUT. A return of 0 implies that
     * the option is disabled (i.e., an infinite timeout).
* @return the setting for SO_TIMEOUT
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since JDK1.1
* @see #setSoTimeout(int)
*/
public synchronized int getSoTimeout() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
Object o = getImpl().getOption(SocketOptions.SO_TIMEOUT);
/* extra type safety */
if (o instanceof Integer) {
return ((Integer) o).intValue();
} else {
return 0;
}
}
/**
* Sets the SO_SNDBUF option to the specified value for this
* <tt>Socket</tt>. The SO_SNDBUF option is used by the platform's
* networking code as a hint for the size to set
* the underlying network I/O buffers.
*
* <p>Because SO_SNDBUF is a hint, applications that want to
* verify what size the buffers were set to should call
* {@link #getSendBufferSize()}.
*
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
*
* @param size the size to which to set the send buffer
* size. This value must be greater than 0.
*
* @exception IllegalArgumentException if the
* value is 0 or is negative.
*
* @see #getSendBufferSize()
* @since 1.2
*/
public synchronized void setSendBufferSize(int size)
throws SocketException{
if (!(size > 0)) {
throw new IllegalArgumentException("negative send size");
}
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_SNDBUF, new Integer(size));
}
/**
* Get value of the SO_SNDBUF option for this <tt>Socket</tt>,
* that is the buffer size used by the platform
* for output on this <tt>Socket</tt>.
* @return the value of the SO_SNDBUF option for this <tt>Socket</tt>.
*
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
*
* @see #setSendBufferSize(int)
* @since 1.2
*/
public synchronized int getSendBufferSize() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
int result = 0;
Object o = getImpl().getOption(SocketOptions.SO_SNDBUF);
if (o instanceof Integer) {
result = ((Integer)o).intValue();
}
return result;
}
/**
* Sets the SO_RCVBUF option to the specified value for this
* <tt>Socket</tt>. The SO_RCVBUF option is used by the platform's
* networking code as a hint for the size to set
* the underlying network I/O buffers.
*
* <p>Increasing the receive buffer size can increase the performance of
     * network I/O for high-volume connections, while decreasing it can
* help reduce the backlog of incoming data.
*
* <p>Because SO_RCVBUF is a hint, applications that want to
* verify what size the buffers were set to should call
* {@link #getReceiveBufferSize()}.
*
* <p>The value of SO_RCVBUF is also used to set the TCP receive window
     * that is advertised to the remote peer. Generally, the window size
* can be modified at any time when a socket is connected. However, if
* a receive window larger than 64K is required then this must be requested
* <B>before</B> the socket is connected to the remote peer. There are two
* cases to be aware of:<p>
* <ol>
* <li>For sockets accepted from a ServerSocket, this must be done by calling
* {@link ServerSocket#setReceiveBufferSize(int)} before the ServerSocket
* is bound to a local address.<p></li>
* <li>For client sockets, setReceiveBufferSize() must be called before
* connecting the socket to its remote peer.<p></li></ol>
* @param size the size to which to set the receive buffer
* size. This value must be greater than 0.
*
* @exception IllegalArgumentException if the value is 0 or is
* negative.
*
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
*
* @see #getReceiveBufferSize()
* @see ServerSocket#setReceiveBufferSize(int)
* @since 1.2
*/
public synchronized void setReceiveBufferSize(int size)
throws SocketException{
if (size <= 0) {
throw new IllegalArgumentException("invalid receive size");
}
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_RCVBUF, new Integer(size));
}
/**
* Gets the value of the SO_RCVBUF option for this <tt>Socket</tt>,
* that is the buffer size used by the platform for
* input on this <tt>Socket</tt>.
*
* @return the value of the SO_RCVBUF option for this <tt>Socket</tt>.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @see #setReceiveBufferSize(int)
* @since 1.2
*/
public synchronized int getReceiveBufferSize()
throws SocketException{
if (isClosed())
throw new SocketException("Socket is closed");
int result = 0;
Object o = getImpl().getOption(SocketOptions.SO_RCVBUF);
if (o instanceof Integer) {
result = ((Integer)o).intValue();
}
return result;
}
/**
* Enable/disable SO_KEEPALIVE.
*
* @param on whether or not to have socket keep alive turned on.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since 1.3
* @see #getKeepAlive()
*/
public void setKeepAlive(boolean on) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_KEEPALIVE, Boolean.valueOf(on));
}
/**
* Tests if SO_KEEPALIVE is enabled.
*
* @return a <code>boolean</code> indicating whether or not SO_KEEPALIVE is enabled.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since 1.3
* @see #setKeepAlive(boolean)
*/
public boolean getKeepAlive() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
return ((Boolean) getImpl().getOption(SocketOptions.SO_KEEPALIVE)).booleanValue();
}
/**
* Sets traffic class or type-of-service octet in the IP
* header for packets sent from this Socket.
* As the underlying network implementation may ignore this
* value applications should consider it a hint.
*
* <P> The tc <B>must</B> be in the range <code> 0 <= tc <=
* 255</code> or an IllegalArgumentException will be thrown.
* <p>Notes:
* <p>For Internet Protocol v4 the value consists of an
* <code>integer</code>, the least significant 8 bits of which
* represent the value of the TOS octet in IP packets sent by
* the socket.
* RFC 1349 defines the TOS values as follows:
* <p>
* <UL>
* <LI><CODE>IPTOS_LOWCOST (0x02)</CODE></LI>
* <LI><CODE>IPTOS_RELIABILITY (0x04)</CODE></LI>
* <LI><CODE>IPTOS_THROUGHPUT (0x08)</CODE></LI>
* <LI><CODE>IPTOS_LOWDELAY (0x10)</CODE></LI>
* </UL>
* The last low order bit is always ignored as this
* corresponds to the MBZ (must be zero) bit.
* <p>
* Setting bits in the precedence field may result in a
* SocketException indicating that the operation is not
* permitted.
* <p>
* As RFC 1122 section 4.2.4.2 indicates, a compliant TCP
     * implementation should, but is not required to, let the application
* change the TOS field during the lifetime of a connection.
* So whether the type-of-service field can be changed after the
* TCP connection has been established depends on the implementation
* in the underlying platform. Applications should not assume that
* they can change the TOS field after the connection.
* <p>
* For Internet Protocol v6 <code>tc</code> is the value that
* would be placed into the sin6_flowinfo field of the IP header.
*
* @param tc an <code>int</code> value for the bitset.
* @throws SocketException if there is an error setting the
* traffic class or type-of-service
* @since 1.4
* @see #getTrafficClass
*/
public void setTrafficClass(int tc) throws SocketException {
if (tc < 0 || tc > 255)
throw new IllegalArgumentException("tc is not in range 0 -- 255");
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.IP_TOS, new Integer(tc));
}
/**
* Gets traffic class or type-of-service in the IP header
* for packets sent from this Socket
* <p>
* As the underlying network implementation may ignore the
* traffic class or type-of-service set using {@link #setTrafficClass(int)}
* this method may return a different value than was previously
* set using the {@link #setTrafficClass(int)} method on this Socket.
*
* @return the traffic class or type-of-service already set
* @throws SocketException if there is an error obtaining the
* traffic class or type-of-service value.
* @since 1.4
* @see #setTrafficClass(int)
*/
public int getTrafficClass() throws SocketException {
return ((Integer) (getImpl().getOption(SocketOptions.IP_TOS))).intValue();
}
/**
* Enable/disable the SO_REUSEADDR socket option.
* <p>
* When a TCP connection is closed the connection may remain
* in a timeout state for a period of time after the connection
* is closed (typically known as the <tt>TIME_WAIT</tt> state
* or <tt>2MSL</tt> wait state).
* For applications using a well known socket address or port
* it may not be possible to bind a socket to the required
* <tt>SocketAddress</tt> if there is a connection in the
* timeout state involving the socket address or port.
* <p>
* Enabling <tt>SO_REUSEADDR</tt> prior to binding the socket
* using {@link #bind(SocketAddress)} allows the socket to be
* bound even though a previous connection is in a timeout
* state.
* <p>
* When a <tt>Socket</tt> is created the initial setting
* of <tt>SO_REUSEADDR</tt> is disabled.
* <p>
* The behaviour when <tt>SO_REUSEADDR</tt> is enabled or
* disabled after a socket is bound (See {@link #isBound()})
* is not defined.
*
* @param on whether to enable or disable the socket option
* @exception SocketException if an error occurs enabling or
     *            disabling the <tt>SO_REUSEADDR</tt> socket option,
* or the socket is closed.
* @since 1.4
* @see #getReuseAddress()
* @see #bind(SocketAddress)
* @see #isClosed()
* @see #isBound()
*/
public void setReuseAddress(boolean on) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_REUSEADDR, Boolean.valueOf(on));
}
/**
* Tests if SO_REUSEADDR is enabled.
*
* @return a <code>boolean</code> indicating whether or not SO_REUSEADDR is enabled.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since 1.4
* @see #setReuseAddress(boolean)
*/
public boolean getReuseAddress() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
return ((Boolean) (getImpl().getOption(SocketOptions.SO_REUSEADDR))).booleanValue();
}
/**
* Closes this socket.
* <p>
* Any thread currently blocked in an I/O operation upon this socket
* will throw a {@link SocketException}.
* <p>
* Once a socket has been closed, it is not available for further networking
* use (i.e. can't be reconnected or rebound). A new socket needs to be
* created.
*
* <p> Closing this socket will also close the socket's
* {@link java.io.InputStream InputStream} and
* {@link java.io.OutputStream OutputStream}.
*
* <p> If this socket has an associated channel then the channel is closed
* as well.
*
* @exception IOException if an I/O error occurs when closing this socket.
* @revised 1.4
* @spec JSR-51
* @see #isClosed
*/
public synchronized void close() throws IOException {
synchronized(closeLock) {
if (isClosed())
return;
if (created)
impl.close();
closed = true;
}
}
/**
* Places the input stream for this socket at "end of stream".
* Any data sent to the input stream side of the socket is acknowledged
* and then silently discarded.
* <p>
* If you read from a socket input stream after invoking
* shutdownInput() on the socket, the stream will return EOF.
*
* @exception IOException if an I/O error occurs when shutting down this
* socket.
*
* @since 1.3
* @see java.net.Socket#shutdownOutput()
* @see java.net.Socket#close()
* @see java.net.Socket#setSoLinger(boolean, int)
* @see #isInputShutdown
*/
public void shutdownInput() throws IOException
{
if (isClosed())
throw new SocketException("Socket is closed");
if (!isConnected())
throw new SocketException("Socket is not connected");
if (isInputShutdown())
throw new SocketException("Socket input is already shutdown");
getImpl().shutdownInput();
shutIn = true;
}
/**
* Disables the output stream for this socket.
* For a TCP socket, any previously written data will be sent
* followed by TCP's normal connection termination sequence.
*
* If you write to a socket output stream after invoking
* shutdownOutput() on the socket, the stream will throw
* an IOException.
*
* @exception IOException if an I/O error occurs when shutting down this
* socket.
*
* @since 1.3
* @see java.net.Socket#shutdownInput()
* @see java.net.Socket#close()
* @see java.net.Socket#setSoLinger(boolean, int)
* @see #isOutputShutdown
*/
public void shutdownOutput() throws IOException
{
if (isClosed())
throw new SocketException("Socket is closed");
if (!isConnected())
throw new SocketException("Socket is not connected");
if (isOutputShutdown())
throw new SocketException("Socket output is already shutdown");
getImpl().shutdownOutput();
shutOut = true;
}
/**
* Converts this socket to a <code>String</code>.
*
* @return a string representation of this socket.
*/
public String toString() {
try {
if (isConnected())
return "Socket[addr=" + getImpl().getInetAddress() +
",port=" + getImpl().getPort() +
",localport=" + getImpl().getLocalPort() + "]";
} catch (SocketException e) {
}
return "Socket[unconnected]";
}
/**
* Returns the connection state of the socket.
* <p>
* Note: Closing a socket doesn't clear its connection state, which means
* this method will return <code>true</code> for a closed socket
     * (see {@link #isClosed()}) if it was successfully connected prior
* to being closed.
*
     * @return true if the socket was successfully connected to a server
* @since 1.4
*/
public boolean isConnected() {
// Before 1.3 Sockets were always connected during creation
return connected || oldImpl;
}
/**
* Returns the binding state of the socket.
* <p>
* Note: Closing a socket doesn't clear its binding state, which means
* this method will return <code>true</code> for a closed socket
     * (see {@link #isClosed()}) if it was successfully bound prior
* to being closed.
*
     * @return true if the socket was successfully bound to an address
* @since 1.4
* @see #bind
*/
public boolean isBound() {
// Before 1.3 Sockets were always bound during creation
return bound || oldImpl;
}
/**
* Returns the closed state of the socket.
*
* @return true if the socket has been closed
* @since 1.4
* @see #close
*/
public boolean isClosed() {
synchronized(closeLock) {
return closed;
}
}
/**
* Returns whether the read-half of the socket connection is closed.
*
* @return true if the input of the socket has been shutdown
* @since 1.4
* @see #shutdownInput
*/
public boolean isInputShutdown() {
return shutIn;
}
/**
* Returns whether the write-half of the socket connection is closed.
*
* @return true if the output of the socket has been shutdown
* @since 1.4
* @see #shutdownOutput
*/
public boolean isOutputShutdown() {
return shutOut;
}
/**
* The factory for all client sockets.
*/
private static SocketImplFactory factory = null;
/**
* Sets the client socket implementation factory for the
* application. The factory can be specified only once.
* <p>
* When an application creates a new client socket, the socket
* implementation factory's <code>createSocketImpl</code> method is
* called to create the actual socket implementation.
* <p>
* Passing <code>null</code> to the method is a no-op unless the factory
* was already set.
* <p>If there is a security manager, this method first calls
* the security manager's <code>checkSetFactory</code> method
* to ensure the operation is allowed.
* This could result in a SecurityException.
*
* @param fac the desired factory.
* @exception IOException if an I/O error occurs when setting the
* socket factory.
* @exception SocketException if the factory is already defined.
* @exception SecurityException if a security manager exists and its
* <code>checkSetFactory</code> method doesn't allow the operation.
* @see java.net.SocketImplFactory#createSocketImpl()
* @see SecurityManager#checkSetFactory
*/
public static synchronized void setSocketImplFactory(SocketImplFactory fac)
throws IOException
{
if (factory != null) {
throw new SocketException("factory already defined");
}
SecurityManager security = System.getSecurityManager();
if (security != null) {
security.checkSetFactory();
}
factory = fac;
}
/**
* Sets performance preferences for this socket.
*
* <p> Sockets use the TCP/IP protocol by default. Some implementations
* may offer alternative protocols which have different performance
* characteristics than TCP/IP. This method allows the application to
* express its own preferences as to how these tradeoffs should be made
* when the implementation chooses from the available protocols.
*
* <p> Performance preferences are described by three integers
* whose values indicate the relative importance of short connection time,
* low latency, and high bandwidth. The absolute values of the integers
* are irrelevant; in order to choose a protocol the values are simply
* compared, with larger values indicating stronger preferences. Negative
* values represent a lower priority than positive values. If the
* application prefers short connection time over both low latency and high
* bandwidth, for example, then it could invoke this method with the values
* <tt>(1, 0, 0)</tt>. If the application prefers high bandwidth above low
* latency, and low latency above short connection time, then it could
* invoke this method with the values <tt>(0, 1, 2)</tt>.
*
* <p> Invoking this method after this socket has been connected
* will have no effect.
*
* @param connectionTime
* An <tt>int</tt> expressing the relative importance of a short
* connection time
*
* @param latency
* An <tt>int</tt> expressing the relative importance of low
* latency
*
* @param bandwidth
* An <tt>int</tt> expressing the relative importance of high
* bandwidth
*
* @since 1.5
*/
public void setPerformancePreferences(int connectionTime,
int latency,
int bandwidth)
{
/* Not implemented yet */
}
}
| {'content_hash': 'a0d1af2ae00557a5d2b13950ab38c87b', 'timestamp': '', 'source': 'github', 'line_count': 1640, 'max_line_length': 148, 'avg_line_length': 38.922560975609755, 'alnum_prop': 0.6157786724734855, 'repo_name': 'haikuowuya/android_system_code', 'id': 'e763b55859527e71520cfa119a47ff3a7bad466c', 'size': '64048', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'src/java/net/Socket.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C', 'bytes': '182432'}, {'name': 'Java', 'bytes': '124952631'}]} |
package org.pentaho.di.trans;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URISyntaxException;
import java.util.concurrent.CountDownLatch;
import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs.FileObject;
import org.apache.tools.ant.filters.StringInputStream;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.Mockito;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.ProgressMonitorListener;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.logging.LogChannelInterface;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
public class TransTest {
int count = 10000;
Trans trans;
TransMeta meta;
@BeforeClass
public static void beforeClass() throws KettleException {
KettleEnvironment.init();
}
@Before
public void beforeTest() throws KettleException {
meta = new TransMeta();
trans = new Trans( meta );
trans.setLog( Mockito.mock( LogChannelInterface.class ) );
trans.prepareExecution( null );
trans.startThreads();
}
@Test
public void testFindDatabaseWithEncodedConnectionName() {
DatabaseMeta dbMeta1 =
new DatabaseMeta( "encoded_DBConnection", "Oracle", "localhost", "access", "test", "111", "test", "test" );
dbMeta1.setDisplayName( "encoded.DBConnection" );
meta.addDatabase( dbMeta1 );
DatabaseMeta dbMeta2 =
new DatabaseMeta( "normalDBConnection", "Oracle", "localhost", "access", "test", "111", "test", "test" );
dbMeta2.setDisplayName( "normalDBConnection" );
meta.addDatabase( dbMeta2 );
DatabaseMeta databaseMeta = meta.findDatabase( dbMeta1.getDisplayName() );
assertNotNull( databaseMeta );
assertEquals( databaseMeta.getName(), "encoded_DBConnection" );
assertEquals( databaseMeta.getDisplayName(), "encoded.DBConnection" );
}
/**
* PDI-10762 - Trans and TransMeta leak
*/
@Test
public void testLoggingObjectIsNotLeakInMeta() {
String expected = meta.log.getLogChannelId();
meta.clear();
String actual = meta.log.getLogChannelId();
assertEquals( "Use same logChannel for empty constructors, or assign General level for clear() calls",
expected, actual );
}
/**
* PDI-10762 - Trans and TransMeta leak
*/
@Test
public void testLoggingObjectIsNotLeakInTrans() throws KettleException {
Repository rep = Mockito.mock( Repository.class );
RepositoryDirectoryInterface repInt = Mockito.mock( RepositoryDirectoryInterface.class );
Mockito.when(
rep.loadTransformation( Mockito.anyString(), Mockito.any( RepositoryDirectoryInterface.class ), Mockito
.any( ProgressMonitorListener.class ), Mockito.anyBoolean(), Mockito.anyString() ) ).thenReturn( meta );
Mockito.when( rep.findDirectory( Mockito.anyString() ) ).thenReturn( repInt );
Trans trans = new Trans( meta, rep, "junit", "junitDir", "fileName" );
assertEquals( "Log channel General assigned", LogChannel.GENERAL.getLogChannelId(), trans.log
.getLogChannelId() );
}
/**
   * PDI-5229 - ConcurrentModificationException when restarting transformation. Test that listeners can be accessed
   * concurrently during transformation finish.
*
* @throws KettleException
* @throws InterruptedException
*/
@Test
public void testTransFinishListenersConcurrentModification() throws KettleException, InterruptedException {
CountDownLatch start = new CountDownLatch( 1 );
TransFinishListenerAdder add = new TransFinishListenerAdder( trans, start );
TransFinishListenerFirer firer = new TransFinishListenerFirer( trans, start );
startThreads( add, firer, start );
assertEquals( "All listeners are added: no ConcurrentModificationException", count, add.c );
assertEquals( "All Finish listeners are iterated over: no ConcurrentModificationException", count, firer.c );
}
/**
* Test that listeners can be accessed concurrently during transformation start
*
* @throws InterruptedException
*/
@Test
public void testTransStartListenersConcurrentModification() throws InterruptedException {
CountDownLatch start = new CountDownLatch( 1 );
TransFinishListenerAdder add = new TransFinishListenerAdder( trans, start );
TransStartListenerFirer starter = new TransStartListenerFirer( trans, start );
startThreads( add, starter, start );
assertEquals( "All listeners are added: no ConcurrentModificationException", count, add.c );
assertEquals( "All Start listeners are iterated over: no ConcurrentModificationException", count, starter.c );
}
/**
* Test that transformation stop listeners can be accessed concurrently
*
* @throws InterruptedException
*/
@Test
public void testTransStoppedListenersConcurrentModification() throws InterruptedException {
CountDownLatch start = new CountDownLatch( 1 );
TransStoppedCaller stopper = new TransStoppedCaller( trans, start );
TransStopListenerAdder adder = new TransStopListenerAdder( trans, start );
startThreads( stopper, adder, start );
    assertEquals( "All transformation stop listeners are added", count, adder.c );
    assertEquals( "All stop calls succeeded", count, stopper.c );
}
@Test
public void testPDI12424ParametersFromMetaAreCopiedToTrans() throws KettleException, URISyntaxException, IOException {
String testParam = "testParam";
String testParamValue = "testParamValue";
TransMeta mockTransMeta = mock( TransMeta.class );
when( mockTransMeta.listVariables() ).thenReturn( new String[] {} );
when( mockTransMeta.listParameters() ).thenReturn( new String[] { testParam } );
when( mockTransMeta.getParameterValue( testParam ) ).thenReturn( testParamValue );
FileObject ktr = KettleVFS.createTempFile( "parameters", ".ktr", "ram://" );
OutputStream outputStream = ktr.getContent().getOutputStream( true );
StringInputStream stringInputStream = new StringInputStream( "<transformation></transformation>" );
IOUtils.copy( stringInputStream, outputStream );
outputStream.close();
Trans trans = new Trans( mockTransMeta, null, null, null, ktr.getURL().toURI().toString() );
assertEquals( testParamValue, trans.getParameterValue( testParam ) );
}
private void startThreads( Runnable one, Runnable two, CountDownLatch start ) throws InterruptedException {
Thread th = new Thread( one );
Thread tt = new Thread( two );
th.start();
tt.start();
start.countDown();
th.join();
tt.join();
}
private abstract class TransKicker implements Runnable {
protected Trans tr;
protected int c = 0;
protected CountDownLatch start;
protected int max = count;
TransKicker( Trans tr, CountDownLatch start ) {
this.tr = tr;
this.start = start;
}
protected boolean isStopped() {
c++;
return c >= max;
}
}
private class TransStoppedCaller extends TransKicker {
TransStoppedCaller( Trans tr, CountDownLatch start ) {
super( tr, start );
}
@Override
public void run() {
try {
start.await();
} catch ( InterruptedException e ) {
throw new RuntimeException();
}
while ( !isStopped() ) {
trans.stopAll();
}
}
}
private class TransStopListenerAdder extends TransKicker {
TransStopListenerAdder( Trans tr, CountDownLatch start ) {
super( tr, start );
}
@Override
public void run() {
try {
start.await();
} catch ( InterruptedException e ) {
throw new RuntimeException();
}
while ( !isStopped() ) {
trans.addTransStoppedListener( transStoppedListener );
}
}
}
private class TransFinishListenerAdder extends TransKicker {
TransFinishListenerAdder( Trans tr, CountDownLatch start ) {
super( tr, start );
}
@Override
public void run() {
try {
start.await();
} catch ( InterruptedException e ) {
throw new RuntimeException();
}
// run
while ( !isStopped() ) {
tr.addTransListener( listener );
}
}
}
private class TransFinishListenerFirer extends TransKicker {
TransFinishListenerFirer( Trans tr, CountDownLatch start ) {
super( tr, start );
}
@Override
public void run() {
try {
start.await();
} catch ( InterruptedException e ) {
throw new RuntimeException();
}
// run
while ( !isStopped() ) {
try {
tr.fireTransFinishedListeners();
// clean array blocking queue
tr.waitUntilFinished();
} catch ( KettleException e ) {
throw new RuntimeException();
}
}
}
}
private class TransStartListenerFirer extends TransKicker {
TransStartListenerFirer( Trans tr, CountDownLatch start ) {
super( tr, start );
}
@Override
public void run() {
try {
start.await();
} catch ( InterruptedException e ) {
throw new RuntimeException();
}
// run
while ( !isStopped() ) {
try {
tr.fireTransStartedListeners();
} catch ( KettleException e ) {
throw new RuntimeException();
}
}
}
}
private final TransListener listener = new TransListener() {
@Override
public void transStarted( Trans trans ) throws KettleException {
}
@Override
public void transActive( Trans trans ) {
}
@Override
public void transFinished( Trans trans ) throws KettleException {
}
};
private final TransStoppedListener transStoppedListener = new TransStoppedListener() {
@Override
public void transStopped( Trans trans ) {
}
};
}
| {'content_hash': 'a2a7766e8850478853606bb202151718', 'timestamp': '', 'source': 'github', 'line_count': 311, 'max_line_length': 120, 'avg_line_length': 32.61736334405145, 'alnum_prop': 0.6901616719242902, 'repo_name': 'ivanpogodin/pentaho-kettle', 'id': 'bd4edb2279c1e692f34ffe63965e7af294e515eb', 'size': '10144', 'binary': False, 'copies': '4', 'ref': 'refs/heads/master', 'path': 'engine/test-src/org/pentaho/di/trans/TransTest.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '20530'}, {'name': 'GAP', 'bytes': '4005'}, {'name': 'HTML', 'bytes': '69938'}, {'name': 'Java', 'bytes': '33052993'}, {'name': 'JavaScript', 'bytes': '16314'}, {'name': 'Shell', 'bytes': '28479'}, {'name': 'XSLT', 'bytes': '5600'}]} |
JS Sequence Diagrams [](https://libraries.io/bower/js-sequence-diagrams) [](https://travis-ci.org/bramp/js-sequence-diagrams) [](https://codeclimate.com/github/bramp/js-sequence-diagrams) 
=============================================
**Generates UML sequence diagrams from simple text**
<https://bramp.github.io/js-sequence-diagrams/>
by [Andrew Brampton](https://bramp.net) 2012-2017
Example
-------
We turn
Alice->Bob: Hello Bob, how are you?
Note right of Bob: Bob thinks
Bob-->Alice: I am good thanks!
into

Requirements
------------
You will need [Snap.svg](http://snapsvg.io/), [Web Font Loader](https://github.com/typekit/webfontloader) (if you wish to use custom fonts), [underscore.js](http://underscorejs.org/) (or [lodash](https://lodash.com/)), and optionally [jQuery](https://jquery.com/).
Installation
----------------------
### bower
Run `bower install bramp/js-sequence-diagrams` and include the scripts below:
```html
<script src="{{ bower directory }}/bower-webfontloader/webfont.js" />
<script src="{{ bower directory }}/snap.svg/dist/snap.svg-min.js" />
<script src="{{ bower directory }}/underscore/underscore-min.js" />
<script src="{{ bower directory }}/js-sequence-diagrams/dist/sequence-diagram-min.js" />
```
Also include the CSS if you plan to use the hand-drawn theme:
```html
<link href="{{ bower directory }}/js-sequence-diagrams/dist/sequence-diagram-min.css" rel="stylesheet" />
```
Not using bower? No problem. Just download the dependencies, and include them yourself.
Usage
-----
You can use the Diagram class like:
```html
<div id="diagram">Diagram will be placed here</div>
<script>
var d = Diagram.parse("A->B: Does something");
var options = {theme: 'simple'};
d.drawSVG('diagram', options);
</script>
```
or use jQuery to do all the work:
```html
<script src="{{ bower directory }}/jquery/dist/jquery.min.js" />
<div class="diagram">A->B: Message</div>
<script>
var options = {theme: 'hand'};
$(".diagram").sequenceDiagram(options);
</script>
```
For full examples check out [the demo site](https://bramp.github.io/js-sequence-diagrams/).
Options
-------
```javascript
var options = {
// Change the styling of the diagram, typically one of 'simple', 'hand'. New themes can be registered with registerTheme(...).
theme: string,
  // CSS class to apply to the diagram's svg tag. (Only supported if using Snap.svg)
css_class: string,
};
```
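As a minimal sketch of passing these options (the element id, message text, and `my-diagram` class name below are placeholders, not part of the library):

```html
<div id="diagram"></div>
<script>
  // Draw with the simple theme and tag the generated <svg> with a custom
  // CSS class (css_class is only applied when rendering via Snap.svg).
  var d = Diagram.parse("A->B: Does something");
  d.drawSVG('diagram', {theme: 'simple', css_class: 'my-diagram'});
</script>
```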
Styling
-------
The following CSS classes are applied to the SVG diagram when using snap.svg:
* `sequence`: Applies to main SVG tag.
* `title`: Applied to the title of the diagram.
* `actor`: Applied to the actors.
* `signal`: Applied to the signals.
* `note`: Applied to all notes.
The diagram can then be customised, for example:
```css
.signal text {
fill: #000000;
}
.signal text:hover {
fill: #aaaaaa
}
.note rect, .note path {
fill: #ffff00;
}
.title rect, .title path,
.actor rect, .actor path {
fill: #ffffff
}
```
Raphaël Deprecation
-------------------
Version 1.x of this library used [Raphaël](http://raphaeljs.com/) for drawing the diagrams; however, Raphaël had some limitations and has since disappeared from the Internet. We've decided to move to [Snap.svg](http://snapsvg.io/), which is a pure SVG implementation, instead of Raphaël, which, in addition to SVG, also supported VML (on Internet Explorer). That VML support made it impossible to use some newer SVG capabilities. Native SVG allows us to use CSS styling, better font support, animations and more.
To aid in the transition, version 2.x will support both Raphaël and Snap.svg (preferring Snap.svg). If you include Raphaël instead of Snap.svg, it will default to using Raphaël for rendering. For example:
```html
<script src="{{ bower directory }}/raphael/raphael-min.js"></script>
```
There are also four transitional themes, 'snapSimple', 'snapHand', 'raphaelSimple', 'raphaelHand', which force the use of either Snap.svg, or Raphaël.
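For example, a transitional theme is requested just like any other theme (a sketch assuming jQuery is loaded and the diagram markup from the Usage section above):

```html
<script>
  // Force the Raphaël renderer during the transition period.
  $(".diagram").sequenceDiagram({theme: 'raphaelHand'});
</script>
```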
The plan is to drop support for Raphaël in a future release, simplifying the library, and reducing the file size.
### Adding a Font
Raphaël requires Cufon-style fonts. Find the font you want in TTF or OTF format, visit [Cufon's site](http://cufon.shoqolate.com/generate/) and process it into a JavaScript file. Then ensure the font is included via the HTML, as shown below, or recompile after altering main.js. So far only the hand-drawn font, Daniel Bold, has been included.
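For example, the generated Cufon file is included as just another script; the filename below is hypothetical and depends on the font you processed:

```html
<!-- Hypothetical filename produced by the Cufon generator -->
<script src="fonts/my_font_700.font.js"></script>
```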
Build requirements
------------------
The build is managed by a Makefile, and uses various tools available from npm. Thus both `make` and [npm](https://github.com/npm/npm) are required, and can easily be installed on any Linux or Mac machine.
```bash
make
```
The Makefile will use npm to install all the dev dependencies, build, and test.
Testing
-------
We use [QUnit](https://qunitjs.com/) for testing. It can be run from the command line or via a browser. The command-line run tests multiple permutations of [lodash](https://lodash.com/) and [Underscore](http://underscorejs.org/), with and without minification.
```bash
make test
...
Global summary:
┌───────┬───────┬────────────┬────────┬────────┬─────────┐
│ Files │ Tests │ Assertions │ Failed │ Passed │ Runtime │
├───────┼───────┼────────────┼────────┼────────┼─────────┤
│ 1 │ 13 │ 231 │ 0 │ 231 │ 250 │
└───────┴───────┴────────────┴────────┴────────┴─────────┘
```
or run `make` and then open test/qunit.html in a browser. Finally, a simple playground is available at test/test.html.
How to release
--------------
* Make sure all changes are checked in
* Bump version in src/main.js and bower.json
* ``make clean``
* ``make``
* ``git add -f src/main.js bower.json dist/*``
* ``git commit -m "Released version 2.x.x"``
* ``git push origin master``
* ``git tag -a v2.x.x -m v2.x.x``
* ``git push origin v2.x.x``
TODO
----
* Other themes
* Automate the release process
* Testing that checks the generated SVG is correct
* Improve the hand drawn theme
* "Note left of Bob: " generates a small empty box.
* The font seems to have extra margin at the bottom.
* The wiggly lines don't always touch.
* Dozens of other issues on [https://github.com/bramp/js-sequence-diagrams/issues](https://github.com/bramp/js-sequence-diagrams/issues)
Contributors
------------
via [GitHub](https://github.com/bramp/js-sequence-diagrams/graphs/contributors)
Thanks
------
This project makes use of [Jison](https://zaach.github.io/jison/), Snap.svg, underscore.js, and the awesome [Daniel font](http://www.dafont.com/daniel.font) (which is free to use for any purpose).
Many thanks to [Web Sequence Diagrams](http://www.websequencediagrams.com/), which greatly inspired this project and forms the basis for the syntax.
Related
-------
* [Web Sequence Diagrams](http://www.websequencediagrams.com/) Server side version with a commercial offering
* [flowchart.js](https://adrai.github.io/flowchart.js/) A similar project that draws flow charts in the browser
Licence (Simplified BSD License)
-------
Copyright (c) 2012-2017, Andrew Brampton
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| {'content_hash': '3fb26ed7b65df0289f83879665e6b338', 'timestamp': '', 'source': 'github', 'line_count': 218, 'max_line_length': 755, 'avg_line_length': 40.03669724770642, 'alnum_prop': 0.7062328139321723, 'repo_name': 'dugaldmorrow/sequence-diagrams', 'id': '24371736ea3744b8e02eaad59546001364bc57f0', 'size': '9113', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33261', 'license': 'bsd-2-clause', 'language': [{'name': 'CSS', 'bytes': '12918'}, {'name': 'HTML', 'bytes': '132505'}, {'name': 'JavaScript', 'bytes': '308642'}, {'name': 'Makefile', 'bytes': '3750'}]} |
<!DOCTYPE html>
<!-- DO NOT EDIT! This test has been generated by tools/gentest.py. -->
<title>Canvas test: 2d.path.isPointInPath.outside</title>
<meta name="author" content="Philip Taylor">
<script src="../../resources/testharness.js"></script>
<script src="../../resources/testharnessreport.js"></script>
<script src="common/canvas-tests.js"></script>
<link rel="stylesheet" href="common/canvas-tests.css">
<body class="show_output">
<h1>2d.path.isPointInPath.outside</h1>
<p class="desc">isPointInPath() works on paths outside the canvas</p>
<p class="output">Actual output:</p>
<canvas id="c" class="output" width="100" height="50"><p class="fallback">FAIL (fallback content)</p></canvas>
<ul id="d"></ul>
<script>
var t = async_test("isPointInPath() works on paths outside the canvas");
_addTest(function(canvas, ctx) {
ctx.rect(0, -100, 20, 20);
ctx.rect(20, -10, 20, 20);
_assertSame(ctx.isPointInPath(10, -110), false, "ctx.isPointInPath(10, -110)", "false");
_assertSame(ctx.isPointInPath(10, -90), true, "ctx.isPointInPath(10, -90)", "true");
_assertSame(ctx.isPointInPath(10, -70), false, "ctx.isPointInPath(10, -70)", "false");
_assertSame(ctx.isPointInPath(30, -20), false, "ctx.isPointInPath(30, -20)", "false");
_assertSame(ctx.isPointInPath(30, 0), true, "ctx.isPointInPath(30, 0)", "true");
_assertSame(ctx.isPointInPath(30, 20), false, "ctx.isPointInPath(30, 20)", "false");
});
</script>
| {'content_hash': 'e634303b68a5507712be11211253758e', 'timestamp': '', 'source': 'github', 'line_count': 35, 'max_line_length': 110, 'avg_line_length': 40.42857142857143, 'alnum_prop': 0.6918727915194346, 'repo_name': 'Shao-Feng/crosswalk-test-suite', 'id': '105ec8692fd8dfc5d4712c1da560dac097395493', 'size': '1415', 'binary': False, 'copies': '54', 'ref': 'refs/heads/master', 'path': 'webapi/tct-canvas-html5-tests/canvas-py/w3c/2d.path.isPointInPath.outside.html', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'Batchfile', 'bytes': '1544'}, {'name': 'C', 'bytes': '28136'}, {'name': 'CSS', 'bytes': '403019'}, {'name': 'CoffeeScript', 'bytes': '18978'}, {'name': 'Cucumber', 'bytes': '107732'}, {'name': 'GLSL', 'bytes': '6990'}, {'name': 'Groff', 'bytes': '12'}, {'name': 'HTML', 'bytes': '40882344'}, {'name': 'Java', 'bytes': '924791'}, {'name': 'JavaScript', 'bytes': '4759531'}, {'name': 'Logos', 'bytes': '12'}, {'name': 'Makefile', 'bytes': '1044'}, {'name': 'PHP', 'bytes': '45437'}, {'name': 'Python', 'bytes': '4122683'}, {'name': 'Shell', 'bytes': '851780'}]} |
<?php
namespace Psy;
use Symfony\Component\Finder\Finder;
/**
* A Psy Shell Phar compiler.
*/
class Compiler
{
/**
* Compiles psysh into a single phar file.
*
* @param string $pharFile The full path to the file to create
*/
public function compile($pharFile = 'psysh.phar')
{
if (file_exists($pharFile)) {
unlink($pharFile);
}
$this->version = Shell::VERSION;
$phar = new \Phar($pharFile, 0, 'psysh.phar');
$phar->setSignatureAlgorithm(\Phar::SHA1);
$phar->startBuffering();
$finder = Finder::create()
->files()
->ignoreVCS(true)
->name('*.php')
->notName('Compiler.php')
->notName('Autoloader.php')
->in(__DIR__ . '/..');
foreach ($finder as $file) {
$this->addFile($phar, $file);
}
$finder = Finder::create()
->files()
->ignoreVCS(true)
->name('*.php')
->exclude('Tests')
->in(__DIR__ . '/../../vendor/dnoegel/php-xdg-base-dir/src')
->in(__DIR__ . '/../../vendor/jakub-onderka/php-console-color')
->in(__DIR__ . '/../../vendor/jakub-onderka/php-console-highlighter')
->in(__DIR__ . '/../../vendor/nikic/php-parser/lib')
->in(__DIR__ . '/../../vendor/symfony/console')
->in(__DIR__ . '/../../vendor/symfony/var-dumper')
->in(__DIR__ . '/../../vendor/symfony/yaml');
foreach ($finder as $file) {
$this->addFile($phar, $file);
}
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/autoload.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/include_paths.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/autoload_files.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/autoload_psr4.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/autoload_real.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/autoload_namespaces.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/autoload_classmap.php'));
$this->addFile($phar, new \SplFileInfo(__DIR__ . '/../../vendor/composer/ClassLoader.php'));
// Stubs
$phar->setStub($this->getStub());
$phar->stopBuffering();
// $this->addFile($phar, new \SplFileInfo(__DIR__.'/../../LICENSE'), false);
unset($phar);
}
/**
* Add a file to the psysh Phar.
*
     * @param \Phar        $phar
     * @param \SplFileInfo $file
* @param bool $strip (default: true)
*/
private function addFile($phar, $file, $strip = true)
{
$path = str_replace(dirname(dirname(__DIR__)) . DIRECTORY_SEPARATOR, '', $file->getRealPath());
$content = file_get_contents($file);
if ($strip) {
$content = $this->stripWhitespace($content);
} elseif ('LICENSE' === basename($file)) {
$content = "\n" . $content . "\n";
}
$phar->addFromString($path, $content);
}
/**
* Removes whitespace from a PHP source string while preserving line numbers.
*
* @param string $source A PHP string
*
* @return string The PHP string with the whitespace removed
*/
private function stripWhitespace($source)
{
if (!function_exists('token_get_all')) {
return $source;
}
$output = '';
foreach (token_get_all($source) as $token) {
if (is_string($token)) {
$output .= $token;
} elseif (in_array($token[0], array(T_COMMENT, T_DOC_COMMENT))) {
$output .= str_repeat("\n", substr_count($token[1], "\n"));
} elseif (T_WHITESPACE === $token[0]) {
// reduce wide spaces
$whitespace = preg_replace('{[ \t]+}', ' ', $token[1]);
// normalize newlines to \n
$whitespace = preg_replace('{(?:\r\n|\r|\n)}', "\n", $whitespace);
// trim leading spaces
$whitespace = preg_replace('{\n +}', "\n", $whitespace);
$output .= $whitespace;
} else {
$output .= $token[1];
}
}
return $output;
}
/**
* Get a Phar stub for psysh.
*
* This is basically the psysh bin, with the autoload require statements swapped out.
*
* @return string
*/
private function getStub()
{
$autoload = <<<'EOS'
Phar::mapPhar('psysh.phar');
require 'phar://psysh.phar/vendor/autoload.php';
EOS;
$content = file_get_contents(__DIR__ . '/../../bin/psysh');
$content = preg_replace('{/\* <<<.*?>>> \*/}sm', $autoload, $content);
$content .= '__HALT_COMPILER();';
return $content;
}
}
| {'content_hash': 'cb171b120dc7a810323ee376573aa645', 'timestamp': '', 'source': 'github', 'line_count': 156, 'max_line_length': 108, 'avg_line_length': 32.31410256410256, 'alnum_prop': 0.5024796667327911, 'repo_name': 'edom-huang/myweb', 'id': '1d7d2a73625141d762ebe899322ea110a304e65e', 'size': '5246', 'binary': False, 'copies': '87', 'ref': 'refs/heads/master', 'path': 'vendor/psy/psysh/src/Psy/Compiler.php', 'mode': '33188', 'license': 'bsd-2-clause', 'language': [{'name': 'ApacheConf', 'bytes': '575'}, {'name': 'CSS', 'bytes': '5891'}, {'name': 'JavaScript', 'bytes': '88413'}, {'name': 'PHP', 'bytes': '118607'}]} |
<?xml version="1.0" encoding="utf-8"?>
<data>
<record jsxid="version" type="string">3.2</record>
<record jsxid="caption" type="string">32sample-drillDown</record>
<record jsxid="mode" type="boolean">false</record>
<record jsxid="namespace" type="string">sampledrillDown</record>
<record jsxid="cancelerror" type="boolean">true</record>
<record jsxid="cancelrightclick" type="boolean">true</record>
<record jsxid="left" type="number">0</record>
<record jsxid="top" type="number">0</record>
<record jsxid="width" type="string">100%</record>
<record jsxid="height" type="string">100%</record>
<record jsxid="position" type="number">0</record>
<record jsxid="overflow" type="number">0</record>
<record jsxid="eventsvers" type="string">3.1</record>
<record jsxid="onload" type="string"/>
<record jsxid="objectseturl" type="string">components/appCanvas.xml</record>
<record jsxid="includes" type="array">
<record jsxid="0" type="map">
<record jsxid="id" type="string">appCanvas_component</record>
<record jsxid="owner" type="string">application</record>
<record jsxid="type" type="string">component</record>
<record jsxid="onLoad" type="boolean">false</record>
<record jsxid="src" type="string">components/appCanvas.xml</record>
</record>
<record jsxid="1" type="map">
<record jsxid="id" type="string">logic_js</record>
<record jsxid="owner" type="string">application</record>
<record jsxid="type" type="string">script</record>
<record jsxid="onLoad" type="boolean">true</record>
<record jsxid="src" type="string">js/logic.js</record>
</record>
<record jsxid="2" type="map">
<record jsxid="id" type="string">source_xml</record>
<record jsxid="type" type="string">xml</record>
<record jsxid="owner" type="string">application</record>
<record jsxid="onLoad" type="string"/>
<record jsxid="required" type="boolean">true</record>
<record jsxid="src" type="string">xml/source.xml</record>
</record>
<record jsxid="3" type="map">
<record jsxid="id" type="string">3_xml</record>
<record jsxid="type" type="string">xml</record>
<record jsxid="owner" type="string">application</record>
<record jsxid="onLoad" type="string"/>
<record jsxid="required" type="boolean">true</record>
<record jsxid="src" type="string">xml/3.xml</record>
</record>
<record jsxid="4" type="map">
<record jsxid="id" type="string">7_xml</record>
<record jsxid="type" type="string">xml</record>
<record jsxid="owner" type="string">application</record>
<record jsxid="onLoad" type="null"/>
<record jsxid="required" type="boolean">true</record>
<record jsxid="src" type="string">xml/7.xml</record>
</record>
</record>
<record jsxid="watermark" type="string">right</record>
<record jsxid="unicode" type="boolean">false</record>
<record jsxid="bodyhotkeys" type="boolean">false</record>
<record jsxid="jsxversion" type="string">3.2</record>
<record jsxid="default_locale" type="string"/>
</data> | {'content_hash': '24a2b2a211098a810cbe82ec1df86424', 'timestamp': '', 'source': 'github', 'line_count': 63, 'max_line_length': 78, 'avg_line_length': 49.22222222222222, 'alnum_prop': 0.6617220251531764, 'repo_name': 'dineshkummarc/Gitak_r982', 'id': '41e191e11884abc254e339cdf5eb072f3a1d2834', 'size': '3101', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'samples/gi32/32sample-drillDown/config.xml', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'C#', 'bytes': '196418'}, {'name': 'Java', 'bytes': '3028945'}, {'name': 'JavaScript', 'bytes': '1186492'}, {'name': 'PHP', 'bytes': '127467'}, {'name': 'Perl', 'bytes': '180087'}, {'name': 'Python', 'bytes': '88187'}, {'name': 'Shell', 'bytes': '79'}]} |
package org.apache.calcite.rel.rules;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelRule;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Values;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.tools.RelBuilder;
import org.apache.calcite.tools.RelBuilderFactory;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import org.immutables.value.Value;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
/**
* Rule that applies {@link Aggregate} to a {@link Values} (currently just an
 * empty {@code Values}).
*
* <p>This is still useful because {@link PruneEmptyRules#AGGREGATE_INSTANCE}
* doesn't handle {@code Aggregate}, which is in turn because {@code Aggregate}
 * of empty relations needs some special handling: a single row will be
* generated, where each column's value depends on the specific aggregate calls
* (e.g. COUNT is 0, SUM is NULL).
*
* <p>Sample query where this matters:
*
* <blockquote><code>SELECT COUNT(*) FROM s.foo WHERE 1 = 0</code></blockquote>
*
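 * <p>For that sample the rule replaces the {@code Aggregate} over the empty
 * {@code Values} with a one-row {@code Values} containing the literal
 * {@code 0}, preserving the single-row result that an aggregate of an empty
 * relation must produce.
 *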
* <p>This rule only applies to "grand totals", that is, {@code GROUP BY ()}.
* Any non-empty {@code GROUP BY} clause will return one row per group key
* value, and each group will consist of at least one row.
*
* @see CoreRules#AGGREGATE_VALUES
*/
@Value.Enclosing
public class AggregateValuesRule
extends RelRule<AggregateValuesRule.Config>
implements SubstitutionRule {
/** Creates an AggregateValuesRule. */
protected AggregateValuesRule(Config config) {
super(config);
}
@Deprecated // to be removed before 2.0
public AggregateValuesRule(RelBuilderFactory relBuilderFactory) {
this(Config.DEFAULT
.withRelBuilderFactory(relBuilderFactory)
.as(Config.class));
}
@Override public void onMatch(RelOptRuleCall call) {
final Aggregate aggregate = call.rel(0);
final Values values = call.rel(1);
Util.discard(values);
final RelBuilder relBuilder = call.builder();
final RexBuilder rexBuilder = relBuilder.getRexBuilder();
final List<RexLiteral> literals = new ArrayList<>();
for (final AggregateCall aggregateCall : aggregate.getAggCallList()) {
switch (aggregateCall.getAggregation().getKind()) {
case COUNT:
case SUM0:
literals.add(
rexBuilder.makeLiteral(BigDecimal.ZERO, aggregateCall.getType()));
break;
case MIN:
case MAX:
case SUM:
literals.add(rexBuilder.makeNullLiteral(aggregateCall.getType()));
break;
default:
// Unknown what this aggregate call should do on empty Values. Bail out to be safe.
return;
}
}
call.transformTo(
relBuilder.values(ImmutableList.of(literals), aggregate.getRowType())
.build());
// New plan is absolutely better than old plan.
call.getPlanner().prune(aggregate);
}
/** Rule configuration. */
@Value.Immutable
public interface Config extends RelRule.Config {
Config DEFAULT = ImmutableAggregateValuesRule.Config.of()
.withOperandFor(Aggregate.class, Values.class);
@Override default AggregateValuesRule toRule() {
return new AggregateValuesRule(this);
}
/** Defines an operand tree for the given classes. */
default Config withOperandFor(Class<? extends Aggregate> aggregateClass,
Class<? extends Values> valuesClass) {
return withOperandSupplier(b0 ->
b0.operand(aggregateClass)
.predicate(aggregate -> aggregate.getGroupCount() == 0)
.oneInput(b1 ->
b1.operand(valuesClass)
.predicate(values -> values.getTuples().isEmpty())
.noInputs()))
.as(Config.class);
}
}
}
| {'content_hash': '2d23ce1fc257e5dc4e5a97a3fcf1e0ff', 'timestamp': '', 'source': 'github', 'line_count': 119, 'max_line_length': 91, 'avg_line_length': 33.109243697478995, 'alnum_prop': 0.6926395939086294, 'repo_name': 'looker-open-source/calcite', 'id': 'e4fb82ca6b94b6ab2932b44be7a5ddb5a8ddf12f', 'size': '4737', 'binary': False, 'copies': '4', 'ref': 'refs/heads/looker', 'path': 'core/src/main/java/org/apache/calcite/rel/rules/AggregateValuesRule.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '4392'}, {'name': 'FreeMarker', 'bytes': '20115'}, {'name': 'HTML', 'bytes': '40994'}, {'name': 'Java', 'bytes': '21031100'}, {'name': 'Kotlin', 'bytes': '157472'}, {'name': 'PigLatin', 'bytes': '1419'}, {'name': 'Python', 'bytes': '1610'}, {'name': 'Ruby', 'bytes': '2851'}, {'name': 'SCSS', 'bytes': '36833'}, {'name': 'Shell', 'bytes': '8012'}]} |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("ElevetorLab")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("ElevetorLab")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("0e3b3a9f-b28e-45d4-9843-a5074bc63f3a")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {'content_hash': '263f189372ac4e2a892036adcea5bfdf', 'timestamp': '', 'source': 'github', 'line_count': 36, 'max_line_length': 84, 'avg_line_length': 38.75, 'alnum_prop': 0.7448028673835125, 'repo_name': 'martinmihov/Programming-Fundamentals', 'id': '23fb027e813776ef81ba783decc8554f14120010', 'size': '1398', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'Data Types and Variables - Lab/ElevetorLab/Properties/AssemblyInfo.cs', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C#', 'bytes': '473725'}]} |
window['DomTerm'] = DomTerm;
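// The assignments below expose DomTerm methods under quoted property names so
// that the Closure Compiler's property renaming does not break the external API.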
DomTerm.prototype.processInputCharacters = function(str) {
this['processInputCharacters'](str);
};
DomTerm.prototype['initializeTerminal'] = DomTerm.prototype.initializeTerminal;
DomTerm.prototype['insertBytes'] = DomTerm.prototype.insertBytes;
DomTerm.prototype['insertString'] = DomTerm.prototype.insertString;
DomTerm.prototype['reportEvent'] = DomTerm.prototype.reportEvent;
DomTerm.prototype['setInputMode'] = DomTerm.prototype.setInputMode;
DomTerm.prototype['doPaste'] = DomTerm.prototype.doPaste;
DomTerm.prototype['doCopy'] = DomTerm.prototype.doCopy;
DomTerm.prototype['doSaveAs'] = DomTerm.prototype.doSaveAs;
DomTerm.prototype['setCaretStyle'] = DomTerm.prototype.setCaretStyle;
DomTerm.prototype['processInputCharacters'] = DomTerm.prototype.processInputCharacters;
| {'content_hash': '703ff4b877125eaf615dc3180d8e305d', 'timestamp': '', 'source': 'github', 'line_count': 14, 'max_line_length': 87, 'avg_line_length': 58.0, 'alnum_prop': 0.8103448275862069, 'repo_name': 'rwhogg/DomTerm', 'id': '3c034193a06c6e3dfaa17ef8e783abbe059c0c53', 'size': '929', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'hlib/closure-externs.js', 'mode': '33188', 'license': 'bsd-3-clause', 'language': [{'name': 'C', 'bytes': '169714'}, {'name': 'C++', 'bytes': '251415'}, {'name': 'CSS', 'bytes': '19079'}, {'name': 'HTML', 'bytes': '3287'}, {'name': 'Java', 'bytes': '101977'}, {'name': 'JavaScript', 'bytes': '560075'}, {'name': 'M4', 'bytes': '6143'}, {'name': 'Makefile', 'bytes': '14762'}, {'name': 'QMake', 'bytes': '1421'}, {'name': 'Shell', 'bytes': '13836'}]} |
package org.apache.dubbo.config;
import org.apache.dubbo.config.support.Parameter;
import java.util.Map;
/**
* MonitorConfig
*
* @export
*/
public class MonitorConfig extends AbstractConfig {
private static final long serialVersionUID = -1184681514659198203L;
private String protocol;
private String address;
private String username;
private String password;
private String group;
private String version;
private String interval;
// customized parameters
private Map<String, String> parameters;
// if it's default
private Boolean isDefault;
public MonitorConfig() {
}
public MonitorConfig(String address) {
this.address = address;
}
@Parameter(excluded = true)
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
@Parameter(excluded = true)
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
@Parameter(excluded = true)
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
@Parameter(excluded = true)
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getGroup() {
return group;
}
public void setGroup(String group) {
this.group = group;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public Map<String, String> getParameters() {
return parameters;
}
public void setParameters(Map<String, String> parameters) {
checkParameterName(parameters);
this.parameters = parameters;
}
public Boolean isDefault() {
return isDefault;
}
public void setDefault(Boolean isDefault) {
this.isDefault = isDefault;
}
public void setInterval(String interval){
this.interval = interval;
}
public String getInterval(){
return interval;
}
} | {'content_hash': '972457c77f45d66b204a6b29c894b35e', 'timestamp': '', 'source': 'github', 'line_count': 121, 'max_line_length': 71, 'avg_line_length': 19.917355371900825, 'alnum_prop': 0.6012448132780083, 'repo_name': 'JasonHZXie/dubbo', 'id': '55155afb55e493be07c953912253bf72bd609788', 'size': '3226', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/MonitorConfig.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '1177'}, {'name': 'Java', 'bytes': '5838112'}, {'name': 'Lex', 'bytes': '2076'}, {'name': 'Shell', 'bytes': '7199'}, {'name': 'Thrift', 'bytes': '664'}]} |
package org.onosproject.vtnweb.resources;
import static javax.ws.rs.core.Response.Status.OK;
import static org.onlab.util.Tools.nullIsNotFound;
import java.io.IOException;
import java.io.InputStream;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.onosproject.rest.AbstractWebResource;
import org.onosproject.vtnrsc.FlowClassifier;
import org.onosproject.vtnrsc.FlowClassifierId;
import org.onosproject.vtnrsc.flowclassifier.FlowClassifierService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* Query and program flow classifier.
*/
@Path("flow_classifiers")
public class FlowClassifierWebResource extends AbstractWebResource {
private final Logger log = LoggerFactory.getLogger(FlowClassifierWebResource.class);
public static final String FLOW_CLASSIFIER_NOT_FOUND = "Flow classifier not found";
/**
* Get all flow classifiers created.
*
* @return 200 OK
*/
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getFlowClassifiers() {
Iterable<FlowClassifier> flowClassifiers = get(FlowClassifierService.class).getFlowClassifiers();
ObjectNode result = mapper().createObjectNode();
ArrayNode flowClassifierEntry = result.putArray("flow_classifiers");
if (flowClassifiers != null) {
for (final FlowClassifier flowClassifier : flowClassifiers) {
flowClassifierEntry.add(codec(FlowClassifier.class).encode(flowClassifier, this));
}
}
return ok(result.toString()).build();
}
/**
* Get details of a flow classifier.
*
* @param id
* flow classifier id
* @return 200 OK , 404 if given identifier does not exist
*/
@GET
@Path("{flow_id}")
@Produces(MediaType.APPLICATION_JSON)
public Response getFlowClassifier(@PathParam("flow_id") String id) {
FlowClassifier flowClassifier = nullIsNotFound(get(FlowClassifierService.class)
.getFlowClassifier(FlowClassifierId.of(id)), FLOW_CLASSIFIER_NOT_FOUND);
ObjectNode result = mapper().createObjectNode();
result.set("flow_classifier", codec(FlowClassifier.class).encode(flowClassifier, this));
return ok(result.toString()).build();
}
/**
* Creates and stores a new flow classifier.
*
* @param stream
* flow classifier from JSON
* @return status of the request - CREATED if the JSON is correct,
* BAD_REQUEST if the JSON is invalid
*/
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createFlowClassifier(InputStream stream) {
try {
ObjectNode jsonTree = (ObjectNode) mapper().readTree(stream);
JsonNode flow = jsonTree.get("flow_classifier");
FlowClassifier flowClassifier = codec(FlowClassifier.class).decode((ObjectNode) flow, this);
Boolean issuccess = nullIsNotFound(get(FlowClassifierService.class).createFlowClassifier(flowClassifier),
FLOW_CLASSIFIER_NOT_FOUND);
return Response.status(OK).entity(issuccess.toString()).build();
} catch (IOException ex) {
log.error("Exception while creating flow classifier {}.", ex.toString());
throw new IllegalArgumentException(ex);
}
}
/**
* Update details of a flow classifier.
*
* @param id
* flow classifier id
* @param stream
* InputStream
* @return 200 OK, 404 if given identifier does not exist
*/
@PUT
@Path("{flow_id}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response updateFlowClassifier(@PathParam("flow_id") String id, final InputStream stream) {
try {
JsonNode jsonTree = mapper().readTree(stream);
JsonNode flow = jsonTree.get("flow_classifier");
FlowClassifier flowClassifier = codec(FlowClassifier.class).decode((ObjectNode) flow, this);
Boolean result = nullIsNotFound(get(FlowClassifierService.class).updateFlowClassifier(flowClassifier),
FLOW_CLASSIFIER_NOT_FOUND);
return Response.status(OK).entity(result.toString()).build();
} catch (IOException e) {
log.error("Update flow classifier failed because of exception {}.", e.toString());
throw new IllegalArgumentException(e);
}
}
/**
* Delete details of a flow classifier.
*
* @param id
* flow classifier id
*/
@Path("{flow_id}")
@DELETE
public void deleteFlowClassifier(@PathParam("flow_id") String id) {
log.debug("Deletes flow classifier by identifier {}.", id);
FlowClassifierId flowClassifierId = FlowClassifierId.of(id);
Boolean issuccess = nullIsNotFound(get(FlowClassifierService.class).removeFlowClassifier(flowClassifierId),
FLOW_CLASSIFIER_NOT_FOUND);
}
}
| {'content_hash': 'c6f42e0833e9285a73597606800bdaa1', 'timestamp': '', 'source': 'github', 'line_count': 150, 'max_line_length': 117, 'avg_line_length': 36.82666666666667, 'alnum_prop': 0.6594858797972484, 'repo_name': 'planoAccess/clonedONOS', 'id': '4fd3fa485f0339d22f398fd18c81529324500170', 'size': '6133', 'binary': False, 'copies': '2', 'ref': 'refs/heads/master', 'path': 'apps/vtn/vtnweb/src/main/java/org/onosproject/vtnweb/resources/FlowClassifierWebResource.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'CSS', 'bytes': '163048'}, {'name': 'HTML', 'bytes': '487623'}, {'name': 'Java', 'bytes': '16142711'}, {'name': 'JavaScript', 'bytes': '3060983'}, {'name': 'Protocol Buffer', 'bytes': '3932'}, {'name': 'Shell', 'bytes': '2625'}]} |
<?php
namespace Symfony\Component\Form\Extension\Core\Type;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\ChoiceList\Factory\CachingFactoryDecorator;
use Symfony\Component\Form\ChoiceList\Factory\PropertyAccessDecorator;
use Symfony\Component\Form\ChoiceList\LegacyChoiceListAdapter;
use Symfony\Component\Form\ChoiceList\View\ChoiceGroupView;
use Symfony\Component\Form\ChoiceList\ChoiceListInterface;
use Symfony\Component\Form\ChoiceList\Factory\DefaultChoiceListFactory;
use Symfony\Component\Form\ChoiceList\Factory\ChoiceListFactoryInterface;
use Symfony\Component\Form\ChoiceList\View\ChoiceListView;
use Symfony\Component\Form\ChoiceList\View\ChoiceView;
use Symfony\Component\Form\Exception\TransformationFailedException;
use Symfony\Component\Form\Extension\Core\DataMapper\RadioListMapper;
use Symfony\Component\Form\Extension\Core\DataMapper\CheckboxListMapper;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Form\FormEvent;
use Symfony\Component\Form\FormEvents;
use Symfony\Component\Form\FormInterface;
use Symfony\Component\Form\FormView;
use Symfony\Component\Form\Extension\Core\ChoiceList\ChoiceListInterface as LegacyChoiceListInterface;
use Symfony\Component\Form\Extension\Core\EventListener\MergeCollectionListener;
use Symfony\Component\Form\Extension\Core\DataTransformer\ChoiceToValueTransformer;
use Symfony\Component\Form\Extension\Core\DataTransformer\ChoicesToValuesTransformer;
use Symfony\Component\Form\Util\FormUtil;
use Symfony\Component\OptionsResolver\Options;
use Symfony\Component\OptionsResolver\OptionsResolver;
class ChoiceType extends AbstractType
{
/**
* Caches created choice lists.
*
* @var ChoiceListFactoryInterface
*/
private $choiceListFactory;
public function __construct(ChoiceListFactoryInterface $choiceListFactory = null)
{
$this->choiceListFactory = $choiceListFactory ?: new CachingFactoryDecorator(
new PropertyAccessDecorator(
new DefaultChoiceListFactory()
)
);
}
/**
* {@inheritdoc}
*/
public function buildForm(FormBuilderInterface $builder, array $options)
{
if ($options['expanded']) {
$builder->setDataMapper($options['multiple'] ? new CheckboxListMapper() : new RadioListMapper());
// Initialize all choices before doing the index check below.
// This helps in cases where index checks are optimized for non
// initialized choice lists. For example, when using an SQL driver,
// the index check would read in one SQL query and the initialization
// requires another SQL query. When the initialization is done first,
// one SQL query is sufficient.
$choiceListView = $this->createChoiceListView($options['choice_list'], $options);
$builder->setAttribute('choice_list_view', $choiceListView);
// Check if the choices already contain the empty value
// Only add the placeholder option if this is not the case
if (null !== $options['placeholder'] && 0 === count($options['choice_list']->getChoicesForValues(array('')))) {
$placeholderView = new ChoiceView(null, '', $options['placeholder']);
// "placeholder" is a reserved name
$this->addSubForm($builder, 'placeholder', $placeholderView, $options);
}
$this->addSubForms($builder, $choiceListView->preferredChoices, $options);
$this->addSubForms($builder, $choiceListView->choices, $options);
// Make sure that scalar, submitted values are converted to arrays
// which can be submitted to the checkboxes/radio buttons
$builder->addEventListener(FormEvents::PRE_SUBMIT, function (FormEvent $event) {
$form = $event->getForm();
$data = $event->getData();
if (null === $data) {
$emptyData = $form->getConfig()->getEmptyData();
if (false === FormUtil::isEmpty($emptyData) && array() !== $emptyData) {
$data = is_callable($emptyData) ? call_user_func($emptyData, $form, $data) : $emptyData;
}
}
// Convert the submitted data to a string, if scalar, before
// casting it to an array
if (!is_array($data)) {
$data = (array) (string) $data;
}
// A map from submitted values to integers
$valueMap = array_flip($data);
// Make a copy of the value map to determine whether any unknown
// values were submitted
$unknownValues = $valueMap;
// Reconstruct the data as mapping from child names to values
$data = array();
foreach ($form as $child) {
$value = $child->getConfig()->getOption('value');
// Add the value to $data with the child's name as key
if (isset($valueMap[$value])) {
$data[$child->getName()] = $value;
unset($unknownValues[$value]);
continue;
}
}
// The empty value is always known, independent of whether a
// field exists for it or not
unset($unknownValues['']);
// Throw exception if unknown values were submitted
if (count($unknownValues) > 0) {
throw new TransformationFailedException(sprintf(
'The choices "%s" do not exist in the choice list.',
implode('", "', array_keys($unknownValues))
));
}
$event->setData($data);
});
}
if ($options['multiple']) {
// <select> tag with "multiple" option or list of checkbox inputs
$builder->addViewTransformer(new ChoicesToValuesTransformer($options['choice_list']));
} else {
// <select> tag without "multiple" option or list of radio inputs
$builder->addViewTransformer(new ChoiceToValueTransformer($options['choice_list']));
}
if ($options['multiple'] && $options['by_reference']) {
// Make sure the collection created during the client->norm
// transformation is merged back into the original collection
$builder->addEventSubscriber(new MergeCollectionListener(true, true));
}
}
/**
* {@inheritdoc}
*/
public function buildView(FormView $view, FormInterface $form, array $options)
{
$choiceTranslationDomain = $options['choice_translation_domain'];
if ($view->parent && null === $choiceTranslationDomain) {
$choiceTranslationDomain = $view->vars['translation_domain'];
}
/** @var ChoiceListView $choiceListView */
$choiceListView = $form->getConfig()->hasAttribute('choice_list_view')
? $form->getConfig()->getAttribute('choice_list_view')
: $this->createChoiceListView($options['choice_list'], $options);
$view->vars = array_replace($view->vars, array(
'multiple' => $options['multiple'],
'expanded' => $options['expanded'],
'preferred_choices' => $choiceListView->preferredChoices,
'choices' => $choiceListView->choices,
'separator' => '-------------------',
'placeholder' => null,
'choice_translation_domain' => $choiceTranslationDomain,
));
// The decision, whether a choice is selected, is potentially done
// thousand of times during the rendering of a template. Provide a
// closure here that is optimized for the value of the form, to
// avoid making the type check inside the closure.
if ($options['multiple']) {
$view->vars['is_selected'] = function ($choice, array $values) {
return in_array($choice, $values, true);
};
} else {
$view->vars['is_selected'] = function ($choice, $value) {
return $choice === $value;
};
}
// Check if the choices already contain the empty value
$view->vars['placeholder_in_choices'] = $choiceListView->hasPlaceholder();
// Only add the empty value option if this is not the case
if (null !== $options['placeholder'] && !$view->vars['placeholder_in_choices']) {
$view->vars['placeholder'] = $options['placeholder'];
}
// BC
$view->vars['empty_value'] = $view->vars['placeholder'];
$view->vars['empty_value_in_choices'] = $view->vars['placeholder_in_choices'];
if ($options['multiple'] && !$options['expanded']) {
// Add "[]" to the name in case a select tag with multiple options is
// displayed. Otherwise only one of the selected options is sent in the
// POST request.
$view->vars['full_name'] .= '[]';
}
}
/**
* {@inheritdoc}
*/
public function finishView(FormView $view, FormInterface $form, array $options)
{
if ($options['expanded']) {
// Radio buttons should have the same name as the parent
$childName = $view->vars['full_name'];
// Checkboxes should append "[]" to allow multiple selection
if ($options['multiple']) {
$childName .= '[]';
}
foreach ($view as $childView) {
$childView->vars['full_name'] = $childName;
}
}
}
/**
* {@inheritdoc}
*/
public function configureOptions(OptionsResolver $resolver)
{
$choiceLabels = (object) array('labels' => array());
$choiceListFactory = $this->choiceListFactory;
$emptyData = function (Options $options) {
if ($options['expanded'] && !$options['multiple']) {
return;
}
if ($options['multiple']) {
return array();
}
return '';
};
$placeholder = function (Options $options) {
return $options['required'] ? null : '';
};
// BC closure, to be removed in 3.0
$choicesNormalizer = function (Options $options, $choices) use ($choiceLabels) {
// Unset labels from previous invocations
$choiceLabels->labels = array();
// This closure is irrelevant when "choices_as_values" is set to true
if ($options['choices_as_values']) {
return $choices;
}
if (null === $choices) {
return;
}
return ChoiceType::normalizeLegacyChoices($choices, $choiceLabels);
};
// BC closure, to be removed in 3.0
$choiceLabel = function (Options $options) use ($choiceLabels) {
// If the choices contain duplicate labels, the normalizer of the
// "choices" option stores them in the $choiceLabels variable
// Trigger the normalizer
$options->offsetGet('choices');
// Pick labels from $choiceLabels if available
if ($choiceLabels->labels) {
// Don't pass the labels by reference. We do want to create a
// copy here so that every form has an own version of that
// variable (contrary to the $choiceLabels object shared by all
// forms)
$labels = $choiceLabels->labels;
// The $choiceLabels object is shared with the 'choices' closure.
// Since that normalizer can be replaced, labels have to be cleared here.
$choiceLabels->labels = array();
return function ($choice, $key) use ($labels) {
return $labels[$key];
};
}
return;
};
$that = $this;
$choiceListNormalizer = function (Options $options, $choiceList) use ($choiceListFactory, $that) {
if ($choiceList) {
@trigger_error(sprintf('The "choice_list" option of the "%s" form type (%s) is deprecated since version 2.7 and will be removed in 3.0. Use "choice_loader" instead.', $that->getName(), __CLASS__), E_USER_DEPRECATED);
if ($choiceList instanceof LegacyChoiceListInterface) {
return new LegacyChoiceListAdapter($choiceList);
}
return $choiceList;
}
if (null !== $options['choice_loader']) {
return $choiceListFactory->createListFromLoader(
$options['choice_loader'],
$options['choice_value']
);
}
// Harden against NULL values (like in EntityType and ModelType)
$choices = null !== $options['choices'] ? $options['choices'] : array();
// BC when choices are in the keys, not in the values
if (!$options['choices_as_values']) {
return $choiceListFactory->createListFromFlippedChoices($choices, $options['choice_value'], false);
}
return $choiceListFactory->createListFromChoices($choices, $options['choice_value']);
};
$choicesAsValuesNormalizer = function (Options $options, $choicesAsValues) use ($that) {
if (true !== $choicesAsValues) {
@trigger_error(sprintf('The value "false" for the "choices_as_values" option of the "%s" form type (%s) is deprecated since version 2.8 and will not be supported anymore in 3.0. Set this option to "true" and flip the contents of the "choices" option instead.', $that->getName(), __CLASS__), E_USER_DEPRECATED);
}
return $choicesAsValues;
};
$placeholderNormalizer = function (Options $options, $placeholder) use ($that) {
if (!is_object($options['empty_value']) || !$options['empty_value'] instanceof \Exception) {
@trigger_error(sprintf('The form option "empty_value" of the "%s" form type (%s) is deprecated since version 2.6 and will be removed in 3.0. Use "placeholder" instead.', $that->getName(), __CLASS__), E_USER_DEPRECATED);
if (null === $placeholder || '' === $placeholder) {
$placeholder = $options['empty_value'];
}
}
if ($options['multiple']) {
// never use an empty value for this case
return;
} elseif ($options['required'] && ($options['expanded'] || isset($options['attr']['size']) && $options['attr']['size'] > 1)) {
// placeholder for required radio buttons or a select with size > 1 does not make sense
return;
} elseif (false === $placeholder) {
// an empty value should be added but the user decided otherwise
return;
} elseif ($options['expanded'] && '' === $placeholder) {
// never use an empty label for radio buttons
return 'None';
}
// empty value has been set explicitly
return $placeholder;
};
$compound = function (Options $options) {
return $options['expanded'];
};
$choiceTranslationDomainNormalizer = function (Options $options, $choiceTranslationDomain) {
if (true === $choiceTranslationDomain) {
return $options['translation_domain'];
}
return $choiceTranslationDomain;
};
$resolver->setDefaults(array(
'multiple' => false,
'expanded' => false,
'choice_list' => null, // deprecated
'choices' => array(),
'choices_as_values' => false,
'choice_loader' => null,
'choice_label' => $choiceLabel,
'choice_name' => null,
'choice_value' => null,
'choice_attr' => null,
'preferred_choices' => array(),
'group_by' => null,
'empty_data' => $emptyData,
'empty_value' => new \Exception(), // deprecated
'placeholder' => $placeholder,
'error_bubbling' => false,
'compound' => $compound,
// The view data is always a string, even if the "data" option
// is manually set to an object.
// See https://github.com/symfony/symfony/pull/5582
'data_class' => null,
'choice_translation_domain' => true,
));
$resolver->setNormalizer('choices', $choicesNormalizer);
$resolver->setNormalizer('choice_list', $choiceListNormalizer);
$resolver->setNormalizer('placeholder', $placeholderNormalizer);
$resolver->setNormalizer('choice_translation_domain', $choiceTranslationDomainNormalizer);
$resolver->setNormalizer('choices_as_values', $choicesAsValuesNormalizer);
$resolver->setAllowedTypes('choice_list', array('null', 'Symfony\Component\Form\ChoiceList\ChoiceListInterface', 'Symfony\Component\Form\Extension\Core\ChoiceList\ChoiceListInterface'));
$resolver->setAllowedTypes('choices', array('null', 'array', '\Traversable'));
$resolver->setAllowedTypes('choice_translation_domain', array('null', 'bool', 'string'));
$resolver->setAllowedTypes('choices_as_values', 'bool');
$resolver->setAllowedTypes('choice_loader', array('null', 'Symfony\Component\Form\ChoiceList\Loader\ChoiceLoaderInterface'));
$resolver->setAllowedTypes('choice_label', array('null', 'bool', 'callable', 'string', 'Symfony\Component\PropertyAccess\PropertyPath'));
$resolver->setAllowedTypes('choice_name', array('null', 'callable', 'string', 'Symfony\Component\PropertyAccess\PropertyPath'));
$resolver->setAllowedTypes('choice_value', array('null', 'callable', 'string', 'Symfony\Component\PropertyAccess\PropertyPath'));
$resolver->setAllowedTypes('choice_attr', array('null', 'array', 'callable', 'string', 'Symfony\Component\PropertyAccess\PropertyPath'));
$resolver->setAllowedTypes('preferred_choices', array('array', '\Traversable', 'callable', 'string', 'Symfony\Component\PropertyAccess\PropertyPath'));
$resolver->setAllowedTypes('group_by', array('null', 'callable', 'string', 'Symfony\Component\PropertyAccess\PropertyPath'));
}
/**
* {@inheritdoc}
*/
public function getName()
{
return $this->getBlockPrefix();
}
/**
* {@inheritdoc}
*/
public function getBlockPrefix()
{
return 'choice';
}
/**
* Adds the sub fields for an expanded choice field.
*
* @param FormBuilderInterface $builder The form builder
* @param array $choiceViews The choice view objects
* @param array $options The build options
*/
private function addSubForms(FormBuilderInterface $builder, array $choiceViews, array $options)
{
foreach ($choiceViews as $name => $choiceView) {
// Flatten groups
if (is_array($choiceView)) {
$this->addSubForms($builder, $choiceView, $options);
continue;
}
if ($choiceView instanceof ChoiceGroupView) {
$this->addSubForms($builder, $choiceView->choices, $options);
continue;
}
$this->addSubForm($builder, $name, $choiceView, $options);
}
}
/**
* @param FormBuilderInterface $builder
* @param $name
* @param $choiceView
* @param array $options
*
* @return mixed
*/
private function addSubForm(FormBuilderInterface $builder, $name, ChoiceView $choiceView, array $options)
{
$choiceOpts = array(
'value' => $choiceView->value,
'label' => $choiceView->label,
'attr' => $choiceView->attr,
'translation_domain' => $options['translation_domain'],
'block_name' => 'entry',
);
if ($options['multiple']) {
$choiceType = __NAMESPACE__.'\CheckboxType';
// The user can check 0 or more checkboxes. If required
// is true, he is required to check all of them.
$choiceOpts['required'] = false;
} else {
$choiceType = __NAMESPACE__.'\RadioType';
}
$builder->add($name, $choiceType, $choiceOpts);
}
private function createChoiceListView(ChoiceListInterface $choiceList, array $options)
{
return $this->choiceListFactory->createView(
$choiceList,
$options['preferred_choices'],
$options['choice_label'],
$options['choice_name'],
$options['group_by'],
$options['choice_attr']
);
}
/**
* When "choices_as_values" is set to false, the choices are in the keys and
* their labels in the values. Labels may occur twice. The form component
* flips the choices array in the new implementation, so duplicate labels
* are lost. Store them in a utility array that is used from the
* "choice_label" closure by default.
*
* @param array|\Traversable $choices The choice labels indexed by choices
* @param object $choiceLabels The object that receives the choice labels
* indexed by generated keys.
* @param int $nextKey The next generated key
*
* @return array The choices in a normalized array with labels replaced by generated keys
*
* @internal Public only to be accessible from closures on PHP 5.3. Don't
* use this method as it may be removed without notice and will be in 3.0.
*/
public static function normalizeLegacyChoices($choices, $choiceLabels, &$nextKey = 0)
{
$normalizedChoices = array();
foreach ($choices as $choice => $choiceLabel) {
if (is_array($choiceLabel) || $choiceLabel instanceof \Traversable) {
$normalizedChoices[$choice] = self::normalizeLegacyChoices($choiceLabel, $choiceLabels, $nextKey);
continue;
}
$choiceLabels->labels[$nextKey] = $choiceLabel;
$normalizedChoices[$choice] = $nextKey++;
}
return $normalizedChoices;
}
}
| {'content_hash': 'a2bdb578329171079a16c7554f2a37d5', 'timestamp': '', 'source': 'github', 'line_count': 538, 'max_line_length': 326, 'avg_line_length': 42.36431226765799, 'alnum_prop': 0.5813004563004563, 'repo_name': 'mickael-matelli/ws', 'id': '25ebbd35d814fac4b162dd08f598050a379bde3a', 'size': '23021', 'binary': False, 'copies': '23', 'ref': 'refs/heads/master', 'path': 'vendor/symfony/symfony/src/Symfony/Component/Form/Extension/Core/Type/ChoiceType.php', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'ApacheConf', 'bytes': '3606'}, {'name': 'Batchfile', 'bytes': '517'}, {'name': 'CSS', 'bytes': '400245'}, {'name': 'HTML', 'bytes': '24869'}, {'name': 'JavaScript', 'bytes': '1008789'}, {'name': 'PHP', 'bytes': '103404'}, {'name': 'Shell', 'bytes': '2477'}]} |
## Description
Mayhem 2 is a multiplayer (2 - 4) flight shooter with a really simple objective - destroy your opponents before they destroy you.

[Mayhem](http://www.lemonamiga.com/games/details.php?id=2972) was originally released as Public Domain / Shareware on the [Amiga](https://en.wikipedia.org/wiki/Amiga). It was ported to the PC in 2002 by [devpack](https://github.com/devpack) and released onto github in 2011 [github.com/devpack/mayhem](https://github.com/devpack/mayhem).
[Video of the original amiga game](https://www.youtube.com/watch?v=fs30DLGxqhs).
[Video of Mayhem 2](https://youtu.be/Vxozz0Ijdr0)
[Martin O'Hanlon](https://github.com/martinohanlon) picked it up from [code.google.com/p/mayhem](https://code.google.com/archive/p/mayhem/) ([New BSD License](https://opensource.org/licenses/BSD-3-Clause)) and with the help of Lee Taylor added new levels and features. [Jonas Karlsson](https://github.com/karjonas) migrated Mayhem 2 to [allegro 5](http://liballeg.org/).
[Video of a new level in Mayhem 2](https://youtu.be/E3mho6J6OG8)
This is the [Raspberry Pi](https://www.raspberrypi.org) port of the game; you can find the Windows PC fork at [github.com/martinohanlon/mayhem](https://github.com/martinohanlon/mayhem).
[Blog post](http://www.stuffaboutcode.com/2016/04/mayhem-classic-amiga-game-ported-to.html)
## Gameplay
Destroy your opponents by shooting them; you win when all other players have run out of lives.
Your ship has limited fuel, which runs down when you boost. If you run out you will be unable to control your ship; to refuel, land on any flat surface.
You can protect yourself from attack using your shields, which will stop all bullets. Be careful though: your shields run down quickly and you won't be able to boost while your shields are on.
Powerups are dropped (sometimes) when a player is destroyed (by either crashing or being shot) and when collected will give you a temporary boost.
### Options
Levels 1-3 are the original game levels; all other levels are new to Mayhem 2.
DCA are anti-spaceship guns which will fire at the player if they get too close.
Wall collision can be turned off so that new players can get used to the controls and to playing the game.
## Controls
### Joysticks
Default joystick controls assume an "xbox / ps like" joystick:
Control | Action
--- | ---
Stick 1 | Left / Right
Button 1 (A) | Thrust
Button 2 (B) | Shield
Button 6 (RB) | Fire
Joystick controls can be changed using the menu.
### Keys
If joysticks are connected, they are used as the players' controls; if fewer than 4 joysticks are connected, keys are used for the remaining players in order:
Player | Left | Right | Shield | Thrust | Fire
--- | --- | --- | --- | --- | ---
1 | z | x | c | v | g
2 | left | right | pad del | pad 0 | pad enter
3 | b | n | , | m | l
4 | y | u | o | i | 0
## Setup
Mayhem 2 needs at least 96MB of RAM dedicated to the GPU; you can set this using `raspi-config`:
```
sudo raspi-config
```
`Advanced Options` > `Memory Split` > change value to 96
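Alternatively, the same memory split can also be set by adding the following line to `/boot/config.txt` (then reboot):
```
gpu_mem=96
```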
## Install
You will need to download the game and install the libdumb1 dependency:
```
sudo apt-get install libdumb1
git clone -b allegro5 https://github.com/martinohanlon/mayhem-pi
```
## Run
```
cd mayhem-pi
./start.sh
```
## Compile
Compiling Mayhem2 requires [allegro 5](http://liballeg.org/) to be compiled and installed - this will take a while, particularly on a Pi 0/1.
```
cd mayhem-pi
./compile_allegro5
```
To compile Mayhem2:
```
make
```
To compile Mayhem2 for distribution, with static linking to the allegro libraries, modify compile_allegro to include the additional flags, recompile, and use Makefilestatic:
```
make clean
make -f Makefilestatic
```
## RetroPie
To install Mayhem to the 'ports' section of RetroPie:
```
cd /home/pi/RetroPie/roms/ports
sudo apt-get install libdumb1
git clone -b allegro5 https://github.com/martinohanlon/mayhem-pi
```
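RetroPie's Ports menu launches entries through small shell scripts kept in the same directory. Once the game has been compiled inside the cloned folder, a minimal launcher along these lines should work (the script name and paths are just an example):
```
#!/bin/bash
cd /home/pi/RetroPie/roms/ports/mayhem-pi
./start.sh
```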
## Version history
* beta - compiles, runs, ongoing mods
* 0.1 - added new levels and features
* 0.2 - seamless warping across map edges
* 0.4 - menu changes + new 'option' (powerup) changes
* 0.5 - timing and powerups are now dropped when players explode
* 0.6 - new thrust powerup + bug fix
* 0.7 - level structure changes
* 0.8 - joystick control
* 0.9 - new powerup icons
* 0.10 - screen resolution & perf improvements
* 0.11 - screen rendering, timing changes, bug fix
* 0.12 - allegro5 migration
## Contributors
[Anthony Prieur](https://github.com/devpack)
[Martin O'Hanlon](https://github.com/martinohanlon)
Lee Taylor
[Jonas Karlsson](https://github.com/karjonas)
| {'content_hash': '02ebc0eabc132b04120c7277fb13c8d7', 'timestamp': '', 'source': 'github', 'line_count': 146, 'max_line_length': 370, 'avg_line_length': 32.71232876712329, 'alnum_prop': 0.7085427135678392, 'repo_name': 'martinohanlon/mayhem-pi', 'id': '2d92b1ed08f7d22a2f0f1993158deb8c881d3a0c', 'size': '4879', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'README.md', 'mode': '33261', 'license': 'bsd-3-clause', 'language': [{'name': 'C', 'bytes': '18763'}, {'name': 'C++', 'bytes': '156023'}, {'name': 'Makefile', 'bytes': '3109'}, {'name': 'Shell', 'bytes': '358'}]} |
require 'bio-ucsc'
describe "Bio::Ucsc::Hg18::ChainRheMac2" do
describe "#find_by_interval" do
context 'given range chrX:1-1,000,000' do
it 'returns a record with column accessors (r.tName == "chrX")' do
Bio::Ucsc::Hg18::DBConnection.default
Bio::Ucsc::Hg18::DBConnection.connect
i = Bio::GenomicInterval.parse("chrX:1-1,000,000")
r = Bio::Ucsc::Hg18::ChainRheMac2.find_by_interval(i)
r.tName.should == "chrX"
end
end
end
end
| {'content_hash': 'b67ded89eee38bc86676ce78916913d4', 'timestamp': '', 'source': 'github', 'line_count': 16, 'max_line_length': 72, 'avg_line_length': 30.8125, 'alnum_prop': 0.6328600405679513, 'repo_name': 'misshie/bioruby-ucsc-api', 'id': 'b9b7cc5a662d6451b10ea2fb9fff26124e028b14', 'size': '493', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'spec/hg18/chainrhemac2_spec.rb', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'Ruby', 'bytes': '4117804'}, {'name': 'sed', 'bytes': '320'}]} |
<!doctype html>
<html>
<head>
<title>iron-file-icons</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<script src="../../webcomponentsjs/webcomponents-loader.js"></script>
<script src="../../web-component-tester/browser.js"></script>
<script src="../../test-fixture/test-fixture-mocha.js"></script>
<link rel="import" href="../file-icons.html">
<link rel="import" href="../../test-fixture/test-fixture.html">
</head>
<body>
<test-fixture id="TrivialElement">
<template>
</template>
</test-fixture>
<script>
suite('<iron-file-icons>', function() {
suite('basic behavior', function() {
});
});
</script>
</body>
</html>
| {'content_hash': 'da17735ba2f4e573f6a2625a4289ebc4', 'timestamp': '', 'source': 'github', 'line_count': 30, 'max_line_length': 72, 'avg_line_length': 24.366666666666667, 'alnum_prop': 0.6210670314637483, 'repo_name': 'Collaborne/iron-file-icons', 'id': '74c5452ddcb8377875f00ab3481627c04e4af84a', 'size': '731', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'test/file-icons.html', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'HTML', 'bytes': '5545'}]} |
package org.apache.stratos.messaging.event.health.stat;
import org.apache.stratos.messaging.event.Event;
/**
* This event is fired by Event processing engine to send second derivative of Load average
*/
public class SecondDerivativeOfLoadAverageEvent extends Event {
private final String networkPartitionId;
private final String clusterId;
private final String clusterInstanceId;
private final float value;
public SecondDerivativeOfLoadAverageEvent(String networkPartitionId, String clusterId, String clusterInstanceId, float value) {
this.networkPartitionId = networkPartitionId;
this.clusterId = clusterId;
this.clusterInstanceId = clusterInstanceId;
this.value = value;
}
public String getClusterId() {
return clusterId;
}
public float getValue() {
return value;
}
public String getNetworkPartitionId() {
return networkPartitionId;
}
public String getClusterInstanceId() {
return clusterInstanceId;
}
}
| {'content_hash': 'f4fe7d3bcd08bdf86c71a06c04828ccf', 'timestamp': '', 'source': 'github', 'line_count': 40, 'max_line_length': 131, 'avg_line_length': 25.975, 'alnum_prop': 0.7179980750721848, 'repo_name': 'dinithis/stratos', 'id': '6f5b9c6edd59a9ef5bb7cdd11b63f16cb695e534', 'size': '1848', 'binary': False, 'copies': '13', 'ref': 'refs/heads/master', 'path': 'components/org.apache.stratos.messaging/src/main/java/org/apache/stratos/messaging/event/health/stat/SecondDerivativeOfLoadAverageEvent.java', 'mode': '33188', 'license': 'apache-2.0', 'language': [{'name': 'Batchfile', 'bytes': '17184'}, {'name': 'C', 'bytes': '27195'}, {'name': 'CSS', 'bytes': '71867'}, {'name': 'HTML', 'bytes': '7586'}, {'name': 'Handlebars', 'bytes': '136406'}, {'name': 'Java', 'bytes': '5946275'}, {'name': 'JavaScript', 'bytes': '743237'}, {'name': 'Python', 'bytes': '518762'}, {'name': 'Ruby', 'bytes': '3546'}, {'name': 'Shell', 'bytes': '124630'}]} |
#include "CCPhysicsJoint.h"
#ifdef CC_USE_PHYSICS
#if (CC_PHYSICS_ENGINE == CC_PHYSICS_CHIPMUNK)
#include "chipmunk.h"
#elif (CC_PHYSICS_ENGINE == CC_PHYSICS_BOX2D)
#include "Box2D.h"
#endif
#include "CCPhysicsBody.h"
#include "chipmunk/CCPhysicsJointInfo.h"
#include "Box2D/CCPhysicsJointInfo.h"
#include "chipmunk/CCPhysicsBodyInfo.h"
#include "Box2D/CCPhysicsBodyInfo.h"
#include "chipmunk/CCPhysicsHelper.h"
#include "Box2D/CCPhysicsHelper.h"
NS_CC_BEGIN
PhysicsJoint::PhysicsJoint()
: _bodyA(nullptr)
, _bodyB(nullptr)
, _info(nullptr)
{
}
PhysicsJoint::~PhysicsJoint()
{
CC_SAFE_DELETE(_info);
CC_SAFE_RELEASE(_bodyA);
CC_SAFE_RELEASE(_bodyB);
}
bool PhysicsJoint::init(cocos2d::PhysicsBody *a, cocos2d::PhysicsBody *b)
{
do
{
CC_BREAK_IF(a == nullptr || b == nullptr);
CC_BREAK_IF(!(_info = new PhysicsJointInfo()));
_bodyA = a;
_bodyA->retain();
_bodyA->_joints.push_back(this);
_bodyB = b;
_bodyB->retain();
_bodyB->_joints.push_back(this);
return true;
} while (false);
return false;
}
PhysicsJointPin::PhysicsJointPin()
{
}
PhysicsJointPin::~PhysicsJointPin()
{
}
PhysicsJointFixed::PhysicsJointFixed()
{
}
PhysicsJointFixed::~PhysicsJointFixed()
{
}
PhysicsJointSliding::PhysicsJointSliding()
{
}
PhysicsJointSliding::~PhysicsJointSliding()
{
}
PhysicsJointLimit::PhysicsJointLimit()
{
}
PhysicsJointLimit::~PhysicsJointLimit()
{
}
#if (CC_PHYSICS_ENGINE == CC_PHYSICS_CHIPMUNK)
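// Chipmunk backend: each joint type below wraps the corresponding Chipmunk
// constraint (cpPivotJointNew, cpPinJointNew, cpGrooveJointNew, cpSlideJointNew).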
PhysicsBodyInfo* PhysicsJoint::bodyInfo(PhysicsBody* body) const
{
return body->_info;
}
PhysicsJointFixed* PhysicsJointFixed::create(PhysicsBody* a, PhysicsBody* b, const Point& anchr)
{
PhysicsJointFixed* joint = new PhysicsJointFixed();
if (joint && joint->init(a, b, anchr))
{
joint->autorelease();
return joint;
}
CC_SAFE_DELETE(joint);
return nullptr;
}
bool PhysicsJointFixed::init(PhysicsBody* a, PhysicsBody* b, const Point& anchr)
{
do
{
CC_BREAK_IF(!PhysicsJoint::init(a, b));
_info->joint = cpPivotJointNew(bodyInfo(a)->body, bodyInfo(b)->body,
PhysicsHelper::point2cpv(anchr));
return true;
} while (false);
return false;
}
PhysicsJointPin* PhysicsJointPin::create(PhysicsBody* a, PhysicsBody* b, const Point& anchr1, const Point& anchr2)
{
PhysicsJointPin* joint = new PhysicsJointPin();
if (joint && joint->init(a, b, anchr1, anchr2))
{
joint->autorelease();
return joint;
}
CC_SAFE_DELETE(joint);
return nullptr;
}
bool PhysicsJointPin::init(PhysicsBody *a, PhysicsBody *b, const Point& anchr1, const Point& anchr2)
{
do
{
CC_BREAK_IF(!PhysicsJoint::init(a, b));
_info->joint = cpPinJointNew(bodyInfo(a)->body, bodyInfo(b)->body, PhysicsHelper::point2cpv(anchr1), PhysicsHelper::point2cpv(anchr2));
return true;
} while (false);
return false;
}
PhysicsJointSliding* PhysicsJointSliding::create(PhysicsBody* a, PhysicsBody* b, const Point& grooveA, const Point& grooveB, const Point& anchr)
{
PhysicsJointSliding* joint = new PhysicsJointSliding();
if (joint && joint->init(a, b, grooveA, grooveB, anchr))
{
return joint;
}
CC_SAFE_DELETE(joint);
return nullptr;
}
bool PhysicsJointSliding::init(PhysicsBody* a, PhysicsBody* b, const Point& grooveA, const Point& grooveB, const Point& anchr)
{
do
{
CC_BREAK_IF(!PhysicsJoint::init(a, b));
_info->joint = cpGrooveJointNew(bodyInfo(a)->body, bodyInfo(b)->body,
PhysicsHelper::point2cpv(grooveA),
PhysicsHelper::point2cpv(grooveB),
PhysicsHelper::point2cpv(anchr));
return true;
} while (false);
return false;
}
PhysicsJointLimit* PhysicsJointLimit::create(PhysicsBody* a, PhysicsBody* b, const Point& anchr1, const Point& anchr2, float min, float max)
{
PhysicsJointLimit* joint = new PhysicsJointLimit();
if (joint && joint->init(a, b, anchr1, anchr2, min, max))
{
return joint;
}
CC_SAFE_DELETE(joint);
return nullptr;
}
bool PhysicsJointLimit::init(PhysicsBody* a, PhysicsBody* b, const Point& anchr1, const Point& anchr2, float min, float max)
{
do
{
CC_BREAK_IF(!PhysicsJoint::init(a, b));
_info->joint = cpSlideJointNew(bodyInfo(a)->body, bodyInfo(b)->body,
PhysicsHelper::point2cpv(anchr1),
PhysicsHelper::point2cpv(anchr2),
PhysicsHelper::float2cpfloat(min),
PhysicsHelper::float2cpfloat(max));
return true;
} while (false);
return false;
}
#elif (CC_PHYSICS_ENGINE == CC_PHYSICS_BOX2D)
#endif
NS_CC_END
#endif // CC_USE_PHYSICS
| {'content_hash': '86155ea6e530438f959238b269f5cfb6', 'timestamp': '', 'source': 'github', 'line_count': 233, 'max_line_length': 144, 'avg_line_length': 22.13733905579399, 'alnum_prop': 0.602559131446297, 'repo_name': 'gibtang/CCNSCoding', 'id': '3e60d246ab372f326dd93e173f6c4c17d0f397b2', 'size': '6418', 'binary': False, 'copies': '1', 'ref': 'refs/heads/master', 'path': 'cocos2dx/physics/CCPhysicsJoint.cpp', 'mode': '33188', 'license': 'mit', 'language': [{'name': 'C', 'bytes': '52298226'}, {'name': 'C#', 'bytes': '3520'}, {'name': 'C++', 'bytes': '26358013'}, {'name': 'CSS', 'bytes': '27667'}, {'name': 'Clojure', 'bytes': '8891'}, {'name': 'F#', 'bytes': '5688'}, {'name': 'IDL', 'bytes': '11995'}, {'name': 'Java', 'bytes': '285202'}, {'name': 'JavaScript', 'bytes': '36912223'}, {'name': 'Lua', 'bytes': '558824'}, {'name': 'Objective-C', 'bytes': '1387273'}, {'name': 'Perl', 'bytes': '13787'}, {'name': 'Python', 'bytes': '1629302'}, {'name': 'Ruby', 'bytes': '120900'}, {'name': 'Shell', 'bytes': '1517397'}, {'name': 'TeX', 'bytes': '60330'}, {'name': 'Visual Basic', 'bytes': '2884'}]} |