hexsha (stringlengths 40-40) | size (int64 8-1.04M) | content (stringlengths 8-1.04M) | avg_line_length (float64 2.24-100) | max_line_length (int64 4-1k) | alphanum_fraction (float64 0.25-0.97) |
---|---|---|---|---|---|
005ce12ed5bcd364e391d4192eac5c1b02d09cc1 | 3,341 | package cn.allen.iweather.webservice.entity;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
/**
* Created by allen on 2017/11/12.
*/
public class LocationEntity extends BaseEntity implements Parcelable {
@SerializedName("id")
private String id;//city code
@SerializedName("name")
private String name;//city name
@SerializedName("country")
private String country;//country code
@SerializedName("path")
private String path;//full administrative path
@SerializedName("timezone")
private String timezone;//time zone
@SerializedName("timezone_offset")
private String timezone_offset;//time zone offset
private boolean isFavorite;
public LocationEntity(String id, String name, String path) {
this.id = id;
this.name = name;
this.path = path;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCountry() {
return country;
}
public void setCountry(String country) {
this.country = country;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
public String getTimezone() {
return timezone;
}
public void setTimezone(String timezone) {
this.timezone = timezone;
}
public String getTimezone_offset() {
return timezone_offset;
}
public void setTimezone_offset(String timezone_offset) {
this.timezone_offset = timezone_offset;
}
public boolean isFavorite() {
return isFavorite;
}
public void setFavorite(boolean favorite) {
isFavorite = favorite;
}
@Override
public String toString() {
return "LocationEntity{" +
"id='" + id + '\'' +
", name='" + name + '\'' +
", country='" + country + '\'' +
", path='" + path + '\'' +
", timezone='" + timezone + '\'' +
", timezone_offset='" + timezone_offset + '\'' +
'}';
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeString(this.id);
dest.writeString(this.name);
dest.writeString(this.country);
dest.writeString(this.path);
dest.writeString(this.timezone);
dest.writeString(this.timezone_offset);
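// note: isFavorite is not written to the parcel, so it falls back to false when the entity is recreated from a Parcel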
}
public LocationEntity() {
}
protected LocationEntity(Parcel in) {
this.id = in.readString();
this.name = in.readString();
this.country = in.readString();
this.path = in.readString();
this.timezone = in.readString();
this.timezone_offset = in.readString();
}
public static final Creator<LocationEntity> CREATOR = new Creator<LocationEntity>() {
@Override
public LocationEntity createFromParcel(Parcel source) {
return new LocationEntity(source);
}
@Override
public LocationEntity[] newArray(int size) {
return new LocationEntity[size];
}
};
}
| 23.695035 | 89 | 0.587549 |
c2d24d4901b84aacbc71bdc0e8eec39076f53edd | 8,688 | /*
* Copyright 2015-2018 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin2.collector.kafka08;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ZookeeperConsumerConnector;
import zipkin2.CheckResult;
import zipkin2.collector.Collector;
import zipkin2.collector.CollectorComponent;
import zipkin2.collector.CollectorMetrics;
import zipkin2.collector.CollectorSampler;
import zipkin2.storage.SpanConsumer;
import zipkin2.storage.StorageComponent;
import static kafka.consumer.Consumer.createJavaConsumerConnector;
import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG;
import static org.apache.kafka.clients.consumer.ConsumerConfig.GROUP_ID_CONFIG;
/**
* This collector polls a Kafka topic for messages that contain TBinaryProtocol big-endian encoded
* lists of spans. These spans are pushed to a {@link SpanConsumer#accept span consumer}.
*
* <p>This collector remains a Kafka 0.8.x consumer, while Zipkin systems update to 0.9+.
*/
public final class KafkaCollector extends CollectorComponent {
public static Builder builder() {
return new Builder();
}
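// Illustrative usage (sketch; the ZooKeeper address and the storage component are assumptions):
//   KafkaCollector collector = KafkaCollector.builder()
//       .zookeeper("127.0.0.1:2181")
//       .topic("zipkin")
//       .storage(storage)
//       .build()
//       .start();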
/** Configuration including defaults needed to consume spans from a Kafka topic. */
public static final class Builder extends CollectorComponent.Builder {
final Properties properties = new Properties();
Collector.Builder delegate = Collector.newBuilder(KafkaCollector.class);
CollectorMetrics metrics = CollectorMetrics.NOOP_METRICS;
String topic = "zipkin";
int streams = 1;
@Override
public Builder storage(StorageComponent storage) {
delegate.storage(storage);
return this;
}
@Override
public Builder sampler(CollectorSampler sampler) {
delegate.sampler(sampler);
return this;
}
@Override
public Builder metrics(CollectorMetrics metrics) {
if (metrics == null) throw new NullPointerException("metrics == null");
this.metrics = metrics.forTransport("kafka");
delegate.metrics(this.metrics);
return this;
}
/** Topic zipkin spans will be consumed from. Defaults to "zipkin" */
public Builder topic(String topic) {
if (topic == null) throw new NullPointerException("topic == null");
this.topic = topic;
return this;
}
/** The zookeeper connect string, ex. 127.0.0.1:2181. No default */
public Builder zookeeper(String zookeeper) {
if (zookeeper == null) throw new NullPointerException("zookeeper == null");
properties.put("zookeeper.connect", zookeeper);
return this;
}
/** The consumer group this process is consuming on behalf of. Defaults to "zipkin" */
public Builder groupId(String groupId) {
if (groupId == null) throw new NullPointerException("groupId == null");
properties.put(GROUP_ID_CONFIG, groupId);
return this;
}
/** Count of threads/streams consuming the topic. Defaults to 1 */
public Builder streams(int streams) {
this.streams = streams;
return this;
}
/** Maximum size of a message containing spans in bytes. Defaults to 1 MiB */
public Builder maxMessageSize(int bytes) {
properties.put("fetch.message.max.bytes", String.valueOf(bytes));
return this;
}
/**
* By default, a consumer will be built from properties derived from builder defaults, as well as
* "auto.offset.reset" -> "smallest". Any properties set here will override the consumer config.
*
* <p>For example: Only consume spans since you connected by setting the below.
*
* <pre>{@code
* Map<String, String> overrides = new LinkedHashMap<>();
* overrides.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "largest");
* builder.overrides(overrides);
* }</pre>
*
* @see org.apache.kafka.clients.consumer.ConsumerConfig
*/
public final Builder overrides(Map<String, ?> overrides) {
if (overrides == null) throw new NullPointerException("overrides == null");
properties.putAll(overrides);
return this;
}
@Override
public KafkaCollector build() {
return new KafkaCollector(this);
}
Builder() {
// Settings below correspond to "Old Consumer Configs"
// http://kafka.apache.org/documentation.html
properties.put(GROUP_ID_CONFIG, "zipkin");
properties.put("fetch.message.max.bytes", String.valueOf(1024 * 1024));
// Same default as zipkin-scala, and keeps tests from hanging
properties.put(AUTO_OFFSET_RESET_CONFIG, "smallest");
}
}
final LazyConnector connector;
final LazyStreams streams;
KafkaCollector(Builder builder) {
connector = new LazyConnector(builder);
streams = new LazyStreams(builder, connector);
}
@Override
public KafkaCollector start() {
connector.get();
streams.get();
return this;
}
@Override
public CheckResult check() {
try {
connector.get(); // make sure the connector didn't throw
CheckResult failure = streams.failure.get(); // check the streams didn't quit
if (failure != null) return failure;
return CheckResult.OK;
} catch (RuntimeException e) {
return CheckResult.failed(e);
}
}
static final class LazyConnector {
final ConsumerConfig config;
volatile ZookeeperConsumerConnector connector;
LazyConnector(Builder builder) {
this.config = new ConsumerConfig(builder.properties);
}
ZookeeperConsumerConnector get() {
if (connector == null) {
synchronized (this) {
if (connector == null) {
connector = (ZookeeperConsumerConnector) createJavaConsumerConnector(config);
}
}
}
return connector;
}
void close() {
ZookeeperConsumerConnector maybeConnector = connector;
if (maybeConnector == null) return;
maybeConnector.shutdown();
}
}
@Override
public void close() {
streams.close();
connector.close();
}
static final class LazyStreams {
final int streams;
final String topic;
final Collector collector;
final CollectorMetrics metrics;
final LazyConnector connector;
final AtomicReference<CheckResult> failure = new AtomicReference<>();
volatile ExecutorService pool;
LazyStreams(Builder builder, LazyConnector connector) {
this.streams = builder.streams;
this.topic = builder.topic;
this.collector = builder.delegate.build();
this.metrics = builder.metrics;
this.connector = connector;
}
ExecutorService get() {
if (pool == null) {
synchronized (this) {
if (pool == null) {
pool = compute();
}
}
}
return pool;
}
void close() {
ExecutorService maybePool = pool;
if (maybePool == null) return;
maybePool.shutdownNow();
try {
maybePool.awaitTermination(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
// at least we tried
}
}
ExecutorService compute() {
ExecutorService pool =
streams == 1
? Executors.newSingleThreadExecutor()
: Executors.newFixedThreadPool(streams);
Map<String, Integer> topicCountMap = new LinkedHashMap<>(1);
topicCountMap.put(topic, streams);
for (KafkaStream<byte[], byte[]> stream :
connector.get().createMessageStreams(topicCountMap).get(topic)) {
pool.execute(guardFailures(new KafkaStreamProcessor(stream, collector, metrics)));
}
return pool;
}
Runnable guardFailures(final Runnable delegate) {
return new Runnable() {
@Override
public void run() {
try {
delegate.run();
} catch (RuntimeException e) {
failure.set(CheckResult.failed(e));
}
}
};
}
}
}
| 31.592727 | 100 | 0.679213 |
f3d631dc0f2ebcb75a2e9c750b60b70c32231dd8 | 192 | package curso_loiane.aulas.aula24.exercicios;
public class LivroLivraria {
String nome;
String autor;
int qtdPaginas;
int anoLancamento;
String isbn;
double preco;
}
| 16 | 45 | 0.703125 |
9b2060d41be442360587976798cedcdfb69c2710 | 4,942 | /*
* Copyright (C) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.hit;
import java.util.ArrayList;
import java.util.HashMap;
/*
* State is a snapshot of all of the heaps, and related meta-data, for
* the runtime at a given instant.
*
* During parsing of the HPROF file, HEAP_DUMP_INFO chunks change which heap
* is being referenced.
*/
public class State {
HashMap<Integer, Heap> mHeaps;
Heap mCurrentHeap;
public State() {
mHeaps = new HashMap<Integer, Heap>();
setToDefaultHeap();
}
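// HEAP_DUMP_INFO records encountered during parsing call setHeapTo(...), switching
// mCurrentHeap; the add*/get* helpers below all operate on that current heap.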
public Heap setToDefaultHeap() {
return setHeapTo(0, "default");
}
public Heap setHeapTo(int id, String name) {
Heap heap = mHeaps.get(id);
if (heap == null) {
heap = new Heap(name);
heap.mState = this;
mHeaps.put(id, heap);
}
mCurrentHeap = heap;
return mCurrentHeap;
}
public Heap getHeap(int id) {
return mHeaps.get(id);
}
public Heap getHeap(String name) {
for (Heap heap: mHeaps.values()) {
if (heap.mName.equals(name)) {
return heap;
}
}
return null;
}
public final void addStackFrame(StackFrame theFrame) {
mCurrentHeap.addStackFrame(theFrame);
}
public final StackFrame getStackFrame(long id) {
return mCurrentHeap.getStackFrame(id);
}
public final void addStackTrace(StackTrace theTrace) {
mCurrentHeap.addStackTrace(theTrace);
}
public final StackTrace getStackTrace(int traceSerialNumber) {
return mCurrentHeap.getStackTrace(traceSerialNumber);
}
public final StackTrace getStackTraceAtDepth(int traceSerialNumber,
int depth) {
return mCurrentHeap.getStackTraceAtDepth(traceSerialNumber, depth);
}
public final void addRoot(RootObj root) {
mCurrentHeap.addRoot(root);
}
public final void addThread(ThreadObj thread, int serialNumber) {
mCurrentHeap.addThread(thread, serialNumber);
}
public final ThreadObj getThread(int serialNumber) {
return mCurrentHeap.getThread(serialNumber);
}
public final void addInstance(long id, Instance instance) {
mCurrentHeap.addInstance(id, instance);
}
public final void addClass(long id, ClassObj theClass) {
mCurrentHeap.addClass(id, theClass);
}
public final Instance findReference(long id) {
for (Heap heap: mHeaps.values()) {
Instance instance = heap.getInstance(id);
if (instance != null) {
return instance;
}
}
// Couldn't find an instance of a class, look for a class object
return findClass(id);
}
public final ClassObj findClass(long id) {
for (Heap heap: mHeaps.values()) {
ClassObj theClass = heap.getClass(id);
if (theClass != null) {
return theClass;
}
}
return null;
}
public final ClassObj findClass(String name) {
for (Heap heap: mHeaps.values()) {
ClassObj theClass = heap.getClass(name);
if (theClass != null) {
return theClass;
}
}
return null;
}
public final void dumpInstanceCounts() {
for (Heap heap: mHeaps.values()) {
System.out.println(
"+------------------ instance counts for heap: " + heap.mName);
heap.dumpInstanceCounts();
}
}
public final void dumpSizes() {
for (Heap heap: mHeaps.values()) {
System.out.println(
"+------------------ sizes for heap: " + heap.mName);
heap.dumpSizes();
}
}
public final void dumpSubclasses() {
for (Heap heap: mHeaps.values()) {
System.out.println(
"+------------------ subclasses for heap: " + heap.mName);
heap.dumpSubclasses();
}
}
public final void resolveReferences() {
for (Heap heap: mHeaps.values()) {
heap.resolveInstanceRefs(this);
heap.resolveClassStatics(this);
heap.resolveRoots(this);
}
}
}
| 27.455556 | 79 | 0.575476 |
93076a69baa7b0c6a8c92d5c7259a6c0f3708431 | 1,000 | package se.webinfostudio.game.etheder.repository.unit;
import org.jdbi.v3.sqlobject.customizer.Bind;
import org.jdbi.v3.sqlobject.customizer.BindBean;
import org.jdbi.v3.sqlobject.statement.SqlQuery;
import org.jdbi.v3.sqlobject.statement.SqlUpdate;
import ru.vyarus.guicey.jdbi3.installer.repository.JdbiRepository;
import ru.vyarus.guicey.jdbi3.tx.InTransaction;
import se.webinfostudio.game.etheder.entity.unit.UnitData;
@JdbiRepository
@InTransaction
public interface UnitDataRepository {
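// JDBI SQL Object repository: method implementations are generated from the
// @SqlUpdate/@SqlQuery annotations below and wired in via guicey-jdbi3's @JdbiRepository.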
@SqlUpdate("insert into unitdata(id, name, description, costFood, costGold, costIron, costStone, costWood, attack, armour, defensive, health, speed, unitlevel, ticks, unitType) values(:id, :name, :description, :costFood, :costGold, :costIron, :costStone, :costWood, :attack, :armour, :defensive, :health, :speed, :level, :ticks, :unitType)")
void create(@BindBean UnitData unitData);
@SqlQuery("select * from unitdata where id = :id")
UnitData findById(@Bind("id") Long id);
}
| 45.454545 | 343 | 0.768 |
e2ca40f097c52e8a618a293ef2ef11770a4e5d28 | 918 | package info.orestes.rest.conversion;
import org.apache.tika.mime.MediaType;
import java.util.HashMap;
import java.util.Map;
/**
* Created on 04.08.2014.
*
* @author Malte Lauenroth
*/
public class SimpleContext implements Context {
private final MediaType mediaType;
private final Map<String, Object> arguments = new HashMap<>();
public SimpleContext() {
this(null);
}
protected SimpleContext(MediaType mediaType) {
this.mediaType = mediaType;
}
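// Example (sketch): stash and retrieve a request-scoped argument
//   SimpleContext ctx = new SimpleContext();
//   ctx.setArgument("userId", 42);
//   Integer id = ctx.getArgument("userId");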
@Override
@SuppressWarnings("unchecked")
public <T> T getArgument(String name) {
return (T) arguments.get(name);
}
@Override
public void setArgument(String name, Object value) {
arguments.put(name, value);
}
@Override
public MediaType getMediaType() {
return mediaType;
}
public Map<String, Object> getArguments() {
return arguments;
}
}
| 20.4 | 66 | 0.652505 |
dbfd1dd45c41ee72806183a879b210b55b188d46 | 2,116 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.io.serialization;
import javax.inject.Inject;
import java.io.*;
import java.util.logging.Logger;
/**
* A {@link Codec} for {@link Serializable} objects.
* <p/>
* It uses java serialization, use with caution.
*
* @param <T> The type of objects Serialized
*/
public class SerializableCodec<T extends Serializable> implements Codec<T> {
private static final Logger LOG = Logger.getLogger(SerializableCodec.class.getName());
/**
* Default constructor for TANG use.
*/
@Inject
public SerializableCodec() {
}
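// Example (sketch): round-trip a serializable value
//   SerializableCodec<String> codec = new SerializableCodec<>();
//   String copy = codec.decode(codec.encode("hello"));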
@Override
public byte[] encode(final T obj) {
try (final ByteArrayOutputStream bout = new ByteArrayOutputStream()) {
try (final ObjectOutputStream out = new ObjectOutputStream(bout)) {
out.writeObject(obj);
}
return bout.toByteArray();
} catch (final IOException ex) {
throw new RuntimeException("Unable to encode: " + obj, ex);
}
}
@Override
public T decode(final byte[] buf) {
try {
try (final ObjectInputStream oin = new ObjectInputStream(new ByteArrayInputStream(buf))) {
final T result = (T) oin.readObject();
return result;
}
} catch (final IOException | ClassNotFoundException ex) {
throw new RuntimeException("Unable to decode.", ex);
}
}
}
| 31.117647 | 96 | 0.700851 |
53b0bbecdee54cd6c2abc7ca34090fd790cfe0c7 | 1,221 | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.settings;
import android.util.Log;
/**
* Stub class for showing sub-settings; we can't use the main Settings class
* since for our app it is a special singleTask class.
*/
public class SubSettings extends SettingsActivity {
@Override
public boolean onNavigateUp() {
finish();
return true;
}
@Override
protected boolean isValidFragment(String fragmentName) {
Log.d("SubSettings", "Launching fragment " + fragmentName);
return true;
}
public static class BluetoothSubSettings extends SubSettings { /* empty */ }
}
| 29.780488 | 80 | 0.710074 |
983c04a78fb13d0819bde60392e8a7946d7a0303 | 187 | package com.imnjh.imagepicker;
import java.util.ArrayList;
public interface PickerAction {
void proceedResultAndFinish(ArrayList<String> selected, boolean original, int resultCode);
} | 26.714286 | 92 | 0.818182 |
1f39e2f04e50c5194c63d079e06772a20389e5f8 | 570 | package logic;
import java.io.IOException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
public class LogicForward extends HttpServlet{
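// Thin helper servlet: forwards the current request/response pair to the given path via a RequestDispatcher.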
public void forward(HttpServletRequest request, HttpServletResponse response, String send) throws ServletException, IOException{
RequestDispatcher dispatcher = request.getServletContext().getRequestDispatcher(send);
dispatcher.forward(request, response);
}
}
| 31.666667 | 129 | 0.838596 |
6aebcc93bc4cdb9679b3c2910ffdb579c208cac5 | 382 | package org.apache.spark.ml.param.shared;
/**
* Code generator for shared params (sharedParams.scala). Run under the Spark folder with
* <pre><code>
* build/sbt "mllib/runMain org.apache.spark.ml.param.shared.SharedParamsCodeGen"
* </code></pre>
*/
public class SharedParamsCodeGen {
static public void main (java.lang.String[] args) { throw new RuntimeException(); }
}
| 34.727273 | 89 | 0.725131 |
da5dcca64ad363f38e370b0b113455535e739109 | 908 | package com.fa.cim.common.annotations;
import org.springframework.core.annotation.AliasFor;
import org.springframework.web.bind.annotation.RestController;
import java.lang.annotation.*;
/**
* description:
* <p>ListenableController .<br/></p>
* <p>
* change history:
* date defect# person comments
* ---------------------------------------------------------------------------------------------------------------------
* 2019/3/25 ******** Yuri create file
*
* @author: Yuri
* @date: 2019/3/25 18:30
* @copyright: 2019, FA Software (Shanghai) Co., Ltd. All Rights Reserved.
*/
@Documented
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@RestController
public @interface ListenableController {
@AliasFor(annotation = RestController.class)
String value() default "";
String transactionID ();
}
| 26.705882 | 120 | 0.575991 |
d347d27b4da4bb40d9ca4369b53dd666dc4e399f | 80 | package com.concurnas.compiler.visitors;
public class SubTypeChecking {
}
| 10 | 40 | 0.7625 |
5fff5f11a671fd48189d0bf74a02e8d1ff5a9797 | 14,960 | package org.vebqa.vebtal.selenese.command;
import static jp.vmi.selenium.selenese.command.ArgumentType.VALUE;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map.Entry;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.apache.commons.io.FileUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.Keys;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.vebqa.vebtal.annotations.Keyword;
import org.vebqa.vebtal.selenese.commands.CheckUrl;
import org.vebqa.vebtal.selenese.commands.ClearCookies;
import org.vebqa.vebtal.selenese.commands.Close;
import org.vebqa.vebtal.selenese.commands.ResizeWindow;
import org.vebqa.vebtal.selenese.filedownloader.FileDownloader;
import org.vebqa.vebtal.seleneserestserver.SeleneseTestAdaptionPlugin;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.google.common.io.Files;
import jp.vmi.selenium.selenese.Context;
import jp.vmi.selenium.selenese.VarsMap;
import jp.vmi.selenium.selenese.command.AbstractCommand;
import jp.vmi.selenium.selenese.command.ICommand;
import jp.vmi.selenium.selenese.command.ICommandFactory;
import jp.vmi.selenium.selenese.result.Failure;
import jp.vmi.selenium.selenese.result.Result;
import jp.vmi.selenium.selenese.result.Success;
import jp.vmi.selenium.selenese.utils.LoggerUtils;
/**
* VEB-specific extensions for the Selenese Runner.
*
* @author doerges
*
*/
public class AdditionalSeleneseExtensions implements ICommandFactory {
private static final Logger logger = LoggerFactory.getLogger(AdditionalSeleneseExtensions.class);
/**
* Command "downloadFileFromUrl" - lade Daten aus einem Strom herunter.
*
* @author doerges
*
*/
@Keyword(module = SeleneseTestAdaptionPlugin.ID, command = "downloadFileFromUrl", hintTarget = "<URL>", hintValue = "<path/to/file>")
private static class DownloadFileFromUrl extends AbstractCommand {
public DownloadFileFromUrl(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
// downloadUrl
String tDownloadUrl = curArgs[0];
if (tDownloadUrl.startsWith("$")) {
// strip the ${...} wrapper; use literal replace(), since "${" is not a valid regex for replaceAll()
tDownloadUrl = tDownloadUrl.replace("${", "");
tDownloadUrl = tDownloadUrl.replace("}", "");
LoggerUtils.quote("Key: " + tDownloadUrl);
tDownloadUrl = (String) context.getVarsMap().get(tDownloadUrl.trim());
LoggerUtils.quote("URL: " + tDownloadUrl);
}
// expand relative URL?
if (tDownloadUrl.startsWith("/")) {
tDownloadUrl = context.getCurrentBaseURL() + tDownloadUrl;
LoggerUtils.quote("URL: " + tDownloadUrl);
}
// localStore
String tStoreName = curArgs[1];
// String thisScript = context.getCurrentTestCase().getFilename();
// String thisFolder = thisScript.substring(0, thisScript.lastIndexOf('\\'));
File writeFile = new File(tStoreName);
FileDownloader downloadHandler = new FileDownloader(context.getWrappedDriver());
try {
downloadHandler.setURI(tDownloadUrl);
File downloadedFile = downloadHandler.downloadFile();
Files.copy(downloadedFile, writeFile);
} catch (Exception e) {
return new Failure(e);
}
return new Success("Downloaded file and saved to: " + writeFile.getAbsolutePath());
}
}
/**
* Command "downloadFileFromUrl" - lade Daten aus einem Strom herunter.
*
* @author doerges
*
*/
private static class DownloadFileFromUrlToDisk extends AbstractCommand {
public DownloadFileFromUrlToDisk(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
// downloadUrl
String tDownloadUrl = curArgs[0];
if (tDownloadUrl.startsWith("$")) {
// strip the ${...} wrapper; use literal replace(), since "${" is not a valid regex for replaceAll()
tDownloadUrl = tDownloadUrl.replace("${", "");
tDownloadUrl = tDownloadUrl.replace("}", "");
LoggerUtils.quote("Key: " + tDownloadUrl);
tDownloadUrl = (String) context.getVarsMap().get(tDownloadUrl.trim());
LoggerUtils.quote("URL: " + tDownloadUrl);
}
// expand relative URL?
if (tDownloadUrl.startsWith("/")) {
tDownloadUrl = context.getCurrentBaseURL() + tDownloadUrl;
LoggerUtils.quote("URL: " + tDownloadUrl);
}
// localStore
String tStoreName = curArgs[1];
File writeFile = new File(tStoreName);
FileDownloader downloadHandler = new FileDownloader(context.getWrappedDriver());
try {
downloadHandler.setURI(tDownloadUrl);
File downloadedFile = downloadHandler.downloadFile();
Files.copy(downloadedFile, writeFile);
} catch (Exception e) {
return new Failure(e);
}
return new Success("Downloaded file and saved to: " + writeFile.getAbsolutePath());
}
}
/**
* Command "downloadFile" - lade Daten aus einem Strom herunter.
*
* @author doerges
*
*/
private static class DownloadFile extends AbstractCommand {
public DownloadFile(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
// downloadUrl
String tDownloadUrl = curArgs[0];
String url = context.getWrappedDriver().findElement(By.xpath(tDownloadUrl)).getAttribute("href");
// localStore
String tStoreName = curArgs[1];
String thisScript = context.getCurrentTestCase().getFilename();
String thisFolder = thisScript.substring(0, thisScript.lastIndexOf('\\'));
File writeFile = new File(thisFolder + "/" + tStoreName);
FileDownloader downloadHandler = new FileDownloader(context.getWrappedDriver());
try {
// use the href resolved from the locator, not the raw locator string
downloadHandler.setURI(url);
File downloadedFile = downloadHandler.downloadFile();
Files.copy(downloadedFile, writeFile);
} catch (Exception e) {
return new Failure(e);
}
return new Success("Downloaded file and saved to: " + writeFile.getAbsolutePath());
}
}
/**
* Command "downloadFileByClick" - Implementierung fuer das Extrakreditportal.
*
* @author doerges
*
*/
private static class DownloadFileByClick extends AbstractCommand {
public DownloadFileByClick(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
// Id
String tId = curArgs[0];
// localStore
String tStoreName = curArgs[1];
File writeFile = new File(tStoreName);
FileDownloader downloadHandler = new FileDownloader(context.getWrappedDriver());
String tSourceUrl = null;
try {
logger.info("try to get pdf by clicking on: {} ", tId);
WebElement downloadLink = context.getWrappedDriver().findElement(By.id(tId));
downloadLink.click();
// switch to the new tab
ArrayList<String> tabs = new ArrayList<String>(context.getWrappedDriver().getWindowHandles());
logger.info("Tab Count: {} ", tabs.size());
context.getWrappedDriver().switchTo().window(tabs.get(1));
downloadHandler.setURI(context.getWrappedDriver().getCurrentUrl());
// remember the URL for the log message
tSourceUrl = context.getWrappedDriver().getCurrentUrl();
File downloadedFile = downloadHandler.downloadFile();
Files.copy(downloadedFile, writeFile);
context.getWrappedDriver().switchTo().window(tabs.get(0));
} catch (Exception e) {
return new Failure(e);
} catch (Throwable te) {
return new Failure(te.getMessage());
}
return new Success(
"Downloaded file from source: " + tSourceUrl + " and saved to: " + writeFile.getAbsolutePath());
}
}
private static class DownloadFileByClickFromBrowser extends AbstractCommand {
public DownloadFileByClickFromBrowser(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
// Id
String tId = curArgs[0];
// localStore
String tStoreName = curArgs[1];
String thisScript = context.getCurrentTestCase().getFilename();
String thisFolder = thisScript.substring(0, thisScript.lastIndexOf('\\'));
logger.warn("DEPRECATED: resolved folder: {}", thisFolder);
File writeFile = new File(thisFolder + "/" + tStoreName);
FileDownloader downloadHandler = new FileDownloader(context.getWrappedDriver());
try {
logger.info("try to get pdf by clicking on: " + tId);
WebElement downloadLink = context.getWrappedDriver().findElement(By.id(tId));
downloadLink.click();
downloadHandler.setURI(context.getWrappedDriver().getCurrentUrl());
File downloadedFile = downloadHandler.downloadFile();
Files.copy(downloadedFile, writeFile);
} catch (Exception e) {
return new Failure(e);
} catch (Throwable te) {
return new Failure(te.getMessage());
}
return new Success("Downloaded file and saved to: " + writeFile.getAbsolutePath());
}
}
/**
* Command "loadUserCredentials" laedt Benutzerkennung und Passwort aus einer JS
* Datei in der Umgebung. Diese JS Datei ist identisch mit der JS Datei welche
* in der Selenium IDE verwendet wird.
*
* @author doerges
*
*/
private static class LoadUserCredentials extends AbstractCommand {
LoadUserCredentials(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
LoggerUtils.quote("Called executeImpl with Argument lengt: " + curArgs.length);
// credentials specifiction file
String tUserJS = curArgs[0];
LoggerUtils.quote("Read user credentials from: " + tUserJS);
// ApplicationScope
String tScope = curArgs[1];
// Actual folder
String thisScript = context.getCurrentTestCase().getFilename();
String thisFolder = thisScript.substring(0, thisScript.lastIndexOf('\\'));
String aUser = "unknown";
try {
File fXmlFile = new File(thisFolder + "/" + tUserJS);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document doc = dBuilder.parse(fXmlFile);
doc.getDocumentElement().normalize();
NodeList tList = doc.getElementsByTagName(tScope);
Node tScopeNode = tList.item(0);
Element tElement = (Element) tScopeNode;
// set varUserName and varPassword
aUser = tElement.getElementsByTagName("user").item(0).getTextContent();
context.getVarsMap().put("varUserName", aUser);
context.getVarsMap().put("varPassword", tElement.getElementsByTagName("pass").item(0).getTextContent());
} catch (IOException e) {
return new Failure(e);
} catch (SAXException e) {
return new Failure(e);
} catch (ParserConfigurationException e) {
return new Failure(e);
}
return new Success("User Credentials added to variable map (use: varUserName=>" + aUser
+ " and varPassword =>*****)!");
}
}
@Keyword(module = SeleneseTestAdaptionPlugin.ID, command = "takeScreenshot", hintTarget = "<path/to/file.png>")
private static class TakeScreenshot extends AbstractCommand {
TakeScreenshot(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE, VALUE);
}
@Override
protected Result executeImpl(Context context, String... curArgs) {
logger.info("Called executeImpl with Argument lengt: {}", curArgs.length);
String tScreenshotFile = curArgs[0];
File scrFile = ((TakesScreenshot)context.getWrappedDriver()).getScreenshotAs(OutputType.FILE);
File targetFile = new File(tScreenshotFile);
logger.info("target file for screenshot: {}", targetFile);
try {
FileUtils.copyFile(scrFile, targetFile);
} catch (IOException e) {
e.printStackTrace();
}
return new Success("ok");
}
}
/**
* Alternate keyword to realize an enter event without using javascript function.
* Used to automate VAADIN gui.
*
* @author doerges
*
*/
@Keyword(module = SeleneseTestAdaptionPlugin.ID, command = "pressEnter")
private static class PressEnter extends AbstractCommand {
PressEnter(int index, String name, String... args) {
super(index, name, args, VALUE, VALUE, VALUE);
}
protected Result executeImpl(Context context, String... curArgs) {
String locator = curArgs[0];
WebDriver driver = context.getWrappedDriver();
WebElement element = context.getElementFinder().findElement(driver, locator);
element.sendKeys(Keys.ENTER);
return new Success("ok");
}
}
/**
* Load new commands at runtime.
*
* Return null if command was not found.
*
* @author doerges
*/
public ICommand newCommand(int index, String name, String... args) {
LoggerUtils.quote("Called newCommand for " + name);
if (name.contentEquals("loadUserCredentials")) {
return new LoadUserCredentials(index, name, args);
}
if (name.contentEquals("downloadFileFromUrl")) {
return new DownloadFileFromUrl(index, name, args);
}
if (name.contentEquals("downloadFileFromUrlToDisk")) {
return new DownloadFileFromUrlToDisk(index, name, args);
}
if (name.contentEquals("downloadFileByClick")) {
return new DownloadFileByClick(index, name, args);
}
if (name.contentEquals("checkUrl")) {
return new CheckUrl(index, name, args);
}
if (name.contentEquals("downloadFile")) {
return new DownloadFile(index, name, args);
}
if (name.contentEquals("downloadFileByClickFromBrowser")) {
return new DownloadFileByClickFromBrowser(index, name, args);
}
if (name.contentEquals("takeScreenshot")) {
return new TakeScreenshot(index, name, args);
}
if (name.contentEquals("resize")) {
return new ResizeWindow(index, name, args);
}
if (name.contentEquals("close")) {
return new Close(index, name, args);
}
if (name.contentEquals("clearCookies")) {
return new ClearCookies(index, name, args);
}
if (name.contentEquals("pressEnter")) {
return new PressEnter(index, name, args);
}
return null;
}
}
| 34.155251 | 135 | 0.698997 |
5a0236e06d02c6b982c67739fe94a01b279aa43b | 1,405 | package jwk.minecraft.garden.timer;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Iterator;
import java.util.List;
import java.util.Queue;
import javax.annotation.Nonnull;
import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import cpw.mods.fml.relauncher.Side;
public class SidedTickListenerList {
private final Queue<ITickListener> ListenerList = Queues.newConcurrentLinkedQueue();
private final Side Side;
public SidedTickListenerList(@Nonnull Side side) {
Side = checkNotNull(side);
}
public ITickListener addListener(@Nonnull ITickListener listener) {
if (ListenerList.add(listener))
return listener;
return null;
}
public ITickListener removeListener(@Nonnull ITickListener listener) {
if (ListenerList.remove(listener))
return listener;
return null;
}
public boolean isScheduled(@Nonnull ITickListener listener) {
return ListenerList.contains(listener);
}
public void updateListeners() {
for (ITickListener listener : ListenerList)
listener.onUpdate();
}
public void removeAll() {
// clear() removes every listener; Queue.remove(Object) with an int index would not remove anything here
ListenerList.clear();
}
public Iterator<ITickListener> iterator() { return ListenerList.iterator(); }
public List<ITickListener> getListenerList() { return Lists.<ITickListener>newArrayList(ListenerList.iterator()); }
}
| 22.66129 | 116 | 0.753025 |
14c9a138b2d800357606d8f9dbc10bd315efb257 | 7,295 | package io.kubernetes.client.extended.generic;
import static com.github.tomakehurst.wiremock.client.WireMock.*;
import static org.junit.Assert.*;
import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.google.gson.Gson;
import io.kubernetes.client.custom.V1Patch;
import io.kubernetes.client.openapi.ApiClient;
import io.kubernetes.client.openapi.models.*;
import io.kubernetes.client.util.ClientBuilder;
import java.io.IOException;
import java.net.SocketTimeoutException;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
public class GenericKubernetesApiTest {
@Rule public WireMockRule wireMockRule = new WireMockRule(8181);
private GenericKubernetesApi<V1Job, V1JobList> jobClient;
@Before
public void setup() throws IOException {
ApiClient apiClient = new ClientBuilder().setBasePath("http://localhost:" + 8181).build();
jobClient =
new GenericKubernetesApi<>(V1Job.class, V1JobList.class, "batch", "v1", "jobs", apiClient);
}
// test delete
@Test
public void deleteNamespacedJobReturningStatus() {
V1Status status = new V1Status().kind("Status").code(200).message("good!");
stubFor(
delete(urlEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1"))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(status))));
KubernetesApiResponse<V1Job> deleteJobResp = jobClient.delete("default", "foo1", null);
assertTrue(deleteJobResp.isSuccess());
assertEquals(status, deleteJobResp.getStatus());
assertNull(deleteJobResp.getObject());
verify(1, deleteRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1")));
}
@Test
public void deleteNamespacedJobReturningDeletedObject() {
V1Job foo1 =
new V1Job().kind("Job").metadata(new V1ObjectMeta().namespace("default").name("foo1"));
stubFor(
delete(urlEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1"))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(foo1))));
KubernetesApiResponse<V1Job> deleteJobResp = jobClient.delete("default", "foo1");
assertTrue(deleteJobResp.isSuccess());
assertEquals(foo1, deleteJobResp.getObject());
assertNull(deleteJobResp.getStatus());
verify(1, deleteRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1")));
}
@Test
public void deleteNamespacedJobReturningForbiddenStatus() {
V1Status status = new V1Status().kind("Status").code(403).message("good!");
stubFor(
delete(urlEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1"))
.willReturn(aResponse().withStatus(403).withBody(new Gson().toJson(status))));
KubernetesApiResponse<V1Job> deleteJobResp = jobClient.delete("default", "foo1");
assertFalse(deleteJobResp.isSuccess());
assertEquals(status, deleteJobResp.getStatus());
assertNull(deleteJobResp.getObject());
verify(1, deleteRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1")));
}
@Test
public void listNamespacedJobReturningObject() {
V1JobList jobList = new V1JobList().kind("JobList").metadata(new V1ListMeta());
stubFor(
get(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs"))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(jobList))));
KubernetesApiResponse<V1JobList> jobListResp = jobClient.list("default");
assertTrue(jobListResp.isSuccess());
assertEquals(jobList, jobListResp.getObject());
assertNull(jobListResp.getStatus());
verify(1, getRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs")));
}
@Test
public void listClusterJobReturningObject() {
V1JobList jobList = new V1JobList().kind("JobList").metadata(new V1ListMeta());
stubFor(
get(urlPathEqualTo("/apis/batch/v1/jobs"))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(jobList))));
KubernetesApiResponse<V1JobList> jobListResp = jobClient.list();
assertTrue(jobListResp.isSuccess());
assertEquals(jobList, jobListResp.getObject());
assertNull(jobListResp.getStatus());
verify(1, getRequestedFor(urlPathEqualTo("/apis/batch/v1/jobs")));
}
@Test
public void createNamespacedJobReturningObject() {
V1Job foo1 =
new V1Job().kind("Job").metadata(new V1ObjectMeta().namespace("default").name("foo1"));
stubFor(
post(urlEqualTo("/apis/batch/v1/namespaces/default/jobs"))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(foo1))));
KubernetesApiResponse<V1Job> jobListResp = jobClient.create(foo1);
assertTrue(jobListResp.isSuccess());
assertEquals(foo1, jobListResp.getObject());
assertNull(jobListResp.getStatus());
verify(1, postRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs")));
}
@Test
public void updateNamespacedJobReturningObject() {
V1Job foo1 =
new V1Job().kind("Job").metadata(new V1ObjectMeta().namespace("default").name("foo1"));
stubFor(
put(urlEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1"))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(foo1))));
KubernetesApiResponse<V1Job> jobListResp = jobClient.update(foo1);
assertTrue(jobListResp.isSuccess());
assertEquals(foo1, jobListResp.getObject());
assertNull(jobListResp.getStatus());
verify(1, putRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1")));
}
@Test
public void patchNamespacedJobReturningObject() {
V1Patch v1Patch = new V1Patch("{}");
V1Job foo1 =
new V1Job().kind("Job").metadata(new V1ObjectMeta().namespace("default").name("foo1"));
stubFor(
patch(urlEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1"))
.withHeader("Content-Type", containing(V1Patch.PATCH_FORMAT_STRATEGIC_MERGE_PATCH))
.willReturn(aResponse().withStatus(200).withBody(new Gson().toJson(foo1))));
KubernetesApiResponse<V1Job> jobPatchResp =
jobClient.patch("default", "foo1", V1Patch.PATCH_FORMAT_STRATEGIC_MERGE_PATCH, v1Patch);
assertTrue(jobPatchResp.isSuccess());
assertEquals(foo1, jobPatchResp.getObject());
assertNull(jobPatchResp.getStatus());
verify(1, patchRequestedFor(urlPathEqualTo("/apis/batch/v1/namespaces/default/jobs/foo1")));
}
@Test
public void testReadTimeoutShouldThrowException() {
ApiClient apiClient = new ClientBuilder().setBasePath("http://localhost:" + 8181).build();
apiClient.setHttpClient(
apiClient
.getHttpClient()
.newBuilder()
.readTimeout(1, TimeUnit.MILLISECONDS) // timeout everytime
.build());
stubFor(
get(urlEqualTo("/apis/batch/v1/namespaces/foo/jobs/test"))
.willReturn(aResponse().withFixedDelay(99999).withStatus(200).withBody("")));
jobClient =
new GenericKubernetesApi<>(V1Job.class, V1JobList.class, "batch", "v1", "jobs", apiClient);
try {
KubernetesApiResponse<V1Job> response = jobClient.get("foo", "test");
} catch (Throwable t) {
assertTrue(t.getCause() instanceof SocketTimeoutException);
return;
}
fail("no exception happened");
}
}
| 41.214689 | 99 | 0.710624 |
5c284e4abe0eba6bf24bbf1c6e439dfae016e845 | 1,050 | package net.mostlyoriginal.api.utils.pooling;
import com.artemis.utils.reflect.ClassReflection;
import com.artemis.utils.reflect.Constructor;
import com.artemis.utils.reflect.ReflectionException;
/**
* Object pool that creates objects through the reflection mechanism.
*
* @author Namek
* @see ObjectPool
*/
public class ReflectionPool<T> extends ObjectPool<T> {
private final Constructor constructor;
public ReflectionPool(Class<T> type) {
super(type);
try {
constructor = ClassReflection.getConstructor(type);
constructor.setAccessible(true);
}
catch (ReflectionException e) {
String error = "Couldn't find parameterless public constructor for given class type " + type.getName();
throw new RuntimeException(error, e);
}
}
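// Illustrative usage (Bullet is a hypothetical type; it only needs a public no-arg constructor):
//   ReflectionPool<Bullet> pool = new ReflectionPool<>(Bullet.class);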
@SuppressWarnings("unchecked")
@Override
protected T instantiateObject() {
try {
return (T) constructor.newInstance();
}
catch (ReflectionException e) {
String error = "Couldn't instantiate object of type " + type.getName();
throw new RuntimeException(error, e);
}
}
}
| 25 | 106 | 0.734286 |
fb4b7f69d559aaa8881015451b45f8c6a8933a9b | 4,807 | package uk.ac.ebi.fg.utils.objects;
/*
* Copyright 2009-2014 European Molecular Biology Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import uk.ac.ebi.fg.utils.ReceivingType;
/**
* Experiment object containing all required fields for PubMed and ontology similarity calculations
*/
public class ExperimentId implements Comparable, Cloneable
{
private String myAccession;
private String mySpecies = "";
private int assayCount = 0; // hybridisations
private int myOWLDist = Integer.MAX_VALUE;
private int myPubMedDist = Integer.MAX_VALUE;
private int numbOfMatches = 0;
private int dist0Count = 0;
private int dist1Count = 0;
private int dist2Count = 0;
private int lowPriorityMatchCount = 0;
private float calculatedDistance = 0;
public ExperimentId clone()
{
try {
return (ExperimentId) super.clone();
} catch (CloneNotSupportedException e) {
System.out.println("Cloning not allowed"); // todo: check error
return null;
}
}
public ExperimentId( String id, ReceivingType rType, String species, int assayCount )
{
this.myAccession = id;
this.mySpecies = species;
if (rType.equals(ReceivingType.PUBMED))
myPubMedDist = Integer.MAX_VALUE - 1;
else if (rType.equals(ReceivingType.OWL))
myOWLDist = Integer.MAX_VALUE - 1;
this.assayCount = assayCount;
}
public String getSpecies()
{
return mySpecies;
}
public ExperimentId( String id, ReceivingType rType, int dist )
{
this.myAccession = id;
if (rType.equals(ReceivingType.PUBMED))
myPubMedDist = dist;
else if (rType.equals(ReceivingType.OWL))
myOWLDist = dist;
}
public String getAccession()
{
return myAccession;
}
public ReceivingType getType()
{
if (myOWLDist != Integer.MAX_VALUE) {
if (myPubMedDist != Integer.MAX_VALUE)
return ReceivingType.PUBMED_AND_OWL;
else
return ReceivingType.OWL;
} else if (myPubMedDist != Integer.MAX_VALUE)
return ReceivingType.PUBMED;
return ReceivingType.NONE;
}
public int getPubMedDistance()
{
return myPubMedDist;
}
public int getOWLDistance()
{
return myOWLDist;
}
public void setNumbOfMatches( int amount )
{
numbOfMatches = amount;
}
public void setDist0Count( int count )
{
dist0Count = count;
}
public void setDist1Count( int count )
{
dist1Count = count;
}
public void setDist2Count( int count )
{
dist2Count = count;
}
public void setLowPriorityMatchCount( int count )
{
lowPriorityMatchCount = count;
}
public void setCalculatedDistance( float distance )
{
calculatedDistance = distance;
}
public void setAssayCount( int count )
{
assayCount = count;
}
public int getNumbOfMatches()
{
return numbOfMatches;
}
public int getDist0Count()
{
return dist0Count;
}
public int getDist1Count()
{
return dist1Count;
}
public int getDist2Count()
{
return dist2Count;
}
public int getLowPriorityMatchCount()
{
return lowPriorityMatchCount;
}
public float getCalculatedDistance()
{
return calculatedDistance;
}
public int getAssayCount()
{
return assayCount;
}
@Override
public int hashCode()
{
return myAccession.hashCode();
}
@Override
public boolean equals( Object other )
{
if (this == other) return true;
if (null == other || getClass() != other.getClass()) return false;
ExperimentId a = (ExperimentId) other;
if (myAccession.equals(a.myAccession))
return true;
return false;
}
public int compareTo( Object other )
{
ExperimentId a = (ExperimentId) other;
return myAccession.compareTo(a.myAccession);
}
public String toString()
{
return "[" + myAccession + ", " + getType() + "] ";
}
}
| 23.915423 | 99 | 0.618265 |
6ec5e591af1f1e8aaccf82b72d8ecf9bed1b3bcc | 6,051 | /*
* BuildBattle - Ultimate building competition minigame
* Copyright (C) 2019 Plajer's Lair - maintained by Tigerpanzer_02, Plajer and contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.plajer.buildbattle.menus.options.registry.particles;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Particle;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.ItemStack;
import pl.plajer.buildbattle.Main;
import pl.plajer.buildbattle.menus.options.OptionsRegistry;
import pl.plajer.buildbattle.utils.Debugger;
import pl.plajer.buildbattle.utils.Utils;
import pl.plajerlair.commonsbox.minecraft.compat.XMaterial;
import pl.plajerlair.commonsbox.minecraft.configuration.ConfigUtils;
import pl.plajerlair.commonsbox.minecraft.item.ItemBuilder;
/**
* @author Plajer
* <p>
* Created at 23.12.2018
*/
public class ParticleRegistry {
private Inventory inventory;
private List<String> blackListedParticles = Arrays.asList("BLOCK_CRACK", "ITEM_CRACK", "ITEM_TAKE", "BLOCK_DUST", "MOB_APPEARANCE", "FOOTSTEP", "REDSTONE");
private Set<ParticleItem> registeredParticles = new HashSet<>();
private Main plugin;
public ParticleRegistry(OptionsRegistry registry) {
this.plugin = registry.getPlugin();
updateParticlesFile();
registerParticles();
registerInventory();
}
private void registerParticles() {
FileConfiguration config = ConfigUtils.getConfig(plugin, "particles");
Debugger.debug(Debugger.Level.TASK, "Registering particles!");
int i = 0;
for (Particle particle : Particle.values()) {
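// cap the menu at 52 particle entries; registerInventory() below still needs room for the
// "deactivate" item and the go-back item in the 54-slot inventory (assumed reason for the limit)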
if (i >= 52) {
Debugger.debug(Debugger.Level.WARN, "There are too many particles to register! Menu can't hold any more!");
break;
}
boolean blacklisted = false;
for (String blackList : blackListedParticles) {
if (particle.name().contains(blackList)) {
blacklisted = true;
break;
}
}
if (config.getBoolean(particle.toString() + ".disabled", false)){
blacklisted = true;
}
if (blacklisted) {
continue;
}
ParticleItem particleItem = new ParticleItem();
particleItem.setItemStack(new ItemBuilder(XMaterial.fromString(config
.getString(particle.toString() + ".material-name").toUpperCase()).parseItem())
.name(plugin.getChatManager().colorRawMessage(config.getString(particle.toString() + ".displayname")))
.lore(config.getStringList(particle.toString() + ".lore")
.stream().map(lore -> lore = plugin.getChatManager().colorRawMessage(lore)).collect(Collectors.toList()))
.build());
particleItem.setPermission(config.getString(particle.toString() + ".permission"));
particleItem.setEffect(particle);
registeredParticles.add(particleItem);
i++;
}
Debugger.debug(Debugger.Level.INFO, "Registered in total " + i + " particles!");
ConfigUtils.saveConfig(plugin, config, "particles");
}
private void updateParticlesFile() {
FileConfiguration config = ConfigUtils.getConfig(plugin, "particles");
for (Particle particle : Particle.values()) {
if (!config.isSet(particle.toString())) {
config.set(particle.toString() + ".displayname", "&6" + particle.toString());
config.set(particle.toString() + ".lore", Arrays.asList("&7Click to activate", "&7on your location"));
config.set(particle.toString() + ".material-name", Material.PAPER.name());
config.set(particle.toString() + ".permission", "particles.VIP");
continue;
}
if (!config.isSet(particle.toString() + ".material-name")) {
config.set(particle.toString() + ".material-name", Material.PAPER.name());
Debugger.debug(Debugger.Level.WARN, "Found outdated item in particles.yml! We've converted it to the newest version!");
}
}
ConfigUtils.saveConfig(plugin, config, "particles");
}
private void registerInventory() {
Inventory inv = Bukkit.createInventory(null, Utils.serializeInt(registeredParticles.size() + 1),
plugin.getChatManager().colorMessage("Menus.Option-Menu.Items.Particle.Inventory-Name"));
for (ParticleItem item : registeredParticles) {
inv.addItem(item.getItemStack());
}
inv.setItem(Utils.serializeInt(registeredParticles.size()) - 1, new ItemBuilder(new ItemStack(Material.REDSTONE_BLOCK))
.name(plugin.getChatManager().colorMessage("Menus.Option-Menu.Items.Particle.In-Inventory-Item-Name"))
.lore(Collections.singletonList(plugin.getChatManager().colorMessage("Menus.Option-Menu.Items.Particle.In-Inventory-Item-Lore")))
.build());
inv.addItem(Utils.getGoBackItem());
inventory = inv;
}
public Inventory getInventory() {
return inventory;
}
public void setInventory(Inventory inventory) {
this.inventory = inventory;
}
@Nullable
public ParticleItem getItemByEffect(Particle effect) {
for (ParticleItem item : registeredParticles) {
if (item.getEffect() == effect) {
return item;
}
}
return null;
}
public Set<ParticleItem> getRegisteredParticles() {
return registeredParticles;
}
}
| 38.788462 | 158 | 0.705668 |
1a2a0e9795e5d85d725c8efe6665af4aace25976 | 1,490 | package com.mmaitre314;
import org.apache.spark.sql.*;
import org.apache.spark.sql.streaming.*;
import org.apache.spark.sql.types.*;
public class Main {
public static void main(String[] args) throws Exception {
System.out.printf("Current folder: %s\n", System.getProperty("user.dir"));
SparkSession spark = SparkSession.builder()
.appName("SparkStructuredStreamingDemo")
.master("local[2]")
.getOrCreate();
Dataset<Row> stream = spark.readStream()
.schema(new StructType()
.add("Id", DataTypes.IntegerType)
.add("Count", DataTypes.IntegerType))
.csv("data\\input\\stream");
Dataset<Row> reference = spark.read()
.schema(new StructType()
.add("Id", DataTypes.IntegerType)
.add("Name", DataTypes.StringType))
.csv("data\\input\\reference.csv");
Dataset<Row> output = setupProcessing(spark, stream, reference);
StreamingQuery query = output.writeStream()
.format("csv")
.outputMode(OutputMode.Append())
.option("path", "data\\output")
.option("checkpointLocation", "data\\checkpoint")
.start();
query.awaitTermination();
}
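// setupProcessing joins the streaming CSV source with the static reference table on "Id";
// it is kept as a separate method, presumably so the transformation can be tested without starting the query.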
public static Dataset<Row> setupProcessing(SparkSession spark, Dataset<Row> stream, Dataset<Row> reference) {
return stream.join(reference, "Id");
}
}
| 32.391304 | 113 | 0.587919 |
3d5c5df848df24b83c19f6d2b711790430d5152a | 831 | /*******************************************************************************
* Copyright (c) 2017, 2018 Pegasystems Inc. All rights reserved.
*
* Contributors:
* Manu Varghese
*******************************************************************************/
package com.pega.gcs.tracerviewer.model;
import java.awt.Color;
import org.dom4j.Element;
import com.pega.gcs.fringecommon.guiutilities.MyColor;
public class TraceEventAsynchronousActivity extends TraceEvent {
public TraceEventAsynchronousActivity(TraceEventKey traceEventKey, byte[] bytes, Element traceEventElement) {
super(traceEventKey, bytes, traceEventElement);
traceEventType = TraceEventType.ASYNCHRONOUS_ACTIVITY;
}
@Override
public Color getBaseColumnBackground() {
return MyColor.LIGHT_GREEN;
}
}
| 27.7 | 113 | 0.602888 |
e45601a5e989d9c145298e0bfa8593706046800a | 252 | package com.gls.starter.data.jpa.base;
import org.springframework.data.repository.NoRepositoryBean;
/**
* @author george
*/
@NoRepositoryBean
public interface BaseEntityRepository<Entity extends BaseEntity> extends BaseRepository<Entity, Long> {
}
| 22.909091 | 103 | 0.801587 |
81062dbe562573b166523bf6bcbc0aa81f441749 | 1,239 | package com.mkyong.java11.jep329.chacha20;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.spec.ChaCha20ParameterSpec;
/*
The inputs to ChaCha20 encryption, specified by RFC 7539, are:
- A 256-bit secret key (32 bytes)
- A 96-bit nonce (12 bytes)
- A 32-bit initial count (4 bytes)
abstract class ChaCha20Cipher extends CipherSpi
*/
public class ChaCha20 {
private static final String ENCRYPT_ALGO = "ChaCha20";
public byte[] encrypt(byte[] pText, SecretKey key, byte[] nonce, int counter) throws Exception {
Cipher cipher = Cipher.getInstance(ENCRYPT_ALGO);
ChaCha20ParameterSpec param = new ChaCha20ParameterSpec(nonce, counter);
cipher.init(Cipher.ENCRYPT_MODE, key, param);
byte[] encryptedText = cipher.doFinal(pText);
return encryptedText;
}
public byte[] decrypt(byte[] cText, SecretKey key, byte[] nonce, int counter) throws Exception {
Cipher cipher = Cipher.getInstance(ENCRYPT_ALGO);
ChaCha20ParameterSpec param = new ChaCha20ParameterSpec(nonce, counter);
cipher.init(Cipher.DECRYPT_MODE, key, param);
byte[] decryptedText = cipher.doFinal(cText);
return decryptedText;
}
}
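// --- Usage sketch (editor's addition, not part of the original sample) ---
// Drives the ChaCha20 helper above with a 256-bit key from the JCE "ChaCha20"
// KeyGenerator (Java 11+), a random 96-bit nonce and initial counter 1 as in
// RFC 7539. Class name and plaintext are illustrative only.
class ChaCha20UsageSketch {

    public static void main(String[] args) throws Exception {
        javax.crypto.KeyGenerator keyGen = javax.crypto.KeyGenerator.getInstance("ChaCha20");
        keyGen.init(256, new java.security.SecureRandom());
        SecretKey key = keyGen.generateKey();

        // 12-byte nonce; must never be reused with the same key
        byte[] nonce = new byte[12];
        new java.security.SecureRandom().nextBytes(nonce);

        ChaCha20 cipher = new ChaCha20();
        byte[] cText = cipher.encrypt("hello ChaCha20".getBytes(java.nio.charset.StandardCharsets.UTF_8), key, nonce, 1);
        byte[] pText = cipher.decrypt(cText, key, nonce, 1);

        // prints: hello ChaCha20
        System.out.println(new String(pText, java.nio.charset.StandardCharsets.UTF_8));
    }
}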
| 26.361702 | 100 | 0.708636 |
4d4e484a8a8bb65a9d69677ac3bf59ebcfe6146d | 5,184 | package com.joeracosta.library;
import android.content.Context;
import android.os.Bundle;
import android.os.Parcelable;
import android.util.AttributeSet;
import android.view.View;
import android.widget.FrameLayout;
import com.joeracosta.library.Map.NoViewIDException;
/**
* Created by Joe on 1/4/2016.
 * Abstract class that adds convenience methods to a View. A Screen should be thought of as one screen
 * on the device. It can contain one view or many custom views. The implementation
* of this class should contain a ViewFactory so it can be properly added to a ViewStack or ViewMap.
*/
public abstract class Screen extends FrameLayout {
private static String INSTANCE_STATE = "com.joeracosta.screen.instanceState";
private boolean mRestored;
private Bundle mPassedData;
public Screen(Context context, AttributeSet attrs) {
super(context, attrs);
setId(getViewId());
}
/**
* Call this when you want to give your Screen some data
* @param dataToPass the Bundle of data you're passing over
*/
public void setPassedData(Bundle dataToPass){
mPassedData = dataToPass;
}
/**
* Call this to get the data that was passed to this Screen. Once you call this
* the data is cleared. In other words, two consecutive calls to this will yield the appropriate
* bundle and null respectively.
* @return the data that was passed to this Screen
*/
public Bundle getPassedData(){
Bundle tempData = mPassedData;
mPassedData = null;
return tempData;
}
protected boolean isRestored(){
return mRestored;
}
@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
if (getId() == View.NO_ID){
try {
throw new NoViewIDException("Your Screen must set a unique ID, returned in the getViewID() method");
} catch (NoViewIDException e) {
e.printStackTrace();
}
}
onScreenAttached();
}
// Note: This won't be called when we push the next View onto the stack because this View is
    // kept in the container's view hierarchy. Its visibility is just set to GONE.
@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
onScreenGone();
onScreenDetached();
}
/**
* Called when a screen is attached to the window.
*/
protected void onScreenAttached(){
}
/**
* Must return a unique view id. This is the id that the Screen's view will be set to as soon as it's created.
* Best practice would be to define your screen IDs in your values resource folder and use those.
* @return a unique id for this Screen
*/
public abstract int getViewId();
/**
* Called when a Screen is detached from the window. This won't necessarily be called
* just because a Screen is no longer visible to the user.
*/
protected void onScreenDetached(){
}
// Note: These instance state saving methods will only be called if the view has an id.
@Override
protected Parcelable onSaveInstanceState() {
Bundle bundle = new Bundle();
bundle.putParcelable(INSTANCE_STATE, super.onSaveInstanceState());
return onSaveState(bundle);
}
@Override
protected void onRestoreInstanceState(Parcelable savedInstanceState) {
if (savedInstanceState instanceof Bundle) {
Bundle bundle = (Bundle) savedInstanceState;
mRestored = true;
onRestoreState(bundle);
savedInstanceState = bundle.getParcelable(INSTANCE_STATE);
}
super.onRestoreInstanceState(savedInstanceState);
}
/**
* Called when screen is destroyed and is saving state
* @param bundle the bundle to return. Add what you want to this before returning it back.
     * @return the bundle containing this screen's saved state
*/
protected Bundle onSaveState(Bundle bundle){
return bundle;
}
/**
* Called when screen is restored with data
* @param bundle The bundle with saved stuff. Grab your stuff from this bundle.
*/
protected void onRestoreState(Bundle bundle){
}
@Override
protected void onVisibilityChanged(View changedView, int visibility) {
switch (visibility){
case VISIBLE:
if (changedView == this || getVisibility() == VISIBLE) {
onScreenVisible();
}
break;
case GONE:
if (changedView == this || getVisibility() != GONE) {
onScreenGone();
}
break;
}
super.onVisibilityChanged(changedView, visibility);
}
/**
     * Called when the screen becomes visible. The screen might have been created or brought
     * back into view.
*/
protected void onScreenVisible(){
}
/**
* Called when screen is gone from view or detached. The screen still might exist and be attached, but
* it is not visible
*/
protected void onScreenGone(){
mPassedData = null;
}
}
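// --- Usage sketch (editor's addition, not part of the original library) ---
// A minimal concrete Screen. EXAMPLE_SCREEN_ID and the saved "query" key are
// hypothetical; a real app would normally declare the id in res/values as the
// class javadoc recommends and expose a ViewFactory so the screen can be added
// to a ViewStack or ViewMap.
class ExampleScreen extends Screen {

    // Stable, app-unique view id for this screen (illustrative value).
    private static final int EXAMPLE_SCREEN_ID = 0x00000101;

    public ExampleScreen(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    public int getViewId() {
        return EXAMPLE_SCREEN_ID;
    }

    @Override
    protected Bundle onSaveState(Bundle bundle) {
        // Persist transient UI state before the view hierarchy is destroyed.
        bundle.putString("query", "coffee");
        return bundle;
    }

    @Override
    protected void onRestoreState(Bundle bundle) {
        // Re-apply whatever onSaveState() stored.
        String restoredQuery = bundle.getString("query");
    }
}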
| 30.857143 | 116 | 0.641975 |
a9ab8b1c76619f38ba3acb96612e8f2ada1fe053 | 2,155 | package gq.baijie.cardgame.client.android.ui.view;
import android.content.Context;
import android.support.annotation.DrawableRes;
import android.support.percent.PercentFrameLayout;
import android.support.percent.PercentLayoutHelper;
import android.support.v7.widget.AppCompatImageView;
import android.view.Gravity;
import gq.baijie.cardgame.client.android.R;
import gq.baijie.cardgame.domain.entity.Card;
public class AndroidCardView extends PercentFrameLayout {
private final AppCompatImageView contentView;
private final Card card;
private boolean open;
public AndroidCardView(Context context, Card card, boolean open) {
super(context);
contentView = new AppCompatImageView(context);
this.card = card;
this.open = open;
init();
}
private void init() {
contentView.setBackgroundResource(R.drawable.card_background);
updateContentView();
addView(contentView, 0, 0);
final LayoutParams layoutParams = (LayoutParams) contentView.getLayoutParams();
layoutParams.gravity = Gravity.CENTER_HORIZONTAL;
final PercentLayoutHelper.PercentLayoutInfo layoutInfo = layoutParams.getPercentLayoutInfo();
// https://en.wikipedia.org/wiki/Standard_52-card_deck
layoutInfo.aspectRatio = 0.71428571428571428571428571428571f;// 2.5 / 3.5
layoutInfo.widthPercent = 1f;
}
public Card getCard() {
return card;
}
public boolean isOpen() {
return open;
}
public void setOpen(boolean open) {
if (this.open == open) {
return;
}
this.open = open;
updateContentView();
}
private void updateContentView() {
if (open) {
contentView.setImageResource(toDrawableRes(card));
} else {
contentView.setImageResource(R.drawable.card_back);
}
}
@DrawableRes
private int toDrawableRes(Card card) {
return getResources()
.getIdentifier(toDrawableResName(card), "drawable", getContext().getPackageName());
}
/**
     * <strong>Note</strong>: does not support Joker cards
*/
private static String toDrawableResName(Card card) {
return "card_" + card.getSuit().name().toLowerCase() + "_" + card.getRank().getId();
}
}
| 26.604938 | 97 | 0.721578 |
0ac3b5de73173f3c492b90a740a9869baa3ea6ec | 5,128 | package patrick.pramedia.wire.adapter;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.Toast;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;
import com.android.volley.toolbox.Volley;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import patrick.pramedia.wire.ActivityLogin;
import patrick.pramedia.wire.MainActivity;
import patrick.pramedia.wire.R;
import patrick.pramedia.wire.entitas.EntitasDevice;
import patrick.pramedia.wire.entitas.RemoveDevice;
import patrick.pramedia.wire.modul.Globals;
import patrick.pramedia.wire.modul.SecurityAES128CBC;
import patrick.pramedia.wire.modul.VarGlobals;
/**
* Created by PRA on 5/11/2019.
*/
public class AdapterRemoveDevice extends RecyclerView.Adapter<AdapterRemoveDevice.MyViewHolder> {
private List<RemoveDevice> list;
private Globals g = new Globals();
private Context context;
public class MyViewHolder extends RecyclerView.ViewHolder {
private View view;
private TextView tv_nama;
private ImageView imgremove;
public MyViewHolder(View view) {
super(view);
this.view = view;
tv_nama = (TextView) view.findViewById(R.id.tv_namaDevice);
imgremove = (ImageView) view.findViewById(R.id.imgremove);
}
}
public AdapterRemoveDevice(List<RemoveDevice> listData, Context context) {
this.list = listData;
this.context = context;
}
@Override
public MyViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View itemView = LayoutInflater.from(parent.getContext()).inflate(R.layout.row_remove_device, parent, false);
return new MyViewHolder(itemView);
}
@Override
public void onBindViewHolder(final MyViewHolder holder, final int position) {
final RemoveDevice obj = list.get(position);
holder.tv_nama.setText(obj.getDevice_name());
holder.imgremove.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
deleteitem(g.getServer()+"removeItemGroup.php", obj.getGroup_id(), obj.getDevice_id(), position);
}
});
}
@Override
public int getItemCount() {
return list.size();
}
private void deleteitem(String url, final String group_id, final String device_id, final int posisi){
final ProgressDialog progress = new ProgressDialog(context);
progress.setMessage("Loading ID..."); // Setting Message
//progress.setTitle("ProgressDialog"); // Setting Title
progress.setProgressStyle(ProgressDialog.STYLE_SPINNER); // Progress Dialog Style Spinner
progress.show(); // Display Progress Dialog
progress.setCancelable(false);
StringRequest stringRequest = new StringRequest(Request.Method.POST, url,
new Response.Listener<String>() {
@Override
public void onResponse(String response) {
try {
progress.dismiss();
Toast.makeText(context, response, Toast.LENGTH_SHORT).show();
list.remove(posisi);
notifyItemRemoved(posisi);
notifyItemRangeChanged(posisi,list.size());
}catch (Exception e){
Toast.makeText(context, e.getMessage(), Toast.LENGTH_SHORT).show();
progress.dismiss();
}
}
},
new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
Toast.makeText(context, "The server unreachable", Toast.LENGTH_SHORT).show();
progress.dismiss();
}
})
{
@Override
protected Map<String, String> getParams() throws AuthFailureError {
Map<String, String> params = new HashMap<>();
//Adding parameters to request
params.put("grouping_id", group_id);
params.put("device_id", device_id);
//returning parameter
return params;
}
};
//Adding the string request to the queue
Volley.newRequestQueue(context).add(stringRequest);
}
}
| 37.430657 | 117 | 0.619735 |
1e41f3d23f8a2daf48ca2284299f047f20e49a0f | 19,971 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/assuredworkloads/v1/assuredworkloads.proto
package com.google.cloud.assuredworkloads.v1;
public final class AssuredworkloadsProto {
private AssuredworkloadsProto() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_DeleteWorkloadRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_DeleteWorkloadRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_GetWorkloadRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_GetWorkloadRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_Workload_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceInfo_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceInfo_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_Workload_KMSSettings_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_Workload_KMSSettings_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceSettings_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceSettings_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_Workload_LabelsEntry_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_Workload_LabelsEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadOperationMetadata_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadOperationMetadata_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n7google/cloud/assuredworkloads/v1/assur" +
"edworkloads.proto\022 google.cloud.assuredw" +
"orkloads.v1\032\034google/api/annotations.prot" +
"o\032\027google/api/client.proto\032\037google/api/f" +
"ield_behavior.proto\032\031google/api/resource" +
".proto\032#google/longrunning/operations.pr" +
"oto\032\036google/protobuf/duration.proto\032\033goo" +
"gle/protobuf/empty.proto\032 google/protobu" +
"f/field_mask.proto\032\037google/protobuf/time" +
"stamp.proto\"\266\001\n\025CreateWorkloadRequest\022@\n" +
"\006parent\030\001 \001(\tB0\340A\002\372A*\022(assuredworkloads." +
"googleapis.com/Workload\022A\n\010workload\030\002 \001(" +
"\0132*.google.cloud.assuredworkloads.v1.Wor" +
"kloadB\003\340A\002\022\030\n\013external_id\030\003 \001(\tB\003\340A\001\"\220\001\n" +
"\025UpdateWorkloadRequest\022A\n\010workload\030\001 \001(\013" +
"2*.google.cloud.assuredworkloads.v1.Work" +
"loadB\003\340A\002\0224\n\013update_mask\030\002 \001(\0132\032.google." +
"protobuf.FieldMaskB\003\340A\002\"j\n\025DeleteWorkloa" +
"dRequest\022>\n\004name\030\001 \001(\tB0\340A\002\372A*\n(assuredw" +
"orkloads.googleapis.com/Workload\022\021\n\004etag" +
"\030\002 \001(\tB\003\340A\001\"T\n\022GetWorkloadRequest\022>\n\004nam" +
"e\030\001 \001(\tB0\340A\002\372A*\n(assuredworkloads.google" +
"apis.com/Workload\"\217\001\n\024ListWorkloadsReque" +
"st\022@\n\006parent\030\001 \001(\tB0\340A\002\372A*\022(assuredworkl" +
"oads.googleapis.com/Workload\022\021\n\tpage_siz" +
"e\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\022\016\n\006filter\030\004 " +
"\001(\t\"o\n\025ListWorkloadsResponse\022=\n\tworkload" +
"s\030\001 \003(\0132*.google.cloud.assuredworkloads." +
"v1.Workload\022\027\n\017next_page_token\030\002 \001(\t\"\240\014\n" +
"\010Workload\022\021\n\004name\030\001 \001(\tB\003\340A\001\022\031\n\014display_" +
"name\030\002 \001(\tB\003\340A\002\022O\n\tresources\030\003 \003(\01327.goo" +
"gle.cloud.assuredworkloads.v1.Workload.R" +
"esourceInfoB\003\340A\003\022^\n\021compliance_regime\030\004 " +
"\001(\0162;.google.cloud.assuredworkloads.v1.W" +
"orkload.ComplianceRegimeB\006\340A\002\340A\005\0227\n\013crea" +
"te_time\030\005 \001(\0132\032.google.protobuf.Timestam" +
"pB\006\340A\003\340A\005\022\037\n\017billing_account\030\006 \001(\tB\006\340A\002\340" +
"A\004\022\021\n\004etag\030\t \001(\tB\003\340A\001\022K\n\006labels\030\n \003(\01326." +
"google.cloud.assuredworkloads.v1.Workloa" +
"d.LabelsEntryB\003\340A\001\022)\n\034provisioned_resour" +
"ces_parent\030\r \001(\tB\003\340A\004\022Q\n\014kms_settings\030\016 " +
"\001(\01326.google.cloud.assuredworkloads.v1.W" +
"orkload.KMSSettingsB\003\340A\004\022[\n\021resource_set" +
"tings\030\017 \003(\0132;.google.cloud.assuredworklo" +
"ads.v1.Workload.ResourceSettingsB\003\340A\004\032\357\001" +
"\n\014ResourceInfo\022\023\n\013resource_id\030\001 \001(\003\022[\n\rr" +
"esource_type\030\002 \001(\0162D.google.cloud.assure" +
"dworkloads.v1.Workload.ResourceInfo.Reso" +
"urceType\"m\n\014ResourceType\022\035\n\031RESOURCE_TYP" +
"E_UNSPECIFIED\020\000\022\024\n\020CONSUMER_PROJECT\020\001\022\033\n" +
"\027ENCRYPTION_KEYS_PROJECT\020\002\022\013\n\007KEYRING\020\003\032" +
"\217\001\n\013KMSSettings\022A\n\022next_rotation_time\030\001 " +
"\001(\0132\032.google.protobuf.TimestampB\t\340A\002\340A\004\340" +
"A\005\022=\n\017rotation_period\030\002 \001(\0132\031.google.pro" +
"tobuf.DurationB\t\340A\002\340A\004\340A\005\032\232\001\n\020ResourceSe" +
"ttings\022\023\n\013resource_id\030\001 \001(\t\022[\n\rresource_" +
"type\030\002 \001(\0162D.google.cloud.assuredworkloa" +
"ds.v1.Workload.ResourceInfo.ResourceType" +
"\022\024\n\014display_name\030\003 \001(\t\032-\n\013LabelsEntry\022\013\n" +
"\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"\330\001\n\020Compli" +
"anceRegime\022!\n\035COMPLIANCE_REGIME_UNSPECIF" +
"IED\020\000\022\007\n\003IL4\020\001\022\010\n\004CJIS\020\002\022\020\n\014FEDRAMP_HIGH" +
"\020\003\022\024\n\020FEDRAMP_MODERATE\020\004\022\026\n\022US_REGIONAL_" +
"ACCESS\020\005\022\t\n\005HIPAA\020\006\022\013\n\007HITRUST\020\007\022\032\n\026EU_R" +
"EGIONS_AND_SUPPORT\020\010\022\032\n\026CA_REGIONS_AND_S" +
"UPPORT\020\t:u\352Ar\n(assuredworkloads.googleap" +
"is.com/Workload\022Forganizations/{organiza" +
"tion}/locations/{location}/workloads/{wo" +
"rkload}\"\344\001\n\037CreateWorkloadOperationMetad" +
"ata\0224\n\013create_time\030\001 \001(\0132\032.google.protob" +
"uf.TimestampB\003\340A\001\022\031\n\014display_name\030\002 \001(\tB" +
"\003\340A\001\022\023\n\006parent\030\003 \001(\tB\003\340A\001\022[\n\021compliance_" +
"regime\030\004 \001(\0162;.google.cloud.assuredworkl" +
"oads.v1.Workload.ComplianceRegimeB\003\340A\0012\342" +
"\010\n\027AssuredWorkloadsService\022\357\001\n\016CreateWor" +
"kload\0227.google.cloud.assuredworkloads.v1" +
".CreateWorkloadRequest\032\035.google.longrunn" +
"ing.Operation\"\204\001\202\323\344\223\002>\"2/v1/{parent=orga" +
"nizations/*/locations/*}/workloads:\010work" +
"load\332A\017parent,workload\312A+\n\010Workload\022\037Cre" +
"ateWorkloadOperationMetadata\022\333\001\n\016UpdateW" +
"orkload\0227.google.cloud.assuredworkloads." +
"v1.UpdateWorkloadRequest\032*.google.cloud." +
"assuredworkloads.v1.Workload\"d\202\323\344\223\002G2;/v" +
"1/{workload.name=organizations/*/locatio" +
"ns/*/workloads/*}:\010workload\332A\024workload,u" +
"pdate_mask\022\244\001\n\016DeleteWorkload\0227.google.c" +
"loud.assuredworkloads.v1.DeleteWorkloadR" +
"equest\032\026.google.protobuf.Empty\"A\202\323\344\223\0024*2" +
"/v1/{name=organizations/*/locations/*/wo" +
"rkloads/*}\332A\004name\022\262\001\n\013GetWorkload\0224.goog" +
"le.cloud.assuredworkloads.v1.GetWorkload" +
"Request\032*.google.cloud.assuredworkloads." +
"v1.Workload\"A\202\323\344\223\0024\0222/v1/{name=organizat" +
"ions/*/locations/*/workloads/*}\332A\004name\022\305" +
"\001\n\rListWorkloads\0226.google.cloud.assuredw" +
"orkloads.v1.ListWorkloadsRequest\0327.googl" +
"e.cloud.assuredworkloads.v1.ListWorkload" +
"sResponse\"C\202\323\344\223\0024\0222/v1/{parent=organizat" +
"ions/*/locations/*}/workloads\332A\006parent\032S" +
"\312A\037assuredworkloads.googleapis.com\322A.htt" +
"ps://www.googleapis.com/auth/cloud-platf" +
"ormB\335\002\n$com.google.cloud.assuredworkload" +
"s.v1B\025AssuredworkloadsProtoP\001ZPgoogle.go" +
"lang.org/genproto/googleapis/cloud/assur" +
"edworkloads/v1;assuredworkloads\252\002 Google" +
".Cloud.AssuredWorkloads.V1\312\002 Google\\Clou" +
"d\\AssuredWorkloads\\V1\352\002#Google::Cloud::A" +
"ssuredWorkloads::V1\352A]\n(assuredworkloads" +
".googleapis.com/Location\0221organizations/" +
"{organization}/locations/{location}b\006pro" +
"to3"
};
descriptor = com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.longrunning.OperationsProto.getDescriptor(),
com.google.protobuf.DurationProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
});
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadRequest_descriptor,
new java.lang.String[] { "Parent", "Workload", "ExternalId", });
internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_UpdateWorkloadRequest_descriptor,
new java.lang.String[] { "Workload", "UpdateMask", });
internal_static_google_cloud_assuredworkloads_v1_DeleteWorkloadRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_assuredworkloads_v1_DeleteWorkloadRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_DeleteWorkloadRequest_descriptor,
new java.lang.String[] { "Name", "Etag", });
internal_static_google_cloud_assuredworkloads_v1_GetWorkloadRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_assuredworkloads_v1_GetWorkloadRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_GetWorkloadRequest_descriptor,
new java.lang.String[] { "Name", });
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsRequest_descriptor,
new java.lang.String[] { "Parent", "PageSize", "PageToken", "Filter", });
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsResponse_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_ListWorkloadsResponse_descriptor,
new java.lang.String[] { "Workloads", "NextPageToken", });
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_assuredworkloads_v1_Workload_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor,
new java.lang.String[] { "Name", "DisplayName", "Resources", "ComplianceRegime", "CreateTime", "BillingAccount", "Etag", "Labels", "ProvisionedResourcesParent", "KmsSettings", "ResourceSettings", });
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceInfo_descriptor =
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceInfo_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceInfo_descriptor,
new java.lang.String[] { "ResourceId", "ResourceType", });
internal_static_google_cloud_assuredworkloads_v1_Workload_KMSSettings_descriptor =
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor.getNestedTypes().get(1);
internal_static_google_cloud_assuredworkloads_v1_Workload_KMSSettings_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_Workload_KMSSettings_descriptor,
new java.lang.String[] { "NextRotationTime", "RotationPeriod", });
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceSettings_descriptor =
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor.getNestedTypes().get(2);
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceSettings_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_Workload_ResourceSettings_descriptor,
new java.lang.String[] { "ResourceId", "ResourceType", "DisplayName", });
internal_static_google_cloud_assuredworkloads_v1_Workload_LabelsEntry_descriptor =
internal_static_google_cloud_assuredworkloads_v1_Workload_descriptor.getNestedTypes().get(3);
internal_static_google_cloud_assuredworkloads_v1_Workload_LabelsEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_Workload_LabelsEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadOperationMetadata_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadOperationMetadata_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_assuredworkloads_v1_CreateWorkloadOperationMetadata_descriptor,
new java.lang.String[] { "CreateTime", "DisplayName", "Parent", "ComplianceRegime", });
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resource);
registry.add(com.google.api.ResourceProto.resourceDefinition);
registry.add(com.google.api.ResourceProto.resourceReference);
registry.add(com.google.longrunning.OperationsProto.operationInfo);
com.google.protobuf.Descriptors.FileDescriptor
.internalUpdateFileDescriptor(descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.longrunning.OperationsProto.getDescriptor();
com.google.protobuf.DurationProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| 64.422581 | 207 | 0.774573 |
89b79d3247e148662704cb02012241d29e1ede69 | 473 | package com.brainlesslabs.momo.common.exceptions;
public class DataCorruptedException extends MomoException {
public DataCorruptedException() {
super(MomoErrorCodes.DATA_CORRUPTED);
}
public DataCorruptedException(final String message) {
super(message, MomoErrorCodes.DATA_CORRUPTED);
}
public DataCorruptedException(final String message, final Throwable cause) {
super(message, cause, MomoErrorCodes.DATA_CORRUPTED);
}
}
| 29.5625 | 80 | 0.7463 |
2be81e6fe1c48f7fbb39b5aed0596218c4d84e58 | 3,493 | /*
* Copyright 2016 Steven Smith [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.kanji_tutor.multipleitemslist;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
/**
 * Child of ViewHolderDelegate. ViewHolder creation and object customization
 * happen here.
*/
public class Item extends ViewHolderDelegate {
private static final String TAG = "Item";
private static final int resId = R.layout.item;
public class ItemVH extends ViewHolder {
private static final String TAG = "ItemVH";
// view elements for viewholder.
protected View view;
protected TextView itemTV;
protected Item item;
@Override public int getItemResId() {return resId; }
protected ItemVH(LayoutInflater inflater) {
super();
view = inflater.inflate(resId, null);
view.setId(super.getId());
itemTV = (TextView)view.findViewById(R.id.item_text);
}
// this is the position of the viewholder in the layout list.
        // In the ListAdapter's getView method, if the list position and
        // the viewholder position differ, the viewholder should be
        // updated to match the ViewHolderDelegate backing the list item
        // at that position.
protected int position;
@Override
int getPosition() {
return position;
}
@Override
public String toString() {
return String.format("%s:position=%d:text=\"%s\":resId=0x%08x",
TAG, position, itemTV.getText().toString(), resId);
}
}
    // resId is used to match objects with viewholders. If a delegate and a
    // viewholder share the same resource id, the delegate can update the
    // viewholder.
@Override public int getItemResId() { return resId; }
// String to display in item text view.
private String itemText;
// constructor
public Item(String itemText) {
super(resId);
this.itemText = itemText;
}
// viewholder creation and update happen here. If the viewholder passed
// in is null or of a different type, create a new one. Otherwise, just
// configure this viewholder to reflect the object.
@Override
protected View setupVH(LayoutInflater inflater
, ViewHolderDelegate.ViewHolder vh, int position) {
ItemVH viewHolder;
if (vh == null || vh.getItemResId() != getItemResId())
viewHolder = new ItemVH(inflater);
else
viewHolder = (ItemVH) vh;
viewHolder.position = position;
viewHolder.itemTV.setText(itemText);
viewHolder.item = this;
viewHolder.view.setTag(viewHolder);
return viewHolder.view;
}
@Override
public String toString() {
return String.format("%s:text=%s:resId=0x%08x", TAG , itemText, resId);
}
}
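// --- Usage sketch (editor's addition, not part of the original file) ---
// Illustrates the getView() contract described in the comments above: the last
// ViewHolder travels on the convertView's tag, and setupVH() either updates it in
// place (matching resource id) or inflates a replacement. The adapter below is a
// hypothetical minimal BaseAdapter, not part of this library.
class ItemListAdapterSketch extends android.widget.BaseAdapter {

    private final java.util.List<ViewHolderDelegate> delegates;
    private final LayoutInflater inflater;

    ItemListAdapterSketch(android.content.Context context, java.util.List<ViewHolderDelegate> delegates) {
        this.delegates = delegates;          // e.g. a list containing new Item("hello")
        this.inflater = LayoutInflater.from(context);
    }

    @Override public int getCount() { return delegates.size(); }
    @Override public Object getItem(int position) { return delegates.get(position); }
    @Override public long getItemId(int position) { return position; }

    @Override
    public View getView(int position, View convertView, android.view.ViewGroup parent) {
        // The previous ViewHolder (if any) was stored on the view's tag by setupVH().
        ViewHolderDelegate.ViewHolder recycled =
                convertView == null ? null : (ViewHolderDelegate.ViewHolder) convertView.getTag();
        // Reused when the resource ids match, otherwise a fresh one is created.
        return delegates.get(position).setupVH(inflater, recycled, position);
    }
}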
| 35.282828 | 79 | 0.655597 |
c609a5ada67fb77ac24191337b271b0f1463f5ae | 251 | package com.ruoyi.system.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.ruoyi.system.domain.SysRoleDept;
/**
 * Data layer for the role-to-department association table
*
* @author ruoyi
*/
public interface SysRoleDeptMapper extends BaseMapper<SysRoleDept> {
}
| 17.928571 | 68 | 0.772908 |
1b42c334ff1f8bd56e3c9ada499cc406af2690ca | 930 | /*-------------------------------------------------------------------------
*
* Copyright (c) 2008-2011, PostgreSQL Global Development Group
*
*
*-------------------------------------------------------------------------
*/
package org.postgresql.jdbc3g;
import java.sql.SQLException;
import java.util.Properties;
import org.postgresql.core.Oid;
import org.postgresql.core.TypeInfo;
import org.postgresql.util.HostSpec;
public abstract class AbstractJdbc3gConnection extends org.postgresql.jdbc3.AbstractJdbc3Connection
{
public AbstractJdbc3gConnection(HostSpec[] hostSpecs, String user, String database, Properties info, String url) throws SQLException {
super(hostSpecs, user, database, info, url);
TypeInfo types = getTypeInfo();
if (haveMinimumServerVersion("8.3")) {
types.addCoreType("uuid", Oid.UUID, java.sql.Types.OTHER, "java.util.UUID", Oid.UUID_ARRAY);
}
}
}
| 30 | 138 | 0.612903 |
f84239a23e7c66847139f9020438161016db849f | 902 | package test_locally.api.status;
import com.slack.api.status.v1.LegacyStatusApiException;
import com.slack.api.status.v2.StatusApiException;
import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.Response;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class EqualityTest {
@Test
public void test() {
Request request = new Request.Builder()
.url("https://www.example.com/")
.build();
Response response = new Response.Builder()
.request(request)
.code(200)
.protocol(Protocol.HTTP_1_1)
.message("")
.build();
assertEquals(new LegacyStatusApiException(response, ""), new LegacyStatusApiException(response, ""));
assertEquals(new StatusApiException(response, ""), new StatusApiException(response, ""));
}
}
| 30.066667 | 109 | 0.641907 |
846f769e07b9192b593625d4cfabee5e12ad7272 | 2,380 | package com.pointcx.rocker.spring.boot.starter.web;
import com.fizzed.rocker.BindableRockerModel;
import com.fizzed.rocker.RenderingException;
import com.fizzed.rocker.RockerModel;
import com.fizzed.rocker.TemplateBindException;
import com.fizzed.rocker.runtime.OutputStreamOutput;
import com.fizzed.rocker.runtime.RockerBootstrap;
import com.pointcx.rocker.spring.boot.starter.RockerProperties;
import org.springframework.core.io.Resource;
import org.springframework.web.servlet.view.AbstractTemplateView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.Map;
public class RockerView extends AbstractTemplateView {
private String viewName;
private RockerBootstrap rockerBootstrap;
private RockerProperties properties;
public RockerView(String viewName, RockerBootstrap rockerBootstrap, RockerProperties properties) {
this.viewName = viewName;
this.rockerBootstrap = rockerBootstrap;
this.properties = properties;
}
@Override
protected void renderMergedTemplateModel(Map<String, Object> map, HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws Exception {
RockerModel model = rockerBootstrap.model(viewName);
BindableRockerModel bindableRockerModel = new BindableRockerModel(viewName, model.getClass().getCanonicalName(), model);
// -------------------------------------------------------------------
        // bind all parameters in the map to the rocker model
for (Map.Entry<String,Object> entry : map.entrySet()) {
try {
bindableRockerModel.bind(entry.getKey(), entry.getValue());
}catch (TemplateBindException bindException){
                // binding goes through the model's setter; if no such setter exists a TemplateBindException is thrown.
// ignore it
}
}
httpServletResponse.setContentType(properties.getContentType());
OutputStreamOutput output = bindableRockerModel.render((contentType, charsetName) -> {
try {
return new OutputStreamOutput(contentType, httpServletResponse.getOutputStream(), charsetName);
} catch (IOException e) {
throw new RenderingException(e.getMessage(), e);
}
});
}
}
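// --- Usage sketch (editor's addition, not part of the starter) ---
// From a controller's point of view the binding above is transparent: return the
// Rocker template path as the view name and add model attributes to the map; each
// entry matching a template argument is bound via bind(), the rest are silently
// skipped. Template path and attribute names are illustrative only.
@org.springframework.stereotype.Controller
class GreetingControllerSketch {

    @org.springframework.web.bind.annotation.GetMapping("/greeting")
    public org.springframework.web.servlet.ModelAndView greeting() {
        org.springframework.web.servlet.ModelAndView mav =
                new org.springframework.web.servlet.ModelAndView("views/greeting.rocker.html");
        mav.addObject("name", "world"); // bound to the template's `name` argument
        mav.addObject("extra", 42);     // ignored if the template has no such argument
        return mav;
    }
}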
| 41.754386 | 168 | 0.702941 |
43aa68171f34734bf0a2c76ff5095adf8d595bd8 | 2,762 | /**
* Copyright 2005-2016 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.gateway.handlers.detecting;
import io.fabric8.gateway.handlers.detecting.protocol.amqp.AmqpProtocol;
import io.fabric8.gateway.handlers.detecting.protocol.http.HttpProtocol;
import io.fabric8.gateway.handlers.detecting.protocol.mqtt.MqttProtocol;
import io.fabric8.gateway.handlers.detecting.protocol.openwire.OpenwireProtocol;
import io.fabric8.gateway.handlers.detecting.protocol.ssl.SslConfig;
import io.fabric8.gateway.handlers.detecting.protocol.ssl.SslProtocol;
import io.fabric8.gateway.handlers.detecting.protocol.stomp.StompProtocol;
import io.fabric8.gateway.loadbalancer.LoadBalancer;
import io.fabric8.gateway.loadbalancer.LoadBalancers;
import java.io.File;
import java.util.ArrayList;
/**
*/
public class DetectingGatewayVirtualHostTest extends AbstractDetectingGatewayTest {
@Override
public DetectingGateway createGateway() {
String loadBalancerType = LoadBalancers.ROUND_ROBIN_LOAD_BALANCER;
int stickyLoadBalancerCacheSize = LoadBalancers.STICKY_LOAD_BALANCER_DEFAULT_CACHE_SIZE;
LoadBalancer serviceLoadBalancer = LoadBalancers.createLoadBalancer(loadBalancerType, stickyLoadBalancerCacheSize);
ArrayList<Protocol> protocols = new ArrayList<Protocol>();
protocols.add(new StompProtocol());
protocols.add(new MqttProtocol());
protocols.add(new AmqpProtocol());
protocols.add(new OpenwireProtocol());
protocols.add(new HttpProtocol());
protocols.add(new SslProtocol());
DetectingGateway gateway = new DetectingGateway();
gateway.setPort(0);
gateway.setVertx(vertx);
SslConfig sslConfig = new SslConfig(new File(basedir(), "src/test/resources/server.ks"), "password");
sslConfig.setKeyPassword("password");
gateway.setSslConfig(sslConfig);
gateway.setServiceMap(serviceMap);
gateway.setProtocols(protocols);
gateway.setServiceLoadBalancer(serviceLoadBalancer);
gateway.setDefaultVirtualHost("broker");
gateway.setConnectionTimeout(5000);
gateways.add(gateway);
return gateway;
}
}
| 40.617647 | 123 | 0.749819 |
988d369db139bd2feeeb499b7dda8bf0527467fe | 2,360 | // Copyright © 2016-2019 Esko Luontola
// This software is released under the Apache License 2.0.
// The license text is at http://www.apache.org/licenses/LICENSE-2.0
package fi.luontola.cqrshotel.reservation.queries;
import fi.luontola.cqrshotel.framework.projections.AnnotatedProjection;
import fi.luontola.cqrshotel.framework.util.EventListener;
import fi.luontola.cqrshotel.hotel.Hotel;
import fi.luontola.cqrshotel.reservation.commands.SearchForAccommodation;
import fi.luontola.cqrshotel.reservation.events.PriceOffered;
import org.javamoney.moneta.Money;
import java.time.Clock;
import java.time.LocalDate;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
public class ReservationOfferView extends AnnotatedProjection {
private final Map<LocalDate, PriceOffered> offersByDate = new ConcurrentHashMap<>();
private final UUID reservationId;
private final Clock clock;
public ReservationOfferView(UUID reservationId, Clock clock) {
this.reservationId = reservationId;
this.clock = clock;
}
@EventListener
public void apply(PriceOffered offer) {
checkReservationId(offer.reservationId);
offersByDate.put(offer.date, offer);
}
// queries
public ReservationOffer query(SearchForAccommodation query) {
checkReservationId(query.reservationId);
var result = new ReservationOffer();
result.reservationId = query.reservationId;
result.arrival = query.arrival;
result.departure = query.departure;
var totalPrice = Money.of(0, Hotel.CURRENCY);
for (var date = query.arrival; date.isBefore(query.departure); date = date.plusDays(1)) {
var offer = offersByDate.get(date);
if (offer != null && offer.isStillValid(clock)) {
totalPrice = totalPrice.add(offer.price);
} else {
// offer missing for some day; don't set result.totalPrice
return result;
}
}
result.totalPrice = totalPrice;
return result;
}
// helpers
private void checkReservationId(UUID reservationId) {
if (!this.reservationId.equals(reservationId)) {
throw new IllegalArgumentException("this projection instance works only for a single reservation");
}
}
}
| 34.202899 | 111 | 0.699576 |
05d75f6fefb4b641c35f04fed739e791cbccfd83 | 2,026 | /* Generated By:JJTree: Do not edit this line. OCommitStatement.java Version 4.3 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=true,TRACK_TOKENS=true,NODE_PREFIX=O,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package com.orientechnologies.orient.core.sql.parser;
import com.orientechnologies.orient.core.command.OCommandContext;
import com.orientechnologies.orient.core.sql.executor.OInternalResultSet;
import com.orientechnologies.orient.core.sql.executor.OResultInternal;
import com.orientechnologies.orient.core.sql.executor.OResultSet;
import java.util.Map;
public class OCommitStatement extends OSimpleExecStatement {
protected OInteger retry;
public OCommitStatement(int id) {
super(id);
}
public OCommitStatement(OrientSql p, int id) {
super(p, id);
}
@Override public OResultSet executeSimple(OCommandContext ctx) {
ctx.getDatabase().commit();
OInternalResultSet result = new OInternalResultSet();
OResultInternal item = new OResultInternal();
item.setProperty("operation", "commit");
result.add(item);
return result;
}
@Override public void toString(Map<Object, Object> params, StringBuilder builder) {
builder.append("COMMIT");
if (retry != null) {
builder.append(" RETRY ");
retry.toString(params, builder);
}
}
@Override public OCommitStatement copy() {
OCommitStatement result = new OCommitStatement(-1);
result.retry = retry == null ? null : retry.copy();
return result;
}
@Override public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
OCommitStatement that = (OCommitStatement) o;
if (retry != null ? !retry.equals(that.retry) : that.retry != null)
return false;
return true;
}
@Override public int hashCode() {
return retry != null ? retry.hashCode() : 0;
}
}
/* JavaCC - OriginalChecksum=eaa0bc8f765fdaa017789953861bc0aa (do not edit this line) */
| 30.69697 | 163 | 0.71619 |
ed2d5aade66989211faef13ece557237dba92e78 | 338 | package dao;
import entity.Order;
import java.util.List;
public interface OrderDAO extends SuperDAO{
/*public List<Order> getAllOrders();
public Order getOrder(String orderId);
public boolean saveOrder(Order order);
public boolean deleteOrder(String orderId);
public boolean updateOrder(Order order);*/
}
| 16.9 | 48 | 0.724852 |
f5ffb0f2d9f54639ac4ac1a3d54c42136173430e | 5,800 | package com.mjchael.datapump.core.service;
import com.mjchael.datapump.core.model.AllTableColumn;
import lombok.SneakyThrows;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.*;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
@Service
public class DataPumpService {
@Autowired
public AllTableColumnService allTableColumnService;
@Autowired
public CustomQueryService customQueryService;
public List<String> generateInsert(String owner, String tableName, String whereClause) {
List<AllTableColumn> list = allTableColumnService.findByTableNameAndOwner(tableName.toUpperCase(), owner.toUpperCase());
list.sort(Comparator.comparing(AllTableColumn::getColumnId));
List<Object[]> dataList= customQueryService.executeQuery(generateStringQuery(list, whereClause));
return buildInsertStatement(dataList, list, tableName);
}
public void writeInFile(String owner, String tableName, String whereClause){
List<String> insertList = generateInsert(
owner,
tableName,
whereClause
);
File fileOut = new File("out/insert_"+tableName+".sql");
try (BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileOut)))) {
bufferedWriter.write("-- delete target records " + tableName);
bufferedWriter.newLine();
bufferedWriter.write("delete from " + tableName + " where " + whereClause + ";");
bufferedWriter.newLine();
bufferedWriter.write("-- insert " + tableName);
bufferedWriter.newLine();
for (String insertStatement : insertList) {
bufferedWriter.write(insertStatement);
bufferedWriter.newLine();
}
bufferedWriter.write("commit;");
bufferedWriter.newLine();
} catch (IOException io) {
io.printStackTrace();
}
}
private List<String> buildInsertStatement(List<Object[]> dataList, List<AllTableColumn> list, String tableName){
List<String> insertStatements = new ArrayList<>();
for (Object[] record : dataList) {
String insert = "insert into "+ tableName + "(" + getColumnQuery(list) +") values (";
StringBuilder insertValues = new StringBuilder();
for (Object object : record){
if (!insertValues.toString().trim().isEmpty()){
insertValues.append(",");
}
if (object instanceof BigDecimal){
insertValues.append(formatBigDecimal(object));
}
else if (object instanceof String || object instanceof Character){
insertValues.append(formatString(object));
}
else if (object instanceof Timestamp){
insertValues.append(formatDate(object));
}
else if (Optional.ofNullable(object).equals(Optional.empty())){
insertValues.append(object);
}
else {
throw new AssertionError("Unhandled data type" + object.getClass());
}
}
insert = insert + insertValues + ");";
insertStatements.add(insert);
}
return insertStatements;
}
private String formatDate(Object o){
LocalDateTime localDateTime = ((Timestamp) o).toLocalDateTime() ;
return "to_date('"
+ StringUtils.leftPad(String.valueOf(localDateTime.getDayOfMonth()), 2, "0")
+"/"
+ StringUtils.leftPad(String.valueOf(localDateTime.getMonthValue()), 2, "0")
+"/"
+ localDateTime.getYear()
+" "
+ StringUtils.leftPad(String.valueOf(localDateTime.getHour()), 2, "0")
+":"
+ StringUtils.leftPad(String.valueOf(localDateTime.getMinute()), 2, "0")
+":"
+ StringUtils.leftPad(String.valueOf(localDateTime.getSecond()), 2, "0")
+"', 'DD/MM/YYYY HH24:MI:SS')";
}
private String formatString(Object o){
return "'"+o+"'";
}
private String formatBigDecimal(Object o){
return o.toString();
}
/**
     * Generates an SQL statement from the given list of columns and where clause
     * @param list the table's columns
     * @param whereClause filter restricting the selected rows
     * @return the SELECT statement to execute
*/
private String generateStringQuery(List<AllTableColumn> list, String whereClause) {
if (list.isEmpty()) {
throw new AssertionError("Table does not exist");
}
if (whereClause.trim().isEmpty()) {
throw new AssertionError("Where clause cannot be null");
}
return "select " + getColumnQuery((list))
+ " from " + list.stream().findFirst().map(AllTableColumn::getTableName).get()
+ " where " + whereClause;
}
/**
     * Builds the comma-separated list of column names
     * @param list the table's columns
     * @return comma-separated column names taken from the given {@link AllTableColumn} list
*/
private String getColumnQuery(List<AllTableColumn> list) {
return list
.stream()
.map(AllTableColumn::getColumnName)
.reduce("",
(generatedQuery, column) -> (generatedQuery + column + " "),
String::concat)
.trim()
.replace(" ", ", ");
}
}
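// --- Usage sketch (editor's addition, not part of the original service) ---
// Intended call pattern, assuming the service is injected by Spring and the backing
// database is reachable. Owner, table and filter values are illustrative; the call
// writes out/insert_ORDERS.sql containing a DELETE of the matching rows, the
// generated INSERT statements and a final commit.
@Service
class DataPumpRunnerSketch {

    @Autowired
    private DataPumpService dataPumpService;

    public void exportOpenOrders() {
        dataPumpService.writeInFile("SALES", "ORDERS", "STATUS = 'OPEN'");
    }
}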
| 37.662338 | 128 | 0.588621 |
6170ad981e568b9e8491ab0f43361264bca6dca6 | 5,333 | package demo.com.campussecondbookrecycle.Views;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import demo.com.campussecondbookrecycle.R;
import demo.com.campussecondbookrecycle.common.Const;
public class CollapsibleTextView extends LinearLayout implements View.OnClickListener {
private TextView mTvContent,mTvOpDesc;
private LinearLayout mLlOp;
private ImageView mIvop;
private boolean isCollapsible = false;
private boolean folded = true;
private boolean flag = false;
private int mMaxHeight =200;
public CollapsibleTextView(Context context, AttributeSet attrs) {
super(context, attrs);
View view = inflate(context, R.layout.view_collapsible,this);
mTvContent = view.findViewById(R.id.tv_content);
mTvOpDesc = view.findViewById(R.id.tv_op_desc);
mLlOp = view.findViewById(R.id.ll_op);
mIvop = view.findViewById(R.id.iv_op);
mLlOp.setOnClickListener(this);
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
super.onLayout(changed, l, t, r, b);
int count = 0;
count++;
Log.d("count",count+"");
if (!flag){
if (mTvContent.getLineCount() > Const.TEXT_MAX_LINE){
Log.d("linecount",mTvContent.getLineCount()+"");
mTvContent.setMaxLines(Const.TEXT_MAX_LINE);
mLlOp.setVisibility(VISIBLE);
mIvop.setImageResource(R.drawable.down);
mTvOpDesc.setText("查看更多");
isCollapsible = true;
folded = true;
}else {
mLlOp.setVisibility(GONE);
isCollapsible = false;
}
}else {
if (isCollapsible && folded){
mTvContent.setMaxLines(Integer.MAX_VALUE);
mTvOpDesc.setText("收起");
mIvop.setImageResource(R.drawable.up);
}else if(isCollapsible && !folded){
mTvContent.setMaxLines(Const.TEXT_MAX_LINE);
mTvOpDesc.setText("查看更多");
mIvop.setImageResource(R.drawable.down);
}
}
}
// @Override
// protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
// if (!flag){
// if (mTvContent.getLineCount() > Const.TEXT_MAX_LINE){
// Log.d("linecount",mTvContent.getLineCount()+"");
// mTvContent.setMaxLines(Const.TEXT_MAX_LINE);
// mLlOp.setVisibility(VISIBLE);
// mIvop.setImageResource(R.drawable.down);
// mTvOpDesc.setText("查看更多");
// isCollapsible = true;
// collapsible = true;
// flag = true;
// }else {
// mLlOp.setVisibility(GONE);
// isCollapsible = false;
// }
// }else {
// if(isCollapsible && collapsible){
// mTvContent.setMaxLines(Integer.MAX_VALUE);
// mLlOp.setVisibility(VISIBLE);
// mIvop.setImageResource(R.drawable.up);
// mTvOpDesc.setText("收起");
// isCollapsible = true;
// collapsible = false;
// }else if(isCollapsible && !collapsible){
// mIvop.setImageResource(R.drawable.down);
// mTvOpDesc.setText("查看更多");
// collapsible = true;
// }
// }
// super.onMeasure(widthMeasureSpec, heightMeasureSpec);
// }
public void setText(String text){
mTvContent.setText(text);
requestLayout();
}
@Override
public void onClick(View v) {
flag = true;
if (folded){
requestLayout();
folded = false;
}else {
requestLayout();
folded = true;
}
// if(isCollapsible && collapsible){
// Log.d("click","2222");
// mTvContent.setMaxLines(Integer.MAX_VALUE);
// mIvop.setImageResource(R.drawable.up);
// mTvOpDesc.setText("收起");
// collapsible = false;
// }else if(isCollapsible && !collapsible){
// Log.d("click","13331");
// mTvContent.setMaxLines(Const.TEXT_MAX_LINE);
// mIvop.setImageResource(R.drawable.down);
// collapsible = true;
// }
// post(new InnerRunnable());
}
// class InnerRunnable implements Runnable{
// @Override
// public void run() {
// Log.d("click","111111");
// if(isCollapsible && collapsible){
// Log.d("click","2222");
// mTvContent.setMaxLines(Integer.MAX_VALUE);
// mIvop.setImageResource(R.drawable.up);
// mTvOpDesc.setText("收起");
// collapsible = false;
// return;
// }
// if(isCollapsible && !collapsible){
// Log.d("click","13331");
// mTvContent.setMaxLines(Const.TEXT_MAX_LINE);
// mIvop.setImageResource(R.drawable.down);
// collapsible = true;
// }
// }
// }
}
| 35.085526 | 87 | 0.551284 |
216e02b079e82f02dea4b893aa062428676e2c2c | 523 | package com.joshstrohminger.stepbystep;
import android.app.Fragment;
public class FragmentMap {
private int fragmentName;
private Class<? extends Fragment> fragmentClass;
public FragmentMap(int fragmentName, Class<? extends Fragment> fragmentClass) {
this.fragmentName = fragmentName;
this.fragmentClass = fragmentClass;
}
public int getFragmentName() {
return fragmentName;
}
public Class<? extends Fragment> getFragmentClass() {
return fragmentClass;
}
}
| 27.526316 | 83 | 0.705545 |
52eb101a9e24374e95fcdb51c104dfb36c81a785 | 1,965 | package me.deecaad.weaponmechanics.weapon.weaponevents;
import me.deecaad.core.utils.primitive.DoubleMap;
import me.deecaad.weaponmechanics.weapon.explode.regeneration.BlockRegenSorter;
import me.deecaad.weaponmechanics.weapon.projectile.weaponprojectile.WeaponProjectile;
import org.bukkit.block.Block;
import org.bukkit.entity.LivingEntity;
import org.bukkit.event.Cancellable;
import org.bukkit.event.HandlerList;
import org.jetbrains.annotations.NotNull;
import java.util.List;
public class ProjectileExplodeEvent extends ProjectileEvent implements Cancellable {
private static final HandlerList HANDLERS = new HandlerList();
private List<Block> blocks;
private BlockRegenSorter sorter;
private DoubleMap<LivingEntity> entities;
private boolean isCancelled;
public ProjectileExplodeEvent(WeaponProjectile projectile, List<Block> blocks,
BlockRegenSorter sorter, DoubleMap<LivingEntity> entities) {
super(projectile);
this.blocks = blocks;
this.sorter = sorter;
this.entities = entities;
}
public List<Block> getBlocks() {
return blocks;
}
public void setBlocks(List<Block> blocks) {
this.blocks = blocks;
}
public BlockRegenSorter getSorter() {
return sorter;
}
public void setSorter(BlockRegenSorter sorter) {
this.sorter = sorter;
}
public DoubleMap<LivingEntity> getEntities() {
return entities;
}
public void setEntities(DoubleMap<LivingEntity> entities) {
this.entities = entities;
}
@Override
@NotNull
public HandlerList getHandlers() {
return HANDLERS;
}
public static HandlerList getHandlerList() {
return HANDLERS;
}
@Override
public boolean isCancelled() {
return isCancelled;
}
@Override
public void setCancelled(boolean cancelled) {
isCancelled = cancelled;
}
}
| 25.855263 | 94 | 0.69771 |
ae4e098cb69a15a6d88322e6d26fcb9a3d01f7bb | 7,530 | /*
* LICENSE
*
* "THE BEER-WARE LICENSE" (Revision 42):
* "Sven Strittmatter" <[email protected]> wrote this file.
* As long as you retain this notice you can do whatever you want with
* this stuff. If we meet some day, and you think this stuff is worth it,
* you can buy me a beer in return.
*/
package org.jenkinsci.plugins.darcs;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Launcher.ProcStarter;
import hudson.util.ArgumentListBuilder;
import java.io.ByteArrayOutputStream;
import java.util.Map;
/**
* Abstracts the Darcs command.
*
* @author Sven Strittmatter <[email protected]>
*/
public class DarcsCmd {
/**
* `darcs changes` command.
*/
private static final String CMD_CHANGES = "changes";
/**
* `darcs pull` command.
*/
private static final String CMD_PULL = "pull";
/**
* `darcs get` command.
*/
private static final String CMD_GET = "get";
// Command options
private static final String OPT_REPO = "--repo=";
private static final String OPT_XML_OUTPUT = "--xml-output";
private static final String OPT_SUMMARY = "--summary";
private static final String OPT_LAST = "--last=";
private static final String OPT_REPODIR = "--repodir=";
private static final String OPT_COUNT = "--count";
private static final String OPT_ALL = "--all";
private static final String OPT_VERBOSE = "--verbose";
/**
* Used to start a process.
*/
private final Launcher launcher;
/**
* Name of the Darcs executable binary.
*/
private final String darcsExe;
/**
* Environment variables.
*/
private final Map<String, String> envs;
private final FilePath workingDir;
/**
* Creates a Darcs command object.
*
* @param launcher starts a process
* @param envs environment variables
* @param darcsExe executable name
*/
public DarcsCmd(final Launcher launcher, final Map<String, String> envs, final String darcsExe, final FilePath workingDir) {
super();
this.envs = envs;
this.launcher = launcher;
this.darcsExe = darcsExe;
this.workingDir = workingDir;
}
/**
* Creates process starter.
*
* @param args builds argument list for command
* @return a process starter object
*/
public ProcStarter createProc(final ArgumentListBuilder args) {
final ProcStarter proc = launcher.launch();
proc.cmds(args);
proc.envs(envs);
proc.pwd(workingDir);
return proc;
}
public ByteArrayOutputStream lastSummarizedChanges(final String repo, final int n) throws DarcsCmdException {
return getChanges(repo, true, n);
}
public ByteArrayOutputStream allSummarizedChanges(final String repo) throws DarcsCmdException {
return getChanges(repo, true);
}
public ByteArrayOutputStream allChanges(final String repo) throws DarcsCmdException {
return getChanges(repo, false);
}
private ByteArrayOutputStream getChanges(final String repo, final boolean summarize) throws DarcsCmdException {
return getChanges(repo, summarize, 0);
}
private ByteArrayOutputStream getChanges(final String repo, final boolean summarize, final int n)
throws DarcsCmdException {
final ArgumentListBuilder args = new ArgumentListBuilder();
args.add(darcsExe)
.add(CMD_CHANGES)
.add(OPT_REPO + repo)
.add(OPT_XML_OUTPUT);
if (summarize) {
args.add(OPT_SUMMARY);
}
if (n > 0) {
args.add(OPT_LAST + n);
}
final ProcStarter proc = createProc(args);
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
proc.stdout(baos);
try {
final int ret = proc.join();
if (0 != ret) {
throw new DarcsCmdException("can not do darcs changes in repo " + repo);
}
} catch (Exception ex) {
throw new DarcsCmdException("can not do darcs changes in repo " + repo, ex);
}
return baos;
}
public int countChanges(final String repo) throws DarcsCmdException {
final ArgumentListBuilder args = new ArgumentListBuilder();
args.add(darcsExe)
.add(CMD_CHANGES)
.add(OPT_REPODIR + repo)
.add(OPT_COUNT);
final ProcStarter proc = createProc(args);
final ByteArrayOutputStream baos = new ByteArrayOutputStream();
proc.stdout(baos);
try {
final int ret = proc.join();
if (0 != ret) {
throw new DarcsCmdException("can not do darcs changes in repo " + repo);
}
} catch (Exception ex) {
throw new DarcsCmdException("can not do darcs changes in repo " + repo, ex);
}
return Integer.parseInt(baos.toString().trim());
}
public void pull(final String repo, final String from) throws DarcsCmdException {
final ArgumentListBuilder args = new ArgumentListBuilder();
args.add(darcsExe)
.add(CMD_PULL)
.add(from)
.add(OPT_REPODIR + repo)
.add(OPT_ALL)
.add(OPT_VERBOSE);
try {
final ProcStarter proc = createProc(args);
proc.stdout(this.launcher.getListener());
final int ret = proc.join();
if (0 != ret) {
throw new DarcsCmdException(String.format("Can't do darcs pull in repo %s! Return code: %d",
repo, ret));
}
} catch (Exception ex) {
throw new DarcsCmdException(String.format("Can't do darcs pull in repo %s!", repo), ex);
}
}
/**
* Do a fresh checkout of a repository.
*
* @param repo where to checkout
* @param from from where to get the repository
* @throws DarcsCmd.DarcsCmdException if can't do checkout
*/
public void get(final String repo, final String from) throws DarcsCmdException {
final ArgumentListBuilder args = new ArgumentListBuilder();
args.add(darcsExe)
.add(CMD_GET)
.add(from)
.add(repo);
try {
final ProcStarter proc = createProc(args);
proc.stdout(this.launcher.getListener());
final int ret = proc.join();
if (0 != ret) {
throw new DarcsCmdException(String.format("Getting repo with args %s failed! Return code: %d",
args.toStringWithQuote(), ret));
}
} catch (Exception ex) {
throw new DarcsCmdException(String.format("Can't get repo with args: %s", args.toStringWithQuote()), ex);
}
}
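// Illustrative usage only (not part of the original plugin source). Assuming a
// Jenkins 'launcher', environment map 'envs' and a 'workspace' FilePath are at
// hand, a caller might drive this wrapper as follows; repo path and URL are
// hypothetical:
//
// DarcsCmd darcs = new DarcsCmd(launcher, envs, "darcs", workspace);
// darcs.get("repo", "http://example.org/upstream"); // fresh checkout
// darcs.pull("repo", "http://example.org/upstream"); // update an existing repo
// int patches = darcs.countChanges("repo"); // number of patches present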
/**
* Darcs command exception.
*/
public static class DarcsCmdException extends RuntimeException {
/**
* Creates exception with message.
*
* @param string exception message
*/
public DarcsCmdException(final String string) {
super(string);
}
/**
* Creates exception with message and a previous exception.
*
* @param string exception message
* @param thrwbl previous exception
*/
public DarcsCmdException(final String string, final Throwable thrwbl) {
super(string, thrwbl);
}
}
}
| 31.506276 | 128 | 0.600664 |
614f44b0aec79d94a3db5349d1786b8548e27a96 | 1,674 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.olingo.client.core.uri;
import org.apache.commons.lang3.StringUtils;
import org.apache.olingo.client.api.uri.FilterArg;
public class FilterFunction implements FilterArg {
private final String function;
private final FilterArg[] params;
public FilterFunction(final String function, final FilterArg... params) {
this.function = function;
this.params = params;
}
@Override
public String build() {
final String[] strParams = params == null || params.length == 0 ? new String[0] : new String[params.length];
for (int i = 0; i < strParams.length; i++) {
strParams[i] = params[i].build();
}
return new StringBuilder(function).
append('(').
append(strParams.length == 0 ? StringUtils.EMPTY : StringUtils.join(strParams, ',')).
append(')').
toString();
}
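// Example (illustrative, not from the original sources): assuming arg1.build()
// yields "Name" and arg2.build() yields "'Jo'", then
// new FilterFunction("startswith", arg1, arg2).build() returns "startswith(Name,'Jo')";
// with no arguments it returns "startswith()".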
}
| 34.163265 | 112 | 0.700119 |
0e0c6d4bf8a030b56c27d55f814035e9634b9ce1 | 1,396 | package com.vantibolli.inventory.dto;
public class ItemQuantityDto {
private String name;
private String size;
private String warehouse;
private String country;
private int quantity;
/**
* @return the quantity
*/
public int getQuantity() {
return quantity;
}
/**
* @param quantity the quantity to set
*/
public void setQuantity(int quantity) {
this.quantity = quantity;
}
/**
* @return the country
*/
public String getCountry() {
return country;
}
/**
* @param country the country to set
*/
public void setCountry(String country) {
this.country = country;
}
/**
* @return the warehouse
*/
public String getWarehouse() {
return warehouse;
}
/**
* @param warehouse the warehouse to set
*/
public void setWarehouse(String warehouse) {
this.warehouse = warehouse;
}
/**
* @return the size
*/
public String getSize() {
return size;
}
/**
* @param size the size to set
*/
public void setSize(String size) {
this.size = size;
}
/**
* @return the name
*/
public String getName() {
return name;
}
/**
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
@Override
public String toString() {
return "ItemDto [name=" + name + ", size=" + size + ", warehouse=" + warehouse + ", country=" + country
+ ", quantity=" + quantity + "]";
}
}
| 15.010753 | 105 | 0.622493 |
2b35f72b1b9bda194bcc8f0c0a8a333e9014bec9 | 543 | package com.padcmyanmar.poc_screen_implementation.dagger;
import com.padcmyanmar.poc_screen_implementation.network.PopularMoviesDataAgent;
import com.padcmyanmar.poc_screen_implementation.network.PopularMoviesDataAgentImpl;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
/**
* Created by yekokohtet on 1/11/18.
*/
@Module
public class NetworkModule {
@Provides
@Singleton
public PopularMoviesDataAgent providePopularMoviesDataAgent() {
return new PopularMoviesDataAgentImpl();
}
}
| 22.625 | 84 | 0.797422 |
4f5e3db5d39500ff8d6ceb659f70f2730b0a661e | 19,629 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.autoscaling.capacity;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterInfo;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.DiskUsage;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.snapshots.InternalSnapshotsInfoService;
import org.elasticsearch.snapshots.SnapshotShardSizeInfo;
import org.elasticsearch.xpack.autoscaling.AutoscalingMetadata;
import org.elasticsearch.xpack.autoscaling.AutoscalingTestCase;
import org.elasticsearch.xpack.autoscaling.capacity.memory.AutoscalingMemoryInfo;
import org.elasticsearch.xpack.autoscaling.policy.AutoscalingPolicy;
import org.elasticsearch.xpack.autoscaling.policy.AutoscalingPolicyMetadata;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.StreamSupport;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
public class AutoscalingCalculateCapacityServiceTests extends AutoscalingTestCase {
public void testMultiplePoliciesFixedCapacity() {
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(new FixedAutoscalingDeciderService()));
Set<String> policyNames = IntStream.range(0, randomIntBetween(1, 10))
.mapToObj(i -> "test_ " + randomAlphaOfLength(10))
.collect(Collectors.toSet());
SortedMap<String, AutoscalingPolicyMetadata> policies = new TreeMap<>(
policyNames.stream()
.map(s -> Tuple.tuple(s, new AutoscalingPolicyMetadata(new AutoscalingPolicy(s, randomRoles(), randomFixedDeciders()))))
.collect(Collectors.toMap(Tuple::v1, Tuple::v2))
);
ClusterState state = ClusterState.builder(ClusterName.DEFAULT)
.metadata(Metadata.builder().putCustom(AutoscalingMetadata.NAME, new AutoscalingMetadata(policies)))
.build();
SortedMap<String, AutoscalingDeciderResults> resultsMap = service.calculate(
state,
ClusterInfo.EMPTY,
null,
AutoscalingMemoryInfo.EMPTY
);
assertThat(resultsMap.keySet(), equalTo(policyNames));
for (Map.Entry<String, AutoscalingDeciderResults> entry : resultsMap.entrySet()) {
AutoscalingDeciderResults results = entry.getValue();
SortedMap<String, Settings> deciders = policies.get(entry.getKey()).policy().deciders();
assertThat(deciders.size(), equalTo(1));
Settings configuration = deciders.values().iterator().next();
AutoscalingCapacity requiredCapacity = calculateFixedDeciderCapacity(configuration);
assertThat(results.requiredCapacity(), equalTo(requiredCapacity));
assertThat(results.results().size(), equalTo(1));
AutoscalingDeciderResult deciderResult = results.results().get(deciders.firstKey());
assertNotNull(deciderResult);
assertThat(deciderResult.requiredCapacity(), equalTo(requiredCapacity));
ByteSizeValue storage = configuration.getAsBytesSize(FixedAutoscalingDeciderService.STORAGE.getKey(), null);
ByteSizeValue memory = configuration.getAsMemory(FixedAutoscalingDeciderService.MEMORY.getKey(), null);
int nodes = FixedAutoscalingDeciderService.NODES.get(configuration);
assertThat(deciderResult.reason(), equalTo(new FixedAutoscalingDeciderService.FixedReason(storage, memory, nodes)));
assertThat(
deciderResult.reason().summary(),
equalTo("fixed storage [" + storage + "] memory [" + memory + "] nodes [" + nodes + "]")
);
// there are no nodes in any tier.
assertThat(results.currentCapacity(), equalTo(AutoscalingCapacity.ZERO));
assertThat(results.currentNodes(), equalTo(Collections.emptySortedSet()));
}
}
public void testDefaultDeciders() {
FixedAutoscalingDeciderService defaultOn = new FixedAutoscalingDeciderService() {
@Override
public boolean defaultOn() {
return true;
}
@Override
public String name() {
return "default_on";
}
};
FixedAutoscalingDeciderService defaultOff = new FixedAutoscalingDeciderService();
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(defaultOn, defaultOff));
ClusterState state = ClusterState.builder(ClusterName.DEFAULT)
.metadata(
Metadata.builder()
.putCustom(
AutoscalingMetadata.NAME,
new AutoscalingMetadata(
new TreeMap<>(
Map.of("test", new AutoscalingPolicyMetadata(new AutoscalingPolicy("test", randomRoles(), new TreeMap<>())))
)
)
)
)
.build();
assertThat(
service.calculate(state, ClusterInfo.EMPTY, SnapshotShardSizeInfo.EMPTY, AutoscalingMemoryInfo.EMPTY)
.get("test")
.results()
.keySet(),
equalTo(Set.of(defaultOn.name()))
);
}
private SortedMap<String, Settings> randomFixedDeciders() {
Settings.Builder settings = Settings.builder();
if (randomBoolean()) {
settings.put(FixedAutoscalingDeciderService.STORAGE.getKey(), randomByteSizeValue());
}
if (randomBoolean()) {
settings.put(FixedAutoscalingDeciderService.MEMORY.getKey(), randomByteSizeValue());
}
settings.put(FixedAutoscalingDeciderService.NODES.getKey(), randomIntBetween(1, 10));
return new TreeMap<>(Map.of(FixedAutoscalingDeciderService.NAME, settings.build()));
}
private AutoscalingCapacity calculateFixedDeciderCapacity(Settings configuration) {
ByteSizeValue storage = configuration.getAsBytesSize(FixedAutoscalingDeciderService.STORAGE.getKey(), null);
ByteSizeValue memory = configuration.getAsBytesSize(FixedAutoscalingDeciderService.MEMORY.getKey(), null);
int nodes = FixedAutoscalingDeciderService.NODES.get(configuration);
ByteSizeValue totalStorage = storage != null ? new ByteSizeValue(storage.getBytes() * nodes) : null;
ByteSizeValue totalMemory = memory != null ? new ByteSizeValue(memory.getBytes() * nodes) : null;
if (totalStorage == null && totalMemory == null) {
return null;
} else {
return new AutoscalingCapacity(
new AutoscalingCapacity.AutoscalingResources(totalStorage, totalMemory),
new AutoscalingCapacity.AutoscalingResources(storage, memory)
);
}
}
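// Worked example (illustrative): with storage=1gb, memory unset and nodes=3, the
// fixed decider math above yields total storage 3gb, per-node storage 1gb and
// null memory values; if both storage and memory are unset the method returns null.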
public void testContext() {
ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build();
ClusterInfo info = ClusterInfo.EMPTY;
SortedSet<String> roleNames = randomRoles();
boolean hasDataRole = roleNames.stream().anyMatch(r -> r.equals("data") || r.startsWith("data_"));
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(new FixedAutoscalingDeciderService()));
SnapshotShardSizeInfo snapshotShardSizeInfo = new SnapshotShardSizeInfo(
ImmutableOpenMap.<InternalSnapshotsInfoService.SnapshotShard, Long>builder().build()
);
AutoscalingDeciderContext context = service.createContext(
roleNames,
state,
info,
snapshotShardSizeInfo,
n -> randomNonNegativeLong()
);
assertSame(state, context.state());
assertThat(context.nodes(), equalTo(Set.of()));
assertThat(context.currentCapacity(), equalTo(AutoscalingCapacity.ZERO));
assertThat(context.info(), sameInstance(info));
assertThat(context.snapshotShardSizeInfo(), sameInstance(snapshotShardSizeInfo));
Set<DiscoveryNodeRole> roles = roleNames.stream().map(DiscoveryNode::getRoleFromRoleName).collect(Collectors.toSet());
Set<DiscoveryNodeRole> otherRoles = mutateRoles(roleNames).stream()
.map(DiscoveryNode::getRoleFromRoleName)
.collect(Collectors.toSet());
final long memory = between(0, 1000);
state = ClusterState.builder(ClusterName.DEFAULT)
.nodes(
DiscoveryNodes.builder().add(new DiscoveryNode("nodeId", buildNewFakeTransportAddress(), Map.of(), roles, Version.CURRENT))
)
.build();
context = new AutoscalingCalculateCapacityService.DefaultAutoscalingDeciderContext(roleNames, state, info, null, n -> memory);
assertThat(context.nodes().size(), equalTo(1));
assertThat(context.nodes(), equalTo(StreamSupport.stream(state.nodes().spliterator(), false).collect(Collectors.toSet())));
if (hasDataRole) {
assertNull(context.currentCapacity());
} else {
assertThat(context.currentCapacity().node().memory(), equalTo(new ByteSizeValue(memory)));
assertThat(context.currentCapacity().total().memory(), equalTo(new ByteSizeValue(memory)));
assertThat(context.currentCapacity().node().storage(), equalTo(ByteSizeValue.ZERO));
assertThat(context.currentCapacity().total().storage(), equalTo(ByteSizeValue.ZERO));
}
ImmutableOpenMap.Builder<String, DiskUsage> leastUsagesBuilder = ImmutableOpenMap.builder();
ImmutableOpenMap.Builder<String, DiskUsage> mostUsagesBuilder = ImmutableOpenMap.builder();
DiscoveryNodes.Builder nodes = DiscoveryNodes.builder();
Set<DiscoveryNode> expectedNodes = new HashSet<>();
long sumTotal = 0;
long maxTotal = 0;
for (int i = 0; i < randomIntBetween(1, 5); ++i) {
String nodeId = "nodeId" + i;
boolean useOtherRoles = randomBoolean();
DiscoveryNode node = new DiscoveryNode(
nodeId,
buildNewFakeTransportAddress(),
Map.of(),
useOtherRoles ? otherRoles : roles,
Version.CURRENT
);
nodes.add(node);
if (useOtherRoles == false) {
long total = randomLongBetween(1, 1L << 40);
DiskUsage diskUsage = new DiskUsage(nodeId, null, randomAlphaOfLength(5), total, randomLongBetween(0, total));
leastUsagesBuilder.put(nodeId, diskUsage);
if (randomBoolean()) {
diskUsage = new DiskUsage(nodeId, null, diskUsage.getPath(), total, diskUsage.getFreeBytes());
}
mostUsagesBuilder.put(nodeId, diskUsage);
sumTotal += total;
maxTotal = Math.max(total, maxTotal);
expectedNodes.add(node);
} else {
long total1 = randomLongBetween(0, 1L << 40);
leastUsagesBuilder.put(nodeId, new DiskUsage(nodeId, null, randomAlphaOfLength(5), total1, randomLongBetween(0, total1)));
long total2 = randomLongBetween(0, 1L << 40);
mostUsagesBuilder.put(nodeId, new DiskUsage(nodeId, null, randomAlphaOfLength(5), total2, randomLongBetween(0, total2)));
}
}
state = ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build();
ImmutableOpenMap<String, DiskUsage> leastUsages = leastUsagesBuilder.build();
ImmutableOpenMap<String, DiskUsage> mostUsages = mostUsagesBuilder.build();
info = new ClusterInfo(leastUsages, mostUsages, null, null, null);
context = new AutoscalingCalculateCapacityService.DefaultAutoscalingDeciderContext(roleNames, state, info, null, n -> memory);
assertThat(context.nodes(), equalTo(expectedNodes));
if (hasDataRole) {
assertThat(context.currentCapacity().node().storage(), equalTo(new ByteSizeValue(maxTotal)));
assertThat(context.currentCapacity().total().storage(), equalTo(new ByteSizeValue(sumTotal)));
} else {
assertThat(context.currentCapacity().node().storage(), equalTo(ByteSizeValue.ZERO));
assertThat(context.currentCapacity().total().storage(), equalTo(ByteSizeValue.ZERO));
}
assertThat(context.currentCapacity().node().memory(), equalTo(new ByteSizeValue(memory * Integer.signum(expectedNodes.size()))));
assertThat(context.currentCapacity().total().memory(), equalTo(new ByteSizeValue(memory * expectedNodes.size())));
if (expectedNodes.isEmpty() == false) {
context = new AutoscalingCalculateCapacityService.DefaultAutoscalingDeciderContext(
roleNames,
state,
info,
null,
AutoscalingMemoryInfo.EMPTY
);
assertThat(context.nodes(), equalTo(expectedNodes));
assertThat(context.currentCapacity(), is(nullValue()));
String multiPathNodeId = randomFrom(expectedNodes).getId();
mostUsagesBuilder = ImmutableOpenMap.builder(mostUsages);
DiskUsage original = mostUsagesBuilder.get(multiPathNodeId);
mostUsagesBuilder.put(
multiPathNodeId,
new DiskUsage(
multiPathNodeId,
null,
randomValueOtherThan(original.getPath(), () -> randomAlphaOfLength(5)),
original.getTotalBytes(),
original.getFreeBytes()
)
);
info = new ClusterInfo(leastUsages, mostUsagesBuilder.build(), null, null, null);
context = new AutoscalingCalculateCapacityService.DefaultAutoscalingDeciderContext(roleNames, state, info, null, n -> memory);
assertThat(context.nodes(), equalTo(expectedNodes));
if (hasDataRole) {
assertThat(context.currentCapacity(), is(nullValue()));
} else {
assertThat(context.currentCapacity().node().memory(), equalTo(new ByteSizeValue(memory)));
assertThat(context.currentCapacity().total().memory(), equalTo(new ByteSizeValue(memory * expectedNodes.size())));
assertThat(context.currentCapacity().node().storage(), equalTo(ByteSizeValue.ZERO));
assertThat(context.currentCapacity().total().storage(), equalTo(ByteSizeValue.ZERO));
}
}
}
public void testValidateDeciderName() {
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(new FixedAutoscalingDeciderService()));
String badDeciderName = randomValueOtherThan(FixedAutoscalingDeciderService.NAME, () -> randomAlphaOfLength(8));
AutoscalingPolicy policy = new AutoscalingPolicy(
randomAlphaOfLength(8),
randomRoles(),
new TreeMap<>(Map.of(badDeciderName, Settings.EMPTY))
);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> service.validate(policy));
assertThat(exception.getMessage(), equalTo("unknown decider [" + badDeciderName + "]"));
}
public void testValidateDeciderRoles() {
Set<String> roles = randomRoles();
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(new FixedAutoscalingDeciderService() {
@Override
public List<DiscoveryNodeRole> roles() {
return roles.stream().map(DiscoveryNode::getRoleFromRoleName).collect(Collectors.toList());
}
}));
SortedSet<String> badRoles = new TreeSet<>(randomRoles());
badRoles.removeAll(roles);
AutoscalingPolicy policy = new AutoscalingPolicy(
FixedAutoscalingDeciderService.NAME,
badRoles,
new TreeMap<>(Map.of(FixedAutoscalingDeciderService.NAME, Settings.EMPTY))
);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> service.validate(policy));
assertThat(
exception.getMessage(),
equalTo("decider [" + FixedAutoscalingDeciderService.NAME + "] not applicable to policy with roles [ " + badRoles + "]")
);
}
public void testValidateSettingName() {
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(new FixedAutoscalingDeciderService()));
Set<String> legalNames = Set.of(
FixedAutoscalingDeciderService.STORAGE.getKey(),
FixedAutoscalingDeciderService.MEMORY.getKey(),
FixedAutoscalingDeciderService.NODES.getKey()
);
String badSettingName = randomValueOtherThanMany(legalNames::contains, () -> randomAlphaOfLength(8));
AutoscalingPolicy policy = new AutoscalingPolicy(
randomAlphaOfLength(8),
randomRoles(),
new TreeMap<>(
Map.of(FixedAutoscalingDeciderService.NAME, Settings.builder().put(badSettingName, randomAlphaOfLength(1)).build())
)
);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> service.validate(policy));
assertThat(
exception.getMessage(),
equalTo("unknown setting [" + badSettingName + "] for decider [" + FixedAutoscalingDeciderService.NAME + "]")
);
}
public void testValidateSettingValue() {
AutoscalingCalculateCapacityService service = new AutoscalingCalculateCapacityService(Set.of(new FixedAutoscalingDeciderService()));
String value = randomValueOtherThanMany(s -> Character.isDigit(s.charAt(0)), () -> randomAlphaOfLength(5));
AutoscalingPolicy policy = new AutoscalingPolicy(
randomAlphaOfLength(8),
randomRoles(),
new TreeMap<>(
Map.of(
FixedAutoscalingDeciderService.NAME,
Settings.builder().put(FixedAutoscalingDeciderService.STORAGE.getKey(), value).build()
)
)
);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> service.validate(policy));
assertThat(exception.getMessage(), containsString("[" + value + "]"));
assertThat(exception.getMessage(), containsString("[" + FixedAutoscalingDeciderService.STORAGE.getKey() + "]"));
}
}
| 51.928571 | 140 | 0.662846 |
a843eecff43ae2ee9f2d58608ab842b9f0add0ec | 6,152 | package gov.healthit.chpl.auth.dao.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.Set;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.TestExecutionListeners;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.support.DependencyInjectionTestExecutionListener;
import org.springframework.test.context.support.DirtiesContextTestExecutionListener;
import org.springframework.test.context.transaction.TransactionalTestExecutionListener;
import com.github.springtestdbunit.DbUnitTestExecutionListener;
import com.github.springtestdbunit.annotation.DatabaseSetup;
import gov.healthit.chpl.auth.dao.UserDAO;
import gov.healthit.chpl.auth.dao.UserPermissionDAO;
import gov.healthit.chpl.auth.dto.UserDTO;
import gov.healthit.chpl.auth.dto.UserPermissionDTO;
import gov.healthit.chpl.auth.permission.GrantedPermission;
import gov.healthit.chpl.auth.permission.UserPermissionRetrievalException;
import gov.healthit.chpl.auth.user.JWTAuthenticatedUser;
import gov.healthit.chpl.auth.user.UserCreationException;
import gov.healthit.chpl.auth.user.UserRetrievalException;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { gov.healthit.chpl.auth.CHPLAuthenticationSecurityTestConfig.class })
@TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DirtiesContextTestExecutionListener.class,
TransactionalTestExecutionListener.class, DbUnitTestExecutionListener.class })
@DatabaseSetup("classpath:data/testData.xml")
public class UserDaoTest {
@Autowired
private UserDAO dao;
@Autowired
private UserPermissionDAO permDao;
@Autowired
private BCryptPasswordEncoder bCryptPasswordEncoder;
private static final String ROLE_ACB = "ROLE_ACB";
private static JWTAuthenticatedUser authUser;
@BeforeClass
public static void setUpClass() throws Exception {
authUser = new JWTAuthenticatedUser();
authUser.setFullName("Administrator");
authUser.setId(-2L);
authUser.setFriendlyName("Administrator");
authUser.setSubjectName("admin");
authUser.getPermissions().add(new GrantedPermission("ROLE_ADMIN"));
SecurityContextHolder.getContext().setAuthentication(authUser);
}
@Test(expected = UserRetrievalException.class)
public void testCreateAndDeleteUser() throws UserCreationException, UserRetrievalException {
String password = "password";
String encryptedPassword = bCryptPasswordEncoder.encode(password);
UserDTO testUser = new UserDTO();
testUser.setAccountEnabled(true);
testUser.setAccountExpired(false);
testUser.setAccountLocked(false);
testUser.setCredentialsExpired(false);
testUser.setEmail("[email protected]");
testUser.setFullName("Katy");
testUser.setFriendlyName("Ekey-Test");
testUser.setPhoneNumber("443-745-0987");
testUser.setSubjectName("testUser");
testUser.setTitle("Developer");
testUser = dao.create(testUser, encryptedPassword);
assertNotNull(testUser.getId());
assertEquals("testUser", testUser.getSubjectName());
Long insertedUserId = testUser.getId();
dao.delete(insertedUserId);
dao.getById(insertedUserId);
}
@Test
public void testAddAcbAdminPermission() throws UserRetrievalException, UserPermissionRetrievalException {
UserDTO toEdit = dao.getByName("TESTUSER");
assertNotNull(toEdit);
dao.removePermission(toEdit.getSubjectName(), ROLE_ACB);
dao.addPermission(toEdit.getSubjectName(), ROLE_ACB);
Set<UserPermissionDTO> permissions = permDao.findPermissionsForUser(toEdit.getId());
assertNotNull(permissions);
boolean hasAcbStaffRole = false;
for (UserPermissionDTO perm : permissions) {
if (ROLE_ACB.equals(perm.toString())) {
hasAcbStaffRole = true;
}
}
assertTrue(hasAcbStaffRole);
}
@Test
public void testAddInvalidPermission() throws UserRetrievalException, UserPermissionRetrievalException {
UserDTO toEdit = dao.getByName("TESTUSER");
assertNotNull(toEdit);
boolean caught = false;
try {
dao.addPermission(toEdit.getSubjectName(), "BOGUS");
} catch (UserPermissionRetrievalException ex) {
caught = true;
}
assertTrue(caught);
}
@Test
public void testFindUser() {
UserDTO toFind = new UserDTO();
toFind.setSubjectName("TESTUSER");
toFind.setFullName("TEST");
toFind.setFriendlyName("USER");
toFind.setEmail("[email protected]");
toFind.setPhoneNumber("(301) 560-6999");
toFind.setTitle("employee");
UserDTO found = dao.findUser(toFind);
assertNotNull(found);
assertNotNull(found.getId());
assertEquals(1, found.getId().longValue());
}
/**
* Given the DAO is called
* When the passed in user id has been deleted
* Then null is returned
*
* @throws UserRetrievalException
*/
@Test(expected = UserRetrievalException.class)
public void testGetById_returnsNullForDeletedUser() throws UserRetrievalException {
dao.getById(-3L);
}
/**
* Given the DAO is called
* When the passed in user id is valid/active
* Then a result is returned
*
* @throws UserRetrievalException
*/
@Test
public void testGetById_returnsResultForActiveUser() throws UserRetrievalException {
UserDTO userDto = null;
userDto = dao.getById(-2L);
assertTrue(userDto != null);
}
}
| 36.838323 | 116 | 0.725293 |
9f9c7a614ab4ea1ef2fc08cc500329acc8f74888 | 1,293 | package com.yunussen.mobilappws;
import org.modelmapper.ModelMapper;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
import org.springframework.context.annotation.Bean;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import com.yunussen.mobilappws.security.AppProperties;
@SpringBootApplication
public class MobilAppWsApplication extends SpringBootServletInitializer {
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(MobilAppWsApplication.class);
}
public static void main(String[] args) {
SpringApplication.run(MobilAppWsApplication.class, args);
}
@Bean
public BCryptPasswordEncoder bCryptPasswordEncoder() {
return new BCryptPasswordEncoder();
}
@Bean
public SpringApplicationContext springApplicationContext() {
return new SpringApplicationContext();
}
@Bean(name = "AppProperties")
public AppProperties getAppProperties() {
return new AppProperties();
}
@Bean
public ModelMapper getModelMapper() {
return new ModelMapper();
}
}
| 29.386364 | 85 | 0.823666 |
b67c6e8834c3dabea24283dbf1b259d0a60b7b9b | 895 | package implementation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
public class Boj2566 {
public static final String NEW_LINE = "\n";
public static void main(String[] args) throws Exception{
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
StringBuilder sb = new StringBuilder();
int max = 0, num = 0;
int[][] RC = new int[9][9];
for(int i = 0; i < 9; i++){
StringTokenizer st = new StringTokenizer(br.readLine(), " ");
for(int j = 0; j < 9; j++){
num = Integer.parseInt(st.nextToken());
RC[i][j] = num;
max = Math.max(num, max);
}
}
LOOP: for(int i = 0; i < 9; i++){
for(int j = 0; j < 9; j++){
if(max == RC[i][j]){
sb.append(max).append(NEW_LINE).append((i+1)+" "+(j+1));
break LOOP;
}
}
}
System.out.println(sb.toString());
}
}
| 23.552632 | 75 | 0.603352 |
c6a0e3803490cbad1a594f64d7a9b4622cfaecbf | 2,113 | package com.example.controller;
import com.example.entity.User;
import com.example.service.UserService;
import com.example.service.impl.UserServiceImpl;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
* @author 王鸿轩
* @description
* @since 2021/5/11 19:10
*/
public class LoginServlet extends HttpServlet {
UserService userService = new UserServiceImpl();
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
this.doGet(request, response);
}
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Read the request parameters
String name = request.getParameter("name");
String pwd = request.getParameter("pwd");
// Wrap them into a User object
User user = new User(name, pwd);
// Delegate the credential check to the service layer
User u = userService.checkUser(user);
System.out.println(u);
if (u != null) {
// response.getWriter().write("success");
// request.getRequestDispatcher("hello").forward(request, response);
// With a request forward here, every refresh by the user would run through the whole servlet chain again
// After a successful login it is enough to simply refresh the welcome page, so redirect instead
// Redirect:
// 1. the browser sends two requests
// 2. the address shown in the browser changes
// 3. two request objects and two response objects are created during processing
// 4. the two servlets cannot share the same request and response objects
response.sendRedirect("hello");
}
else {
request.setCharacterEncoding("utf-8");
// Set an attribute shared with the servlet that handles the forwarded request
request.setAttribute("str", "Incorrect username or password");
// Request forward:
// the target servlet is addressed with a relative path, and no further logic is needed here afterwards
// 1. the client sends only one request
// 2. the browser address bar does not change
// 3. only one request and one response object exist during processing
// 4. the servlets share the same request and response
// 5. it is transparent to the client, which does not need to know which steps were performed
request.getRequestDispatcher("page").forward(request, response);
return;
}
}
}
| 32.015152 | 122 | 0.640795 |
a0ab83d87e45805b3daf1453cac1dd9c481c6e88 | 845 | package fi.nls.oskari.domain;
public class SelectItem implements Comparable<SelectItem> {
private String name = "";
private String value = "";
public int compareTo(SelectItem si2) {
if(si2 == null) {
return 0;
}
return this.getName().compareTo(si2.getName());
}
public SelectItem(String name, String value) {
super();
this.name = name;
this.value = value;
}
public String getName(boolean actual) {
if(actual) {
return name;
}
return getName();
}
public String getName() {
if(name == null || name.isEmpty()) {
return getValue();
}
return name;
}
public void setName(String name) {
this.name = name;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}
| 18.777778 | 59 | 0.592899 |
97adb77d55344f4af93092aaecc10e6fb6d5e856 | 257 | package com.bardlind.pattern.cor;
/**
* Created by baardl on 07.12.15.
*/
public interface EmailHandler {
//reference to the next handler in the chain
void setNext(EmailHandler handler);
//handle request
void handleRequest(Email email);
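// Sketch only (not part of this repository): a concrete handler in the chain of
// responsibility might look like the following, where SpamFilterHandler and
// looksLikeSpam() are hypothetical names:
//
// public class SpamFilterHandler implements EmailHandler {
//     private EmailHandler next;
//     public void setNext(EmailHandler handler) { this.next = handler; }
//     public void handleRequest(Email email) {
//         if (looksLikeSpam(email)) { return; }          // stop the chain here
//         if (next != null) next.handleRequest(email);   // otherwise pass it along
//     }
// }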
} | 23.363636 | 48 | 0.712062 |
8001e5466e9c0d0b5d4633aca103115a54a4c432 | 377 | package io.onedev.server.web.page.project.setting.webhook;
import io.onedev.server.web.page.base.BaseDependentCssResourceReference;
public class WebHookCssResourceReference extends BaseDependentCssResourceReference {
private static final long serialVersionUID = 1L;
public WebHookCssResourceReference() {
super(WebHookCssResourceReference.class, "web-hook.css");
}
}
| 26.928571 | 84 | 0.824934 |
eba72d4ab987b7f0112c8b52174b51cdd06e8f25 | 476 |
package org.openntf.domino.extmgr.events;
import org.openntf.domino.extmgr.EMBridgeEventParams;
public class AgentOpenEvent extends AbstractDocumentEvent {
private static EMBridgeEventParams[] params = { EMBridgeEventParams.SourceDbpath, EMBridgeEventParams.Noteid,
EMBridgeEventParams.Username };
@Override
public EMBridgeEventParams[] getParams() {
return params;
}
public AgentOpenEvent() {
super(EMEventIds.EM_AGENTOPEN.getId());
}
}
| 23.8 | 111 | 0.760504 |
ad1ed32a69e4b9ff9420b2ac504e9266b04e4eb5 | 224 | /**
* Simple demo domain, no JPA, just hardcoded users in {@link com.github.virgo47.respsec.main.domain.UserDao}.
* Point is - this is not polluted by any security at all.
*/
package com.github.virgo47.respsec.main.domain; | 44.8 | 110 | 0.741071 |
257fbcf4e9259512e156d0e41b2cf31d96629280 | 46,620 | package org.snpeff;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.snpeff.interval.Chromosome;
import org.snpeff.interval.Custom;
import org.snpeff.interval.Gene;
import org.snpeff.interval.Genome;
import org.snpeff.interval.Marker;
import org.snpeff.interval.Markers;
import org.snpeff.interval.Motif;
import org.snpeff.interval.NextProt;
import org.snpeff.interval.ProteinInteractionLocus;
import org.snpeff.interval.SpliceSite;
import org.snpeff.interval.Transcript;
import org.snpeff.interval.TranscriptSupportLevel;
import org.snpeff.logStatsServer.LogStats;
import org.snpeff.logStatsServer.VersionCheck;
import org.snpeff.motif.Jaspar;
import org.snpeff.motif.Pwm;
import org.snpeff.pdb.DistanceResult;
import org.snpeff.serializer.MarkerSerializer;
import org.snpeff.snpEffect.Config;
import org.snpeff.snpEffect.ErrorWarningType;
import org.snpeff.snpEffect.SnpEffectPredictor;
import org.snpeff.snpEffect.commandLine.CommandLine;
import org.snpeff.snpEffect.commandLine.SnpEffCmdAcat;
import org.snpeff.snpEffect.commandLine.SnpEffCmdBuild;
import org.snpeff.snpEffect.commandLine.SnpEffCmdBuildNextProt;
import org.snpeff.snpEffect.commandLine.SnpEffCmdCds;
import org.snpeff.snpEffect.commandLine.SnpEffCmdClosest;
import org.snpeff.snpEffect.commandLine.SnpEffCmdCount;
import org.snpeff.snpEffect.commandLine.SnpEffCmdDatabases;
import org.snpeff.snpEffect.commandLine.SnpEffCmdDownload;
import org.snpeff.snpEffect.commandLine.SnpEffCmdDump;
import org.snpeff.snpEffect.commandLine.SnpEffCmdEff;
import org.snpeff.snpEffect.commandLine.SnpEffCmdGenes2Bed;
import org.snpeff.snpEffect.commandLine.SnpEffCmdGsa;
import org.snpeff.snpEffect.commandLine.SnpEffCmdLen;
import org.snpeff.snpEffect.commandLine.SnpEffCmdPdb;
import org.snpeff.snpEffect.commandLine.SnpEffCmdProtein;
import org.snpeff.snpEffect.commandLine.SnpEffCmdSeq;
import org.snpeff.snpEffect.commandLine.SnpEffCmdShow;
import org.snpeff.snpEffect.commandLine.SnpEffCmdTranslocationsReport;
import org.snpeff.spliceSites.SnpEffCmdSpliceAnalysis;
import org.snpeff.util.Gpr;
import org.snpeff.util.Log;
/**
* SnpEff's main command line program
*
* @author pcingola
*/
public class SnpEff implements CommandLine {
/**
* Available gene database formats
*/
public enum GeneDatabaseFormat {
// BED // http://genome.ucsc.edu/FAQ/FAQformat.html#format1
BIOMART //
, GFF3 // Discouraged GFF 3 format (http://www.sequenceontology.org/gff3.shtml)
, GFF2 // Obsolete GFF 2 format
, GTF22 // GTF 2.2 format (http://mblab.wustl.edu/GTF22.html)
, REFSEQ // UCSC's format using RefSeq
, KNOWN_GENES // UCSC's format using KnownGenes
, GENBANK // GeneBank file format
, EMBL // EMBL file format
}
/**
* Available input formats
*/
public enum InputFormat {
// TXT, PILEUP,
VCF, BED
}
/**
* Available output formats
*/
public enum OutputFormat {
VCF, BED, BEDANN, GATK
}
public static final String DEFAULT_COMMAND = "ann";
public static final int COMMAND_LINE_WIDTH = 40;
// Version info
public static final String SOFTWARE_NAME = "SnpEff";
public static final String REVISION = "f";
public static final String BUILD = Gpr.compileTimeStamp(SnpEff.class);
public static final String BUILD_DATE = Gpr.compileDate(SnpEff.class);
public static final String VERSION_MAJOR = "5.0";
public static final String VERSION_SHORT = VERSION_MAJOR + REVISION;
public static final String VERSION_BUILD = VERSION_SHORT + " (build " + BUILD + ")";
public static final String VERSION_AUTHOR = VERSION_BUILD + ", by " + Pcingola.BY;
public static final String VERSION = SOFTWARE_NAME + " " + VERSION_AUTHOR;
protected String command = "";
protected String[] args; // Arguments used to invoke this command
protected String[] shiftArgs;
protected boolean canonical = false; // Use only canonical transcripts
protected boolean debug; // Debug mode
protected boolean download = true; // Download genome, if not available
protected boolean expandIub = true; // Expand IUB codes
protected boolean help; // Show command help and exit
protected boolean hgvs = true; // Use Hgvs notation
protected boolean hgvsForce = false; // Use Hgvs notation even in classic mode?
protected boolean hgvsOneLetterAa = false; // Use 1-letter AA codes in HGVS.p notation?
protected boolean hgvsOld = false; // Old style notation: E.g. 'c.G123T' instead of 'c.123G>T' and 'X' instead of '*'
protected boolean hgvsShift = true; // Shift variants towards the 3-prime end of the transcript
protected boolean hgvsTrId = false; // Use full transcript version in HGVS notation?
protected boolean interaction = true; // Use interaction loci information if available
protected boolean log; // Log to server (statistics)
protected boolean motif = true; // Annotate using motifs
protected boolean multiThreaded = false; // Use multiple threads
protected boolean nextProt = true; // Annotate using NextProt database
protected boolean nextProtKeepAllTrs = false; // Keep all nextprot entries, even if the transcript doesn't exist
protected boolean noGenome = false; // Do not load genome database
protected boolean onlyProtein = false; // Only use protein coding transcripts
protected boolean onlyRegulation = false; // Only build regulation tracks
protected boolean quiet; // Be quiet
protected boolean strict = false; // Only use transcripts that have been validated
protected boolean saveOutput = false; // Save output to buffer (instead of printing it to STDOUT)
protected boolean suppressOutput = false; // Only used for debugging purposes
protected boolean verbose; // Be verbose
protected Boolean treatAllAsProteinCoding = null; // Only use coding genes. Default is 'null' which means 'auto'
protected int numWorkers = Gpr.NUM_CORES; // Max number of threads (if multi-threaded version is available)
protected int spliceSiteSize = SpliceSite.CORE_SPLICE_SITE_SIZE; // Splice site size default: 2 bases (canonical splice site)
protected int spliceRegionExonSize = SpliceSite.SPLICE_REGION_EXON_SIZE;
protected int spliceRegionIntronMin = SpliceSite.SPLICE_REGION_INTRON_MIN;
protected int spliceRegionIntronMax = SpliceSite.SPLICE_REGION_INTRON_MAX;
protected int upDownStreamLength = SnpEffectPredictor.DEFAULT_UP_DOWN_LENGTH; // Upstream & downstream interval length
protected String configFile; // Config file
protected String dataDir; // Override data_dir in config file
protected String genomeVer; // Genome version
protected String onlyTranscriptsFile = null; // Only use the transcripts in this file (Format: One transcript ID per line)
protected String canonicalFile = null; // Use canonical transcripts, changing the ones that are present in the file.
protected TranscriptSupportLevel maxTranscriptSupportLevel = null; // Filter by maximum Transcript Support Level (TSL)
protected StringBuilder output = new StringBuilder();
protected Config config; // Configuration
protected Genome genome;
protected SnpEff snpEffCmd; // Real command to run
protected ArrayList<String> customIntervalFiles; // Custom interval files (bed)
protected ArrayList<String> filterIntervalFiles;// Files used for filter intervals
protected HashSet<String> regulationTracks = new HashSet<>();
protected Map<String, String> configOverride = new HashMap<>();
/**
* Main
*/
public static void main(String[] args) {
SnpEff snpEff = new SnpEff(args);
boolean ok = snpEff.run();
System.exit(ok ? 0 : -1);
}
public SnpEff() {
genomeVer = ""; // Genome version
configFile = Config.DEFAULT_CONFIG_FILE; // Config file
verbose = false; // Be verbose
debug = false; // Debug mode
quiet = false; // Be quiet
log = true; // Log to server (statistics)
multiThreaded = false; // Use multiple threads
customIntervalFiles = new ArrayList<>(); // Custom interval files
}
public SnpEff(String[] args) {
this();
this.args = args;
}
public void addRegulationTrack(String cellType) {
regulationTracks.add(cellType);
}
/**
* Filter canonical transcripts
*/
protected void canonical() {
if (verbose) Log.info("Filtering out non-canonical transcripts.");
config.getSnpEffectPredictor().removeNonCanonical(canonicalFile);
if (verbose) {
// Show genes and transcript (which ones are considered 'canonical')
Log.info("Canonical transcripts:\n\t\tgeneName\tgeneId\ttranscriptId\tcdsLength");
for (Gene g : config.getSnpEffectPredictor().getGenome().getGenes()) {
for (Transcript t : g) {
String cds = t.cds();
int cdsLen = (cds != null ? cds.length() : 0);
System.err.println("\t\t" + g.getGeneName() + "\t" + g.getId() + "\t" + t.getId() + "\t" + cdsLen);
}
}
}
if (verbose) Log.info("done.");
}
/**
* Check if there is a new version of the program
*/
void checkNewVersion(Config config) {
// Download command checks for versions, no need to do it twice
if ((config != null) && !command.equalsIgnoreCase("download")) {
// Check if a new version is available
VersionCheck versionCheck = VersionCheck.version(SnpEff.SOFTWARE_NAME, SnpEff.VERSION_SHORT, config.getVersionsUrl(), verbose);
if (!quiet && versionCheck.isNewVersion()) {
System.err.println("\n\nNEW VERSION!\n\tThere is a new " + this.getClass().getSimpleName() + " version available: " //
+ "\n\t\tVersion : " + versionCheck.getLatestVersion() //
+ "\n\t\tRelease date : " + versionCheck.getLatestReleaseDate() //
+ "\n\t\tDownload URL : " + versionCheck.getLatestUrl() //
+ "\n" //
);
}
}
}
/**
* Create an appropriate SnpEffCmd* object
*/
public SnpEff cmd() {
// Parse command line arguments (generic and database specific arguments)
parseArgs(args);
// Create a new command
SnpEff cmd = cmdFactory(command);
// Copy values to specific command
copyValues(cmd);
// Help requested?
if (help) {
cmd.usage(null); // Show help message and exit
return null;
}
// Parse command specific arguments
cmd.parseArgs(shiftArgs);
return cmd;
}
SnpEff cmdFactory(String command) {
// All commands are lower-case
switch (command.trim().toLowerCase()) {
case "ann":
case "eff":
return new SnpEffCmdEff();
case "build":
return new SnpEffCmdBuild();
case "buildnextprot":
return new SnpEffCmdBuildNextProt();
case "cds":
return new SnpEffCmdCds();
case "closest":
return new SnpEffCmdClosest();
case "count":
return new SnpEffCmdCount();
case "databases":
return new SnpEffCmdDatabases();
case "download":
return new SnpEffCmdDownload();
case "dump":
return new SnpEffCmdDump();
case "gsa":
return new SnpEffCmdGsa();
case "genes2bed":
return new SnpEffCmdGenes2Bed();
case "len":
return new SnpEffCmdLen();
case "pdb":
return new SnpEffCmdPdb();
case "protein":
return new SnpEffCmdProtein();
case "seq":
return new SnpEffCmdSeq();
case "show":
return new SnpEffCmdShow();
case "translocreport":
return new SnpEffCmdTranslocationsReport();
// Obsolete stuff
case "spliceanalysis":
return new SnpEffCmdSpliceAnalysis();
case "acat":
return new SnpEffCmdAcat();
default:
throw new RuntimeException("Unknown command '" + command + "'");
}
}
/**
* Command line argument list (try to fit it into COMMAND_LINE_WIDTH)
*/
protected String commandLineStr(boolean splitLines) {
if (args == null) return "";
StringBuilder argsList = new StringBuilder();
argsList.append("SnpEff " + command + " ");
int size = argsList.length();
for (String arg : args) {
argsList.append(arg.trim());
size += arg.length();
if (splitLines && (size > COMMAND_LINE_WIDTH)) {
argsList.append(" \n");
size = 0;
} else {
argsList.append(" ");
size++;
}
}
return argsList.toString();
}
/**
* Copy values to a new command
*/
void copyValues(SnpEff cmd) {
cmd.canonical = canonical;
cmd.canonicalFile = canonicalFile;
cmd.configFile = configFile;
cmd.customIntervalFiles = customIntervalFiles;
cmd.dataDir = dataDir;
cmd.debug = debug;
cmd.download = download;
cmd.expandIub = expandIub;
cmd.filterIntervalFiles = filterIntervalFiles;
cmd.genomeVer = genomeVer;
cmd.help = help;
cmd.hgvs = hgvs;
cmd.hgvsForce = hgvsForce;
cmd.hgvsOld = hgvsOld;
cmd.hgvsOneLetterAa = hgvsOneLetterAa;
cmd.hgvsShift = hgvsShift;
cmd.hgvsTrId = hgvsTrId;
cmd.interaction = interaction;
cmd.log = log;
cmd.motif = motif;
cmd.maxTranscriptSupportLevel = maxTranscriptSupportLevel;
cmd.multiThreaded = multiThreaded;
cmd.nextProt = nextProt;
cmd.noGenome = noGenome;
cmd.numWorkers = numWorkers;
cmd.onlyProtein = onlyProtein;
cmd.onlyRegulation = onlyRegulation;
cmd.onlyTranscriptsFile = onlyTranscriptsFile;
cmd.quiet = quiet;
cmd.regulationTracks = regulationTracks;
cmd.spliceSiteSize = spliceSiteSize;
cmd.spliceRegionExonSize = spliceRegionExonSize;
cmd.spliceRegionIntronMax = spliceRegionIntronMax;
cmd.spliceRegionIntronMin = spliceRegionIntronMin;
cmd.strict = strict;
cmd.suppressOutput = suppressOutput;
cmd.treatAllAsProteinCoding = treatAllAsProteinCoding;
cmd.upDownStreamLength = upDownStreamLength;
cmd.verbose = verbose;
cmd.configOverride = configOverride;
}
@Override
public String[] getArgs() {
return args;
}
public Config getConfig() {
return config;
}
public String getConfigFile() {
return configFile;
}
public String getOutput() {
return output.toString();
}
/**
* Is this a command line option (e.g. "-tfam" is a command line option, but "-"
* means STDIN)
*/
protected boolean isOpt(String arg) {
return arg.startsWith("-") && (arg.length() > 1);
}
public void load() {
loadConfig(); // Read config file
loadDb(); // Load database
}
/**
* Read config file
*/
protected void loadConfig() {
if (config == null) {
// Read config file
if (verbose) //
Log.info("Reading configuration file '" + configFile + "'" //
+ ((genomeVer != null) && (!genomeVer.isEmpty()) ? ". Genome: '" + genomeVer + "'" : "") //
);
config = new Config(genomeVer, configFile, dataDir, configOverride, verbose); // Read configuration
if (verbose) Log.info("done");
}
// Command line options overriding configuration file
config.setUseHgvs(hgvs);
config.setHgvsOld(hgvsOld);
config.setHgvsOneLetterAA(hgvsOneLetterAa);
config.setHgvsShift(hgvsShift);
config.setHgvsTrId(hgvsTrId);
config.setExpandIub(expandIub);
// Verbose & debug
config.setDebug(debug);
config.setVerbose(verbose);
config.setQuiet(quiet);
}
/**
* Read a custom interval file
*/
protected int loadCustomFile(String fileName) {
Markers markers = loadMarkers(fileName);
// Add all markers to predictor
for (Marker m : markers)
config.getSnpEffectPredictor().add(m);
// Number added
return markers.size();
}
/**
* Load database
*/
public void loadDb() {
if (config.getSnpEffectPredictor() != null) {
genome = config.getSnpEffectPredictor().getGenome();
return; // Already loaded
}
// Read database (or create a new one)
if (noGenome) {
if (verbose) Log.info("Creating empty database (no genome).");
SnpEffectPredictor snpEffectPredictor = new SnpEffectPredictor(new Genome());
config.setSnpEffectPredictor(snpEffectPredictor);
config.setErrorOnMissingChromo(false); // All chromosome will be missing (no genome)
config.setErrorChromoHit(false); // We don't have chromosomes, so we de-activate this error.
} else if (onlyRegulation) {
// Create predictor
config.setSnpEffectPredictor(new SnpEffectPredictor(config.getGenome()));
config.setOnlyRegulation(true);
config.setErrorOnMissingChromo(false); // A chromosome might be missing (e.g. no regulation tracks available for 'MT')
config.setErrorChromoHit(false); // A chromosome's length might be smaller than the real (it's calculated using regulation features, not real chromo data)
} else {
// Read
if (verbose) Log.info("Reading database for genome version '" + genomeVer + "' from file '" + config.getFileSnpEffectPredictor() + "' (this might take a while)");
// Try to download database if it doesn't exists?
if (download && !Gpr.canRead(config.getFileSnpEffectPredictor())) {
if (verbose) Log.info("Database not installed\n\tAttempting to download and install database '" + genomeVer + "'");
// Run download command
String downloadArgs[] = { genomeVer };
SnpEffCmdDownload snpEffCmdDownload = new SnpEffCmdDownload();
boolean ok = run(snpEffCmdDownload, downloadArgs, null);
if (!ok) throw new RuntimeException("Genome download failed!");
else if (verbose) Log.info("Database installed.");
}
config.loadSnpEffectPredictor(); // Read snpEffect predictor
genome = config.getSnpEffectPredictor().getGenome();
if (verbose) Log.info("done");
}
// Set 'treatAllAsProteinCoding'
if (treatAllAsProteinCoding != null) config.setTreatAllAsProteinCoding(treatAllAsProteinCoding);
else {
// treatAllAsProteinCoding was set to 'auto'
// I.e.: use 'true' when the genome has no protein coding info, otherwise use 'false'.
boolean tapc = !config.getGenome().hasCodingInfo();
if (debug) Log.debug("Setting '-treatAllAsProteinCoding' to '" + tapc + "'");
config.setTreatAllAsProteinCoding(tapc);
}
// Read custom interval files
for (String intFile : customIntervalFiles) {
if (verbose) Log.info("Reading interval file '" + intFile + "'");
int count = loadCustomFile(intFile);
if (verbose) Log.info("done (" + count + " intervals loaded). ");
}
// Read regulation tracks
for (String regTrack : regulationTracks)
loadRegulationTrack(regTrack);
// Set upstream-downstream interval length
config.getSnpEffectPredictor().setUpDownStreamLength(upDownStreamLength);
// Set splice site/region sizes
config.getSnpEffectPredictor().setSpliceSiteSize(spliceSiteSize);
config.getSnpEffectPredictor().setSpliceRegionExonSize(spliceRegionExonSize);
config.getSnpEffectPredictor().setSpliceRegionIntronMin(spliceRegionIntronMin);
config.getSnpEffectPredictor().setSpliceRegionIntronMax(spliceRegionIntronMax);
// Filter canonical transcripts
if (canonical || (canonicalFile != null && !canonicalFile.isEmpty())) canonical();
// Filter transcripts by TSL
if (maxTranscriptSupportLevel != null) {
if (verbose) Log.info("Filtering transcripts by Transcript Support Level (TSL): " + maxTranscriptSupportLevel);
config.getSnpEffectPredictor().filterTranscriptSupportLevel(maxTranscriptSupportLevel);
if (verbose) {
// Show genes and transcript (which ones are considered 'canonical')
Log.info("Transcript:\n\t\tgeneName\tgeneId\ttranscriptId\tTSL");
for (Gene g : config.getSnpEffectPredictor().getGenome().getGenes()) {
for (Transcript t : g)
System.err.println("\t\t" + g.getGeneName() + "\t" + g.getId() + "\t" + t.getId() + "\t" + t.getTranscriptSupportLevel());
}
}
if (verbose) Log.info("done.");
}
// Filter verified transcripts
if (strict) {
if (verbose) Log.info("Filtering out non-verified transcripts.");
if (config.getSnpEffectPredictor().removeUnverified()) {
Log.fatalError("All transcripts have been removed form every single gene!\nUsing strickt on this database leaves no information.");
}
if (verbose) Log.info("done.");
}
// Use transcripts set form input file
if (onlyTranscriptsFile != null) {
// Load file
String onlyTr = Gpr.readFile(onlyTranscriptsFile);
Set<String> trIds = new HashSet<>();
for (String trId : onlyTr.split("\n"))
trIds.add(trId.trim());
// Remove transcripts
if (verbose) Log.info("Filtering out transcripts in file '" + onlyTranscriptsFile + "'. Total " + trIds.size() + " transcript IDs.");
int removed = config.getSnpEffectPredictor().retainAllTranscripts(trIds);
int countTr = config.getSnpEffectPredictor().countTranscripts();
if (verbose) Log.info("Done: " + removed + " transcripts removed, " + countTr + " transcripts left.");
if (countTr <= 0) Log.fatalError("No transcripts left for analysis after filter using file '" + onlyTranscriptsFile + "'");
}
// Use protein coding transcripts
if (onlyProtein) {
// Remove transcripts
if (verbose) Log.info("Filtering out non-protein coding transcripts.");
int removed = config.getSnpEffectPredictor().keepTranscriptsProteinCoding();
if (verbose) Log.info("Done: " + removed + " transcripts removed.");
}
// Load NextProt database
if (nextProt) loadNextProt();
// Load Motif databases
if (motif) loadMotif();
// Load Motif databases
if (interaction) loadInteractions();
// Build tree
if (verbose) Log.info("Building interval forest");
config.getSnpEffectPredictor().buildForest();
if (verbose) Log.info("done.");
// Show some genome stats. Chromosome names are shown, since a lot of people have
// problems with the correct chromosome names.
if (verbose) {
Log.info("Genome stats :");
Genome genome = config.getGenome();
// When in debug mode, try to show detailed errors
StringBuilder errors = debug ? new StringBuilder() : null;
System.err.println(genome.toString(errors));
if (errors != null && (errors.length() > 0)) System.err.println(errors);
}
genome = config.getSnpEffectPredictor().getGenome();
genome.getGenomicSequences().setVerbose(verbose);
}
/**
* Load protein interaction database
*/
void loadInteractions() {
// Sanity checks
String intFileName = config.getDirDataGenomeVersion() + "/" + SnpEffCmdPdb.PROTEIN_INTERACTION_FILE;
if (!Gpr.exists(intFileName)) {
if (debug) Log.warning(ErrorWarningType.WARNING_FILE_NOT_FOUND, "Cannot open interactions file '" + intFileName + "'");
return;
}
// Build transcript map
HashMap<String, Transcript> id2tr = new HashMap<>();
SnpEffectPredictor sep = config.getSnpEffectPredictor();
Genome genome = sep.getGenome();
for (Gene g : genome.getGenes())
for (Transcript tr : g)
id2tr.put(tr.getId(), tr);
// Load all interactions
if (verbose) Log.info("Loading interactions from : " + intFileName);
String lines[] = Gpr.readFile(intFileName, true).split("\n");
int count = 0, countSkipped = 0;
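// Each line encodes one interaction: chromosomes, transcript IDs and amino acid positions for both partners (parsed by DistanceResult).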
for (String line : lines) {
DistanceResult dres = new DistanceResult(line);
Chromosome chr1 = genome.getChromosome(dres.chr1);
Chromosome chr2 = genome.getChromosome(dres.chr2);
Transcript tr1 = id2tr.get(dres.trId1);
Transcript tr2 = id2tr.get(dres.trId2);
String id = dres.getId();
// All chromosomes and transcript found? => Add entries
if (chr1 != null && chr2 != null && tr1 != null && tr2 != null) {
// Gene1
Gene gene1 = (Gene) tr1.getParent();
gene1.getId();
List<ProteinInteractionLocus> list = ProteinInteractionLocus.factory(tr1, dres.aaPos1, tr2, id);
for (Marker m : list)
gene1.addPerGene(m);
// Since they act on different transcripts (or different AAs within the
// same transcript), we need to add two markers
// (one for each "side" of the interaction)
Gene gene2 = (Gene) tr2.getParent();
gene2.getId();
list = ProteinInteractionLocus.factory(tr2, dres.aaPos2, tr1, id);
for (Marker m : list)
gene2.addPerGene(m);
count++;
} else countSkipped++;
}
if (verbose) Log.info("\tInteractions: " + count + " added, " + countSkipped + " skipped.");
}
/**
* Read markers file. Supported formats: BED, TXT, BigBed, GFF
*/
protected Markers loadMarkers(String fileName) {
Markers markersSeqChange = Markers.readMarkers(fileName);
String label = Gpr.removeExt(Gpr.baseName(fileName));
// Convert markers to 'Custom' markers
Markers markers = new Markers();
for (Marker m : markersSeqChange) {
if (m instanceof Custom) {
((Custom) m).setLabel(label);
markers.add(m);
} else {
// Not a custom interval? Create one
Custom custom = new Custom(m.getParent(), m.getStart(), m.getEnd(), false, m.getId(), label);
markers.add(custom);
}
}
// Number added
return markers;
}
/**
* Read regulation motif files
*/
void loadMotif() {
// Sanity checks
String pwmsFileName = config.getDirDataGenomeVersion() + "/pwms.bin";
String motifBinFileName = config.getBaseFileNameMotif() + ".bin";
if (!Gpr.exists(pwmsFileName) || !Gpr.exists(motifBinFileName)) {
if (verbose) Log.info("Motif and PWM files not found, skipping");
// OK, we don't have motif annotations, no problem
if (debug) {
if (!Gpr.exists(pwmsFileName)) Log.warning(ErrorWarningType.WARNING_FILE_NOT_FOUND, "Cannot open PWMs file '" + pwmsFileName + "'");
if (!Gpr.exists(motifBinFileName)) Log.warning(ErrorWarningType.WARNING_FILE_NOT_FOUND, "Cannot open Motifs file '" + motifBinFileName + "'");
}
return;
}
// Load all PWMs
if (verbose) Log.info("Loading PWMs from : " + pwmsFileName);
Jaspar jaspar = new Jaspar();
jaspar.load(pwmsFileName);
// Read motifs
if (verbose) Log.info("Loading Motifs from file '" + motifBinFileName + "'");
MarkerSerializer markerSerializer = new MarkerSerializer();
Markers motifsDb = markerSerializer.load(motifBinFileName);
// Add (only) motif markers. The original motifs had to be serialized with
// Chromosomes, Genomes and other markers (otherwise they could not have been
// saved)
SnpEffectPredictor snpEffectPredictor = config.getSnpEffectPredictor();
int countAdded = 0;
for (Marker m : motifsDb)
if (m instanceof Motif) {
Motif motif = (Motif) m;
// Connect motifs to their respective PWMs
Pwm pwm = jaspar.getPwm(motif.getPwmId());
if (pwm != null) {
// Set PWM and add to snpEffPredictor
motif.setPwm(pwm);
snpEffectPredictor.add(motif);
countAdded++;
} else if (debug) Log.debug("Cannot find PWM for motif '" + motif.getPwmId() + "'");
}
if (verbose) Log.info("\tMotif database: " + countAdded + " markers loaded.");
}
/**
* Read NextProt database and update SnpEffectPredictor
*/
void loadNextProt() {
SnpEffectPredictor snpEffectPredictor = config.getSnpEffectPredictor();
// Read nextProt binary file
String nextProtBinFile = config.getDirDataGenomeVersion() + "/nextProt.bin";
if (!Gpr.canRead(nextProtBinFile)) {
if (debug) Log.debug("NextProt database '" + nextProtBinFile + "' doesn't exist. Ignoring.");
return;
}
if (verbose) Log.info("Reading NextProt database from file '" + nextProtBinFile + "'");
MarkerSerializer markerSerializer = new MarkerSerializer();
Markers nextProtDb = markerSerializer.load(nextProtBinFile);
// Create a collection of (only) NextProt markers. The original nextProtDb has
// Chromosomes, Genomes and other markers (otherwise it could not have been
// saved)
ArrayList<NextProt> nextProts = new ArrayList<>(nextProtDb.size());
for (Marker m : nextProtDb)
if (m instanceof NextProt) nextProts.add((NextProt) m);
if (verbose) Log.info("NextProt database: " + nextProts.size() + " markers loaded.");
// Connect nextProt annotations to transcripts and exons
if (verbose) Log.info("Adding transcript info to NextProt markers.");
// Create a list of all transcripts
HashMap<String, Transcript> trs = new HashMap<>();
for (Gene g : snpEffectPredictor.getGenome().getGenes())
for (Transcript tr : g)
trs.put(tr.getId(), tr);
// Add nextprot entries
if (nextProtKeepAllTrs) {
// Add all nextProt marker to predictor (even if the transcript doesn't exist)
// WARNING: This is not recommended
for (NextProt np : nextProts)
snpEffectPredictor.add(np);
} else {
// Find the corresponding transcript for each nextProt marker
// WARNING: The transcripts might be filtered out by the user
// (e.g. '-canon' command line option or user defined
// sets). We only keep nextProt markers associated to found
// transcripts. All others are discarded (the user doesn't
// want that info).
ArrayList<NextProt> nextProtsToAdd = new ArrayList<>();
for (NextProt np : nextProts) {
Transcript tr = trs.get(np.getTranscriptId());
// Found transcript, now try to find an exon
if (tr != null) {
np.setParent(tr); // Set this transcript as parent
nextProtsToAdd.add(np);
}
}
// Add all nextProt marker to predictor
for (NextProt np : nextProtsToAdd)
snpEffectPredictor.add(np);
// Note: We might end up with more markers than we loaded, just because they map
// to multiple exons (although it would be highly unusual)
if (verbose) Log.info("NextProt database: " + nextProtsToAdd.size() + " markers added.");
}
}
/**
* Read regulation track and update SnpEffectPredictor
*/
void loadRegulationTrack(String regTrack) {
// Read file
if (verbose) Log.info("Reading regulation track '" + regTrack + "'");
String regFile = config.getDirDataGenomeVersion() + "/regulation_" + regTrack + ".bin";
Markers regulation = new Markers();
regulation.load(regFile);
// Are all chromosomes available?
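// Track the largest end coordinate seen on each chromosome, so missing chromosomes can be created long enough below.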
HashMap<String, Integer> chrs = new HashMap<>();
for (Marker r : regulation) {
String chr = r.getChromosomeName();
int max = chrs.containsKey(chr) ? chrs.get(chr) : 0;
max = Math.max(max, r.getEnd());
chrs.put(chr, max);
}
// Add all chromosomes
for (String chr : chrs.keySet())
if (genome.getChromosome(chr) == null) genome.add(new Chromosome(genome, 0, chrs.get(chr), chr));
// Add all markers to predictor
config.getSnpEffectPredictor().addAll(regulation);
}
/**
* Parse command line arguments
*/
@Override
public void parseArgs(String[] args) {
if (args == null) {
command = DEFAULT_COMMAND;
return;
}
if (args.length <= 0) usage(null);
int argNum = 0;
// Parse command
if (args[0].equalsIgnoreCase("ann") // Annotate: Same as 'eff'
|| args[0].equalsIgnoreCase("build") //
|| args[0].equalsIgnoreCase("buildNextProt") //
|| args[0].equalsIgnoreCase("cds") //
|| args[0].equalsIgnoreCase("closest") //
|| args[0].equalsIgnoreCase("count") //
|| args[0].equalsIgnoreCase("databases") //
|| args[0].equalsIgnoreCase("download") //
|| args[0].equalsIgnoreCase("dump") //
|| args[0].equalsIgnoreCase("eff") //
|| args[0].equalsIgnoreCase("genes2bed") //
|| args[0].equalsIgnoreCase("gsa") //
|| args[0].equalsIgnoreCase("len") //
|| args[0].equalsIgnoreCase("pdb") //
|| args[0].equalsIgnoreCase("protein") //
|| args[0].equalsIgnoreCase("seq") //
|| args[0].equalsIgnoreCase("show") //
|| args[0].equalsIgnoreCase("test") //
|| args[0].equalsIgnoreCase("translocreport") //
// Obsolete stuff (from T2D projects)
|| args[0].equalsIgnoreCase("acat") //
|| args[0].equalsIgnoreCase("spliceAnalysis") //
) {
command = args[argNum++].trim().toLowerCase();
}
// Copy and parse arguments, except initial 'command'
ArrayList<String> argsList = new ArrayList<>();
for (int i = argNum; i < args.length; i++) {
String arg = args[i];
// These options are available for all commands
// Is it a command line option?
if (isOpt(arg)) {
switch (arg.toLowerCase()) {
case "-c":
case "-config":
if ((i + 1) < args.length) configFile = args[++i];
else usage("Option '-c' without config file argument");
break;
case "-configoption":
if ((i + 1) < args.length) {
String nameValue = args[++i];
String nv[] = nameValue.split("=", 2);
if (nv.length == 2) configOverride.put(nv[0], nv[1]);
else usage("Cannot parse config option (expected format 'name=value'): " + nameValue);
} else usage("Option '-configOption' without argument");
break;
case "-canon":
canonical = true; // Use canonical transcripts
break;
case "-canonlist":
if ((i + 1) < args.length) canonicalFile = args[++i];
else usage("Option '-canonList' without file argument");
break;
case "-d":
case "-debug":
debug = verbose = true;
break;
case "-datadir":
if ((i + 1) < args.length) dataDir = args[++i];
else usage("Option '-dataDir' without data_dir argument");
break;
case "-download":
download = true; // Download genome if not locally available
break;
case "-h":
case "-help":
help = true;
if (command.isEmpty()) usage(null); // Help was invoked without a specific command: Show generic help
break;
case "-interaction":
interaction = true; // Use interaction database
break;
case "-hgvs":
hgvs = hgvsForce = true; // Use HGVS notation
break;
case "-hgvsold":
hgvsOld = true;
break;
case "-hgvs1letteraa":
case "-hgvsoneletteraa":
hgvsOneLetterAa = true;
break;
case "-hgvstrid":
hgvsTrId = true;
break;
case "-interval":
if ((i + 1) < args.length) customIntervalFiles.add(args[++i]);
else usage("Option '-interval' without config interval_file argument");
break;
case "-maxtsl":
if ((i + 1) < args.length) maxTranscriptSupportLevel = TranscriptSupportLevel.parse(args[++i]);
else usage("Option '-maxTSL' without config transcript_support_level argument");
break;
case "-motif":
motif = true; // Use motif database
break;
case "-noexpandiub":
expandIub = false; // Do not expand IUB codes
break;
case "-nogenome":
noGenome = true; // Do not load genome
break;
case "-nointeraction":
interaction = false; // Do not use interaction database
break;
case "-nomotif":
motif = false; // Disable use of motif database
break;
case "-nextprot":
nextProt = true; // Use NextProt database
break;
case "-nonextprot":
nextProt = false; // Disable use of NextProt database
break;
case "-nodownload":
download = false; // Do not download genome
break;
case "-nolog":
log = false;
break;
case "-noout":
// Undocumented option (only used for development & debugging)
suppressOutput = true;
break;
case "-onlyreg":
onlyRegulation = true;
break;
case "-onlyprotein":
onlyProtein = true;
break;
case "-onlytr":
if ((i + 1) < args.length) onlyTranscriptsFile = args[++i]; // Only use the transcripts in this file
else usage("Option '-onlyTr' without file argument");
break;
case "-q":
case "-quiet":
quiet = true;
verbose = false;
break;
case "-reg":
if ((i + 1) < args.length) addRegulationTrack(args[++i]); // Add this track to the list
else usage("Option '-reg' without file argument");
break;
case "-ss":
case "-splicesitesize":
if ((i + 1) < args.length) spliceSiteSize = Gpr.parseIntSafe(args[++i]);
else usage("Option '-spliceSiteSize' without argument");
break;
case "-spliceregionexonsize":
if ((i + 1) < args.length) spliceRegionExonSize = Gpr.parseIntSafe(args[++i]);
else usage("Option '-spliceRegionExonSize' without argument");
break;
case "-spliceregionintronmin":
if ((i + 1) < args.length) spliceRegionIntronMin = Gpr.parseIntSafe(args[++i]);
else usage("Option '-spliceRegionIntronMin' without argument");
break;
case "-spliceregionintronmax":
if ((i + 1) < args.length) spliceRegionIntronMax = Gpr.parseIntSafe(args[++i]);
else usage("Option '-spliceRegionIntronMax' without argument");
break;
case "-strict":
strict = true;
break;
// case "-t":
// multiThreaded = true;
// break;
case "-treatallasproteincoding":
if ((i + 1) < args.length) {
i++;
if (args[i].equalsIgnoreCase("auto")) treatAllAsProteinCoding = null;
else treatAllAsProteinCoding = Gpr.parseBoolSafe(args[i]);
}
break;
case "-ud":
case "-updownstreamlen":
if ((i + 1) < args.length) upDownStreamLength = Gpr.parseIntSafe(args[++i]);
else usage("Option '-upDownstreamLen' without argument");
break;
case "-v":
case "-verbose":
verbose = true;
quiet = false;
break;
case "-version":
// Show version number and exit
System.out.println(SOFTWARE_NAME + "\t" + VERSION_SHORT + "\t" + BUILD_DATE);
System.exit(0);
break;
default:
// Unrecognized option? Maybe it's command-specific. Let the command parse it
argsList.add(arg);
}
} else {
// Command specific argument: Let command parse it
argsList.add(arg);
}
}
shiftArgs = argsList.toArray(new String[0]);
if (command.isEmpty()) command = DEFAULT_COMMAND; // Default command is 'ann'
// Show version and command
if (!help && (verbose || debug)) {
Log.info("SnpEff version " + VERSION);
Log.info("Command: '" + command + "'");
}
}
/**
* Print to screen or save to output buffer
*/
void print(Object o) {
if (saveOutput) output.append(o.toString() + "\n");
else if (!suppressOutput) System.out.println(o.toString());
}
/**
* Additional values to be reported
*/
public HashMap<String, String> reportValues() {
HashMap<String, String> reportValues = new HashMap<>();
return reportValues;
}
/**
* Run according to command line options
*/
@Override
public boolean run() {
SnpEff snpEffCmd = cmd();
if (snpEffCmd == null) return true;
// Run
boolean ok = false;
StringBuilder err = new StringBuilder();
try {
ok = snpEffCmd.run();
} catch (Throwable t) {
ok = false;
err.append(t.getMessage());
t.printStackTrace();
}
// Update config if needed
if (config == null) config = snpEffCmd.getConfig();
// Report to server (usage statistics)
if (log) {
// Log to server
LogStats.report(SOFTWARE_NAME, VERSION_BUILD, VERSION, ok, verbose, args, err.toString(), snpEffCmd.reportValues());
// Check for new version (use config file from command, since this one doesn't
// load a config file)
checkNewVersion(snpEffCmd.config);
}
if (verbose) Log.info("Done.");
return ok;
}
/**
* Run a SnpEff (usually a sub-class)
*/
protected boolean run(SnpEff snpEff, String args[], StringBuilder err) {
boolean ok = false;
try {
snpEff.verbose = verbose;
snpEff.help = help;
snpEff.debug = debug;
snpEff.quiet = quiet;
snpEff.configFile = configFile;
snpEff.dataDir = dataDir;
if (help) snpEff.usage(null); // Show help message and exit
else snpEff.parseArgs(args);
ok = snpEff.run();
} catch (Throwable t) {
if (err != null) err.append(t.getMessage());
t.printStackTrace();
}
return ok;
}
public void setCanonical(boolean canonical) {
this.canonical = canonical;
}
public void setConfig(Config config) {
this.config = config;
}
public void setConfigFile(String configFile) {
this.configFile = configFile;
}
public void setDebug(boolean debug) {
this.debug = debug;
}
public void setGenomeVer(String genomeVer) {
this.genomeVer = genomeVer;
}
public void setLog(boolean log) {
this.log = log;
}
public void setNextProt(boolean nextProt) {
this.nextProt = nextProt;
}
public void setNextProtKeepAllTrs(boolean nextProtKeepAllTrs) {
this.nextProtKeepAllTrs = nextProtKeepAllTrs;
}
public void setShiftHgvs(boolean shiftHgvs) {
hgvsShift = shiftHgvs;
}
public void setSpliceSiteSize(int spliceSiteSize) {
this.spliceSiteSize = spliceSiteSize;
}
public void setSupressOutput(boolean suppressOutput) {
this.suppressOutput = suppressOutput;
}
public void setUpDownStreamLength(int upDownStreamLength) {
this.upDownStreamLength = upDownStreamLength;
}
public void setVerbose(boolean verbose) {
this.verbose = verbose;
}
/**
* Show 'usage' message and exit with an error code '-1'
*/
@Override
public void usage(String message) {
if (message != null) System.err.println("Error: " + message + "\n");
System.err.println("SnpEff version " + VERSION);
System.err.println("Usage: snpEff [command] [options] [files]");
System.err.println("\nRun 'java -jar snpEff.jar command' for help on each specific command");
System.err.println("\nAvailable commands: ");
System.err.println("\t[eff|ann] : Annotate variants / calculate effects (you can use either 'ann' or 'eff', they mean the same). Default: ann (no command or 'ann').");
System.err.println("\tbuild : Build a SnpEff database.");
System.err.println("\tbuildNextProt : Build a SnpEff NextProt database (using NextProt's XML files).");
System.err.println("\tcds : Compare CDS sequences calculated from a SnpEff database to the ones in a FASTA file. Used for checking database correctness.");
System.err.println("\tclosest : Annotate the closest genomic region.");
System.err.println("\tcount : Count how many intervals (from a BAM, BED or VCF file) overlap with each genomic interval.");
System.err.println("\tdatabases : Show currently available databases (from local config file).");
System.err.println("\tdownload : Download a SnpEff database.");
System.err.println("\tdump : Dump to STDOUT a SnpEff database (mostly used for debugging).");
System.err.println("\tgenes2bed : Create a bed file from a genes list.");
System.err.println("\tlen : Calculate total genomic length for each marker type.");
System.err.println("\tpdb : Build interaction database (based on PDB data).");
System.err.println("\tprotein : Compare protein sequences calculated from a SnpEff database to the ones in a FASTA file. Used for checking database correctness.");
System.err.println("\tseq : Show sequence (from command line) translation.");
System.err.println("\tshow : Show a text representation of gene or transcript coordinates, DNA sequence and protein sequence.");
System.err.println("\ttranslocReport : Create a translocations report (from VCF file).");
// System.err.println("\tspliceAnalysis : Perform an analysis of splice sites. Experimental feature.");
usageGenericAndDb();
System.exit(-1);
}
/**
* Show database load and build options
*/
protected void usageDb() {
System.err.println("\nDatabase options:");
System.err.println("\t-canon : Only use canonical transcripts.");
System.err.println("\t-canonList <file> : Only use canonical transcripts, replace some transcripts using the 'gene_id \t transcript_id' entries in <file>.");
System.err.println("\t-interaction : Annotate using interactions (requires interaction database). Default: " + interaction);
System.err.println("\t-interval <file> : Use a custom intervals in TXT/BED/BigBed/VCF/GFF file (you may use this option many times)");
System.err.println("\t-maxTSL <TSL_number> : Only use transcripts having Transcript Support Level lower than <TSL_number>.");
System.err.println("\t-motif : Annotate using motifs (requires Motif database). Default: " + motif);
System.err.println("\t-nextProt : Annotate using NextProt (requires NextProt database).");
System.err.println("\t-noGenome : Do not load any genomic database (e.g. annotate using custom files).");
System.err.println("\t-noExpandIUB : Disable IUB code expansion in input variants");
System.err.println("\t-noInteraction : Disable interaction annotations");
System.err.println("\t-noMotif : Disable motif annotations.");
System.err.println("\t-noNextProt : Disable NextProt annotations.");
System.err.println("\t-onlyReg : Only use regulation tracks.");
System.err.println("\t-onlyProtein : Only use protein coding transcripts. Default: " + onlyProtein);
System.err.println("\t-onlyTr <file.txt> : Only use the transcripts in this file. Format: One transcript ID per line.");
System.err.println("\t-reg <name> : Regulation track to use (this option can be used several times).");
System.err.println("\t-ss , -spliceSiteSize <int> : Set size for splice sites (donor and acceptor) in bases. Default: " + spliceSiteSize);
System.err.println("\t-spliceRegionExonSize <int> : Set size for splice site region within exons. Default: " + spliceRegionExonSize + " bases");
System.err.println("\t-spliceRegionIntronMin <int> : Set minimum number of bases for splice site region within intron. Default: " + spliceRegionIntronMin + " bases");
System.err.println("\t-spliceRegionIntronMax <int> : Set maximum number of bases for splice site region within intron. Default: " + spliceRegionIntronMax + " bases");
System.err.println("\t-strict : Only use 'validated' transcripts (i.e. sequence has been checked). Default: " + strict);
System.err.println("\t-ud , -upDownStreamLen <int> : Set upstream downstream interval length (in bases)");
}
/**
* Show generic options
*/
protected void usageGeneric() {
System.err.println("\nGeneric options:");
System.err.println("\t-c , -config : Specify config file");
System.err.println("\t-configOption name=value : Override a config file option");
System.err.println("\t-d , -debug : Debug mode (very verbose).");
System.err.println("\t-dataDir <path> : Override data_dir parameter from config file.");
System.err.println("\t-download : Download a SnpEff database, if not available locally. Default: " + download);
System.err.println("\t-nodownload : Do not download a SnpEff database, if not available locally.");
System.err.println("\t-h , -help : Show this help and exit");
System.err.println("\t-noLog : Do not report usage statistics to server");
System.err.println("\t-q , -quiet : Quiet mode (do not show any messages or errors)");
// System.err.println("\t-t : Use multiple threads (implies '-noStats'). Default 'off'");
System.err.println("\t-v , -verbose : Verbose mode");
System.err.println("\t-version : Show version number and exit");
}
protected void usageGenericAndDb() {
usageGeneric();
usageDb();
}
}
| 35.105422 | 188 | 0.683569 |
fbec0898e1a1f66b8928e4ef449e8231d6040038 | 1,482 | package ru.tolstonogov.entities.file;
import java.util.Objects;
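/**
 * A property of a file, identified by its group and property name.
 * Equality and hash code are based on the group and name only; the description is ignored.
 */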
public class FileProperty {
private FileGroupProperties group;
private String propertyName;
private String description;
public FileProperty(FileGroupProperties group, String propertyName, String description) {
this.group = group;
this.propertyName = propertyName;
this.description = description;
}
public FileProperty(FileGroupProperties group, String propertyName) {
this.group = group;
this.propertyName = propertyName;
}
public FileGroupProperties getGroup() {
return group;
}
public void setGroup(FileGroupProperties group) {
this.group = group;
}
public String getPropertyName() {
return propertyName;
}
public void setPropertyName(String propertyName) {
this.propertyName = propertyName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FileProperty that = (FileProperty) o;
return group.equals(that.group) &&
propertyName.equals(that.propertyName);
}
@Override
public int hashCode() {
return Objects.hash(group, propertyName);
}
}
| 24.295082 | 93 | 0.649798 |
36a54edf7c0c9d74f8e3d8dc925fb8850ff150bb | 3,392 | /**
* Created by saazimi on 6/27/2018.
*/
public class List<T extends Comparable<T>> {
Node<T> first;
Node<T> last;
private int size;
    Node<T> recursivesearchvalue = null;
public List() {
first = null;
last = null;
}
public List(Node<T> first) {
this.first = first;
}
public List(T item) {
Node<T> node = new Node<T>(item);
first = node ;
last = node ;
node.setNext(null);
}
public void insertfirst(T item){
if (isEmpty()) {
this.first = new Node<>(item);
first.setNext(null);
last = first;
}
else {
Node<T> node = new Node<>(item);
node.setNext(first);
first = node ;
}
}
public void insertlast(T item){
if (isEmpty()) {
insertfirst(item);
}
else {
Node<T> node = new Node<>(item);
last.setNext(node);
last = node ;
}
}
public Node<T> getLastItem(){
if (isEmpty()) {
System.out.println("list Is Empty");
return null;
}
else if (first.getNext() == null) {
Node x = first;
first = null;
last = null;
return x;
}
else {
Node node = first;
while (node.getNext() != last)
node = node.getNext();
node.setNext(null);
Node z = last;
last = node;
return z;
}
}
    public Node<T> getFirst() {
if (isEmpty()) return null;
else if (first.getNext() == null) {
Node pointer = first;
first = null;
return pointer;
}
else {
Node pointer = first;
first = first.getNext();
pointer.setNext(null);
return pointer;
}
}
    private boolean isEmpty() {
        return this.first == null;
}
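    // Note: getLastItem() unlinks the node it returns, so backwardPrint() empties the list as a side effect.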
public void backwardPrint(){
if (isEmpty()) {
System.out.println("list Is Empty");
}
else {
while (!isEmpty())
System.out.println(getLastItem().getData());
}
}
public void recursivesearch(T item , Node<T> firrst){
if (firrst.getData().compareTo(item) == 0 ) recursivesearchvalue = firrst;
else {
if ( firrst.getNext() != null)
recursivesearch(item,firrst.getNext());
else return ;
}
}
public void recursivePrintlistbackward(Node listfirstitem){
if (listfirstitem.getNext() != null) {
recursivePrintlistbackward(listfirstitem.getNext());
}
else {
System.out.println(listfirstitem.getData());
return;
}
System.out.println(listfirstitem.getData());
}
    public int getLength(){
        // Walk the list with a local pointer so the head of the list is not modified.
        int length = 0;
        Node<T> node = first;
        while (node != null){
            node = node.getNext();
            length++;
        }
        return length;
    }
}
| 23.887324 | 83 | 0.442512 |
6167706159563a6a8963958fee32c34b4e97785b | 3,637 | package sktest.ling.zero.crypto;
import org.junit.jupiter.api.Test;
import org.shaneking.ling.zero.crypto.SKC1;
import org.shaneking.ling.zero.lang.String0;
import org.shaneking.ling.zero.lang.ZeroException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import static org.junit.jupiter.api.Assertions.*;
class SKC1Test {
@Test
void decrypt() {
try {
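      // Demonstrates how UTF-8 encoded Chinese text is garbled when mis-decoded as GBK, and that the round trip back to UTF-8 is lossy.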
System.out.println("有梦才有精彩!");//有梦才有精彩!
System.out.println(new String("有梦才有精彩!".getBytes(StandardCharsets.UTF_8), "GBK"));//鏈夋ⅵ鎵嶆湁绮惧僵锛�
System.out.println(new String(new String("有梦才有精彩!".getBytes(StandardCharsets.UTF_8), "GBK").getBytes("GBK"), StandardCharsets.UTF_8));//有梦才有精彩�?
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
assertAll(
() -> assertEquals(new String("有梦才有精彩!".getBytes(StandardCharsets.UTF_8), "GBK"), SKC1.decrypt("bzo0CkFz86geJhgbyunPGrzq2Bc07XbhBpjLd8QTPAePVWxLMAHYeU0q5KuD1SP/")),//D:U,S:G
() -> assertEquals("cC53mabDUr4WU5NjQZLTw==", SKC1.decrypt("cC53mabDUr4WU5NjQZLTw==")),
() -> assertEquals(String0.EMPTY, SKC1.decrypt("Sj7lBZn+sxDVTAps58IiqA==")),
() -> assertEquals("ILoveYou", SKC1.decrypt("TcC53mabDUr4WU5NjQZLTw==")),
() -> assertEquals("有梦才有精彩!", SKC1.decrypt("Ziaj84fYKpNiZ1wD1O7vPNrb1dYVC2H65UlR8wlIvCw=")),//D:U,S:U
() -> assertThrows(ZeroException.class, () -> SKC1.decrypt("cC53mabDUr4WU5NjQZLTw==", false)),
() -> assertEquals("ILoveYou", SKC1.decrypt("TcC53mabDUr4WU5NjQZLTw==", "494c6f7665596f75")),
() -> assertEquals("ILoveYou", SKC1.decrypt("TcC53mabDUr4WU5NjQZLTw==", "494c6f7665596f75", true)),
() -> assertEquals("有梦才有精彩!", SKC1.decrypt("fu3MmngB1XWzzxxph9AQtQ==", SKC1.DEFAULT_SALT, Charset.forName("GBK"), false)),//D:G,S:U
() -> assertNotEquals(new String("有梦才有精彩!".getBytes(StandardCharsets.UTF_8), "GBK"), SKC1.decrypt("Ziaj84fYKpNiZ1wD1O7vPI0qsFSFHwqOuDqcwzRPt34=", SKC1.DEFAULT_SALT, Charset.forName("GBK"), false))//D:G,S:G
);
}
@Test
void encrypt() {
assertAll(
() -> assertEquals("Ziaj84fYKpNiZ1wD1O7vPNrb1dYVC2H65UlR8wlIvCw=", SKC1.encrypt("有梦才有精彩!")),//S:U,E:U
() -> assertEquals("TcC53mabDUr4WU5NjQZLTw==", SKC1.encrypt("ILoveYou")),
() -> assertEquals("Sj7lBZn+sxDVTAps58IiqA==", SKC1.encrypt(String0.EMPTY)),
() -> assertEquals("bzo0CkFz86geJhgbyunPGrzq2Bc07XbhBpjLd8QTPAePVWxLMAHYeU0q5KuD1SP/", SKC1.encrypt(new String("有梦才有精彩!".getBytes(StandardCharsets.UTF_8), "GBK"))),//S:G,E:U
() -> assertThrows(NullPointerException.class, () -> SKC1.encrypt(null, false)),
() -> assertEquals("TcC53mabDUr4WU5NjQZLTw==", SKC1.encrypt("ILoveYou", "494c6f7665596f75")),
() -> assertEquals("ILoveYou", SKC1.encrypt("ILoveYou", "94c6f7665596f75")),
() -> assertThrows(ZeroException.class, () -> SKC1.encrypt("ILoveYou", "94c6f7665596f75", false)),
() -> assertEquals("ILoveYou", SKC1.encrypt("ILoveYou", "94c6f7665596f75", true)),
() -> assertEquals("fu3MmngB1XWzzxxph9AQtQ==", SKC1.encrypt("有梦才有精彩!", SKC1.DEFAULT_SALT, Charset.forName("GBK"), false)),//S:U,E:G
() -> assertEquals("Ziaj84fYKpNiZ1wD1O7vPI0qsFSFHwqOuDqcwzRPt34=", SKC1.encrypt(new String("有梦才有精彩!".getBytes(StandardCharsets.UTF_8), "GBK"), SKC1.DEFAULT_SALT, Charset.forName("GBK"), false))//S:G,E:G
);
}
@Test
void salt() {
assertAll(
() -> assertEquals(16, SKC1.salt().length()),
() -> assertEquals("494c6f7665596f75", SKC1.salt("ILoveYou")),
() -> assertThrows(IllegalArgumentException.class, () -> SKC1.salt("LengthNotEight"))
);
}
}
| 47.855263 | 211 | 0.692604 |
b2b427d896ce43cf83c44e8d0ea87a5a14759f2c | 258 | package at.ac.univie.a0908270.nncloud.db;
import org.springframework.data.mongodb.repository.MongoRepository;
public interface NeuronalNetworkRepository extends MongoRepository<Vinnsl, String> {
//VinnslDefinition findByDescription(String description);
}
| 32.25 | 84 | 0.844961 |
75e43224079211375c440f57f5d27dd3c3021bca | 756 | package com.ilearnrw.common.security.users.services;
/*
* Copyright (c) 2015, iLearnRW. Licensed under Modified BSD Licence. See licence.txt for details.
*/
import java.util.List;
import java.util.Map;
import com.ilearnrw.common.security.users.model.Role;
import com.ilearnrw.common.security.users.model.User;
public interface RoleService {
public List<Role> getRoleList();
public Role getRole(int id);
public int insertData(Role role);
public void updateData(Role role);
public void deleteData(int id);
public List<Role> getRoleList(User user);
public void setRoleList(User user, List<Role> roles);
public Role getRole(String roleName);
public Map<String, String> getUsersWithRole(String role);
}
| 23.625 | 99 | 0.730159 |
c800ddb41b9ffb07b9525e54b19a8ac9a5e2362a | 2,810 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.ant.freeform;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.netbeans.api.project.Project;
import org.netbeans.api.project.ProjectManager;
import org.netbeans.api.project.ProjectUtils;
import org.netbeans.api.project.SourceGroup;
import org.openide.filesystems.FileObject;
import org.openide.loaders.CreateFromTemplateAttributesProvider;
import org.openide.loaders.DataFolder;
/**
* Test for freeform template attributes provider, currently providing only
* project-license value from project.xml
*
* @author Milan Kubec
*/
public class FreeformTemplateAttributesProviderTest extends TestBase {
public FreeformTemplateAttributesProviderTest(String testName) {
super(testName);
}
public void testAttributesFor() throws Exception {
FileObject projdir = egdirFO.getFileObject("simplewithlicense");
Project simpleWithLicense = ProjectManager.getDefault().findProject(projdir);
CreateFromTemplateAttributesProvider provider = simpleWithLicense.getLookup().lookup(CreateFromTemplateAttributesProvider.class);
SourceGroup[] groups = ProjectUtils.getSources(simpleWithLicense).getSourceGroups("java"); // JavaProjectConstants.SOURCES_TYPE_JAVA
for (SourceGroup group : groups) {
FileObject root = group.getRootFolder();
Map result = provider.attributesFor(null, DataFolder.findFolder(root), null);
assertEquals(1, result.size());
Map values = (Map)result.get("project");
if (root.getName().equals("src")) {
Map<String, String> expected = new HashMap<String, String>();
expected.put("license", "cddl-netbeans-sun");
expected.put("encoding", "UTF-8");
assertEquals(expected, values);
} else {
assertEquals(Collections.singletonMap("license", "cddl-netbeans-sun"), values);
}
}
}
}
| 41.940299 | 140 | 0.715658 |
c020417a0bc1accbda4f36675a1cb1433e01dddd | 1,881 | package itx.dataserver;
import com.beust.jcommander.Parameter;
public class DsScanArguments {
@Parameter(names = {"-e", "--executor-size" }, description = "Number of threads in executor thread pool.")
private int executorSize = 1;
@Parameter(names = {"-p", "--root-path" }, description = "Root path of directory to scan.", required = true)
private String rootPath;
@Parameter(names = {"-eh", "--elastic-host" }, description = "Host name of ElasticSearch server.")
private String elasticHost = "127.0.0.1";
@Parameter(names = {"-ep", "--elastic-port" }, description = "Port number of ElasticSearch server.")
private int elasticPort = 9200;
    @Parameter(names = {"-i", "--init-indices" }, description = "Initialize ElasticSearch indices; this will delete existing indices and create empty ones.")
private boolean initIndices = true;
@Parameter(names = {"-m", "--meta-data" }, description = "Meta Data file name used during data scanning.")
private String metaDataFileName = ".annotation-meta-data-bulk.json";
@Parameter(names = {"-mh", "--ml-host" }, description = "Host name of Machine-Learning server.")
private String mlHost = "127.0.0.1";
@Parameter(names = {"-mp", "--ml-port" }, description = "Port number of Machine-Learning server.")
private int mlPort = 5000;
public int getExecutorSize() {
return executorSize;
}
public String getRootPath() {
return rootPath;
}
public String getElasticHost() {
return elasticHost;
}
public int getElasticPort() {
return elasticPort;
}
public boolean isInitIndices() {
return initIndices;
}
public String getMetaDataFileName() {
return metaDataFileName;
}
public String getMlHost() {
return mlHost;
}
public int getMlPort() {
return mlPort;
}
}
| 29.390625 | 143 | 0.644338 |
e2ec6b285dee7ff040dd18b2db976c353de849ec | 496 | package top.woilanlan.mapper;
import java.util.List;
import top.woilanlan.bean.Department;
import top.woilanlan.bean.Employee;
public interface EmployeeMapper {
int deleteByPrimaryKey(Integer id);
int insert(Employee record);
Employee selectByPrimaryKey(Integer id);
List<Employee> selectAll();
int updateByPrimaryKey(Employee record);
Employee getEmployeeById(Integer eid);
Employee getEmployeeById2(Integer eid);
Department getDeptByEid(Integer did);
} | 20.666667 | 44 | 0.760081 |
fd7d024c588d55262ec759387a027601fa529e4a | 2,380 | package in.becandid.app.becandid.ui.group;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
/**
* Created by harishpc on 11/25/2017.
*/
public class CommunityGroupPojo {
@SerializedName("created_by_id_user_name") @Expose private String created_by_id_user_name;
@SerializedName("is_joined") @Expose private Boolean isJoined;
@SerializedName("created_at") @Expose private String createdAt;
@SerializedName("joined_at") @Expose private String joinedAt;
@SerializedName("group_id") @Expose private String groupId;
@SerializedName("group_name") @Expose private String groupName;
@SerializedName("group_notif") @Expose private boolean group_notif;
@SerializedName("group_image_url") @Expose private String groupImageUrl;
@SerializedName("group_description") @Expose private String groupDescription;
@SerializedName("users_in_group") @Expose private String usersInGroup;
@SerializedName("posts_inside_groups") @Expose private String postsInsideGroups;
@SerializedName("id_categories") @Expose private String idCategories;
@SerializedName("category_name") @Expose private String categoryName;
@SerializedName("created_by") @Expose private CreatedBy createdBy;
public boolean isGroup_notif() {
return group_notif;
}
public String getCreated_by_id_user_name() {
return created_by_id_user_name;
}
public Boolean getJoined() {
return isJoined;
}
public String getCreatedAt() {
return createdAt;
}
public String getJoinedAt() {
return joinedAt;
}
public String getGroupImageUrl() {
return groupImageUrl;
}
public String getGroupDescription() {
return groupDescription;
}
public String getUsersInGroup() {
return usersInGroup;
}
public String getPostsInsideGroups() {
return postsInsideGroups;
}
public String getGroupId() {
return groupId;
}
public String getGroupName() {
return groupName;
}
public String getIdCategories() {
return idCategories;
}
public String getCategoryName() {
return categoryName;
}
public CreatedBy getCreatedBy() {
return createdBy;
}
@Override
public String toString() {
return this.groupName;
}
}
| 26.153846 | 93 | 0.697479 |
f745d77b70402b3d3bd50f19ac7249948494f806 | 1,397 | package me.ionar.salhack.gui.hud.components;
import com.mojang.realmsclient.gui.ChatFormatting;
import me.ionar.salhack.gui.hud.HudComponentItem;
import me.ionar.salhack.managers.ModuleManager;
import me.ionar.salhack.managers.TickRateManager;
import me.ionar.salhack.module.ui.HudModule;
import me.ionar.salhack.util.colors.SalRainbowUtil;
import me.ionar.salhack.util.render.RenderUtil;
public class TPSComponent extends HudComponentItem
{
public TPSComponent()
{
super("TPS", 2, 125);
}
private HudModule l_Hud = (HudModule) ModuleManager.Get().GetMod(HudModule.class);
private SalRainbowUtil Rainbow = new SalRainbowUtil(9);
private int l_I = 0;
@Override
public void render(int p_MouseX, int p_MouseY, float p_PartialTicks)
{
super.render(p_MouseX, p_MouseY, p_PartialTicks);
final String tickrate = l_Hud.Rainbow.getValue() ? String.format("TPS %.2f", TickRateManager.Get().getTickRate()) : String.format(ChatFormatting.GRAY + "TPS%s %.2f", ChatFormatting.WHITE, TickRateManager.Get().getTickRate());
Rainbow.OnRender();
RenderUtil.drawStringWithShadow(tickrate, GetX(), GetY(), l_Hud.Rainbow.getValue() ? Rainbow.GetRainbowColorAt(Rainbow.getRainbowColorNumber(l_I)) : -1);
SetWidth(RenderUtil.getStringWidth(tickrate));
SetHeight(RenderUtil.getStringHeight(tickrate) + 1);
}
}
| 37.756757 | 233 | 0.733715 |
2c31e4c35bc0ad071a068dff0372ddb71df9ecec | 4,433 | package com.iceteaviet.englishnow.ui.matching.view;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.design.widget.Snackbar;
import android.support.v4.content.ContextCompat;
import android.widget.Button;
import com.iceteaviet.englishnow.BR;
import com.iceteaviet.englishnow.R;
import com.iceteaviet.englishnow.databinding.ActivityConversationMatchingBinding;
import com.iceteaviet.englishnow.ui.base.BaseActivity;
import com.iceteaviet.englishnow.ui.matching.ConversationMatchingNavigator;
import com.iceteaviet.englishnow.ui.matching.viewmodel.ConversationMatchingViewModel;
import com.iceteaviet.englishnow.ui.videocall.view.VideoCallActivity;
import javax.inject.Inject;
public class ConversationMatchingActivity extends BaseActivity<ActivityConversationMatchingBinding, ConversationMatchingViewModel> implements ConversationMatchingNavigator {
public static final String EXTRA_SESSION_ID = "extra_session_id";
public static final String EXTRA_SESSION_TOKEN = "extra_session_token";
private static final String TAG = ConversationMatchingActivity.class.getSimpleName();
private static final int RC_SETTINGS_SCREEN_PERM = 123;
private static final int RC_VIDEO_APP_PERM = 124;
@Inject
protected ConversationMatchingViewModel conversationMatchingViewModel;
private ActivityConversationMatchingBinding conversationMatchingBinding;
private Button findButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
conversationMatchingViewModel.setNavigator(this);
conversationMatchingBinding = getViewDataBinding();
bindViews();
}
private void bindViews() {
findButton = conversationMatchingBinding.btnFind;
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case RC_VIDEO_APP_PERM: {
// If request is cancelled, the result arrays are empty.
if (grantResults.length > 0
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
                    // permission was granted, yay! Start the
                    // matching flow that needs the camera and microphone.
conversationMatchingViewModel.startFinding();
} else {
// permission denied, boo! Disable the
// functionality that depends on this permission.
}
return;
}
}
}
@Override
public void changeViewsToFindingMode() {
findButton.setTextColor(ContextCompat.getColor(this, R.color.colorGrey500));
findButton.setBackgroundResource(R.drawable.bg_rounded_button_grey);
}
@Override
public void changeViewsToNormalMode() {
findButton.setTextColor(Color.BLACK);
findButton.setBackgroundResource(R.drawable.bg_rounded_button);
}
@Override
public boolean selfCheckRequiredPermissions() {
return hasPermission(Manifest.permission.CAMERA) && hasPermission(Manifest.permission.RECORD_AUDIO);
}
@Override
public void requestPermissions() {
requestPermissionsSafely(new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO}, RC_VIDEO_APP_PERM);
}
@Override
public ConversationMatchingViewModel getViewModel() {
return conversationMatchingViewModel;
}
@Override
public int getBindingVariable() {
return BR.viewModel;
}
@Override
public int getLayoutId() {
return R.layout.activity_conversation_matching;
}
@Override
public void handleError(Throwable throwable) {
Snackbar mySnackbar = Snackbar.make(findViewById(R.id.cl_root_view),
throwable.getMessage(), Snackbar.LENGTH_SHORT);
mySnackbar.show();
throwable.printStackTrace();
}
@Override
public void navigateToVideoCallScreen(String sessionId, String token) {
Intent intent = new Intent(this, VideoCallActivity.class);
intent.putExtra(EXTRA_SESSION_ID, sessionId);
intent.putExtra(EXTRA_SESSION_TOKEN, token);
startActivity(intent);
}
}
| 36.04065 | 173 | 0.719377 |
d23a19f825acf05a2bcd5fcecf5baec997e2e743 | 3,945 | /*******************************************************************************
* Copyright (c) 2006, 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.ltk.internal.ui.refactoring.actions;
import java.lang.reflect.InvocationTargetException;
import org.eclipse.team.core.diff.IThreeWayDiff;
import org.eclipse.team.core.mapping.IMergeContext;
import org.eclipse.team.core.mapping.ISynchronizationContext;
import org.eclipse.core.runtime.Assert;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.SubProgressMonitor;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ltk.core.refactoring.RefactoringDescriptorProxy;
import org.eclipse.ltk.internal.core.refactoring.history.RefactoringHistoryService;
import org.eclipse.ltk.internal.ui.refactoring.RefactoringUIPlugin;
import org.eclipse.ltk.internal.ui.refactoring.model.ModelMessages;
import org.eclipse.ltk.internal.ui.refactoring.model.RefactoringDescriptorSynchronizationProxy;
/**
* Action to reject a pending refactoring and to just store it in the history.
*
* @since 3.2
*/
public final class RejectRefactoringsAction extends Action {
/** The synchronization context to use */
private final ISynchronizationContext fContext;
/** The refactoring descriptor proxies, or <code>null</code> */
private RefactoringDescriptorProxy[] fProxies= null;
/**
* Creates a new reject refactorings action.
*
* @param context
* the synchronization context
*/
public RejectRefactoringsAction(final ISynchronizationContext context) {
Assert.isNotNull(context);
fContext= context;
setText(ModelMessages.RejectRefactoringsAction_title);
setToolTipText(ModelMessages.RejectRefactoringsAction_tool_tip);
setDescription(ModelMessages.RejectRefactoringsAction_description);
}
/**
* {@inheritDoc}
*/
public boolean isEnabled() {
if (fProxies != null && fProxies.length > 0 && fContext instanceof IMergeContext) {
for (int index= 0; index < fProxies.length; index++) {
if (fProxies[index] instanceof RefactoringDescriptorSynchronizationProxy) {
final RefactoringDescriptorSynchronizationProxy proxy= (RefactoringDescriptorSynchronizationProxy) fProxies[index];
if (proxy.getDirection() == IThreeWayDiff.INCOMING)
return true;
}
}
}
return false;
}
/**
* {@inheritDoc}
*/
public void run() {
if (fProxies != null) {
try {
PlatformUI.getWorkbench().getProgressService().run(true, true, new IRunnableWithProgress() {
public final void run(final IProgressMonitor monitor) throws InvocationTargetException, InterruptedException {
try {
monitor.beginTask("", fProxies.length + 100); //$NON-NLS-1$
final RefactoringHistoryService service= RefactoringHistoryService.getInstance();
for (int index= 0; index < fProxies.length; index++)
service.addRefactoringDescriptor(fProxies[index], new SubProgressMonitor(monitor, 1));
} finally {
monitor.done();
}
}
});
} catch (InvocationTargetException exception) {
RefactoringUIPlugin.log(exception);
} catch (InterruptedException exception) {
// Do nothing
}
}
}
/**
   * Sets the refactoring descriptor proxies to reject.
*
* @param proxies
* the refactoring descriptor proxies
*/
public void setRefactoringDescriptors(final RefactoringDescriptorProxy[] proxies) {
Assert.isNotNull(proxies);
fProxies= proxies;
}
} | 34.605263 | 120 | 0.720913 |
d5ee1a8bbdf56e5e82313710151ef4eb192919f8 | 1,185 | package com.alan.leetcode.leetcode.easy.lc0069;
/**
*
 * Problem: integer square root of x
 * Difficulty: Easy
 * Category: Math
 * Approach:
* @author alan
*/
public class Solution {
public static void main(String[] args) {
System.out.println(mySqrtV2(2147395599));
}
/**
     * Approach: binary search
* @param x
* @return
*/
public static int mySqrtV2(int x) {
if (x == 0) {
return 0;
}
int count = 0;
int left = 1, right = x, res = 0;
while (left <= right) {
count ++;
int mid = left + (right - left) / 2;
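            // Compare mid <= x / mid (instead of mid * mid <= x) to avoid int overflow for large x.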
if (mid <= x /mid) {
left = mid + 1;
res = mid;
} else {
right = mid - 1;
}
}
System.out.println("count === " + count);
return res;
}
/**
     * Approach: brute force, test candidates one by one
* @param x
* @return
*/
public static int mySqrt(int x) {
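        // Caution: res * res can overflow int for large x; mySqrtV2 avoids this by dividing instead of multiplying.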
int res = 1;
while (true) {
if (res * res > x) {
res -= 1;
break;
} else if (res * res == x) {
break;
}
res ++;
}
return res;
}
}
| 19.112903 | 49 | 0.387342 |
23578a507907664e265de469d0a2d3758185370e | 4,140 | package ps2vfs.vfs;
import ps2vfs.plugin.*;
public class MountPoint
{
private String openPath;
private boolean recursive;
private boolean virtual;
private boolean hidden;
private static boolean debug = false;
private java.util.Vector children = null;
private MountPoint parent = null;
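  /** Orders directory entries so virtual mount points come first, then sorts by open path. */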
public static class DirComp
implements java.util.Comparator
{
public int compare(Object e1, Object e2) {
MountPoint de1 = (MountPoint) e1;
MountPoint de2 = (MountPoint) e2;
if(de1.isVirtual() == de2.isVirtual()) {
return de1.getOpenPath().compareTo(de2.getOpenPath());
} else if(de1.isVirtual()) {
return -1;
} else
return 1;
}
}
MountPoint(String iname) {
openPath = iname;
virtual = true;
recursive = true;
children = new java.util.Vector(0);
}
MountPoint(String iPath,
boolean iRecursive,
boolean iHidden) {
openPath = iPath;
recursive = iRecursive;
hidden = iHidden;
virtual = false;
}
public MountPoint addChild(MountPoint child) {
int idx = children.indexOf(child);
if(idx < 0) {
children.add(child);
child.setParent(this);
} else {
child = (MountPoint) children.get(idx);
}
if(debug)
System.out.println("MP addChild: " + child);
return child;
}
public void removeChild(MountPoint child) {
children.remove(child);
prune();
}
public java.util.List getChildren() {
return children;
}
public MountPoint getChild(MountPoint child) {
int idx = children.indexOf(child);
if(idx >= 0) {
return (MountPoint) children.get(idx);
}
return null;
}
public MountPoint getParent() {
return parent;
}
public MountPoint setParent(MountPoint iparent) {
parent = iparent;
return parent;
}
void prune() {
if(children.size() == 0 && parent != null) {
parent.removeChild(this);
}
}
public String toString() {
if(virtual) {
return "MPv: " + openPath;
} else {
return "MP : " + (recursive ? "r" : "-") + (hidden?"h":"-") + " : " + openPath;
}
}
public boolean equals(Object o) throws ClassCastException {
boolean eq = false;
MountPoint mp = (MountPoint) o;
eq = mp.openPath.equals(openPath);
if(eq) {
if(virtual) {
eq = mp.virtual;
} else {
eq = (mp.recursive == recursive);
}
}
return eq;
}
public java.util.List /*<java.plugin.VfsDirEntry>*/ resolveDir() {
java.util.List fileVec = null;
if(debug) {
System.out.println("Resolving mp " + this);
}
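    // Virtual children are exposed as nested mount-point handlers; real children are
    // listed from the filesystem, or resolved through the VFS when the path is not a directory.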
if(children.size() != 0) {
fileVec = new java.util.Vector(1);
java.util.Iterator it = children.iterator();
while(it.hasNext()) {
MountPoint mp = (MountPoint) it.next();
if(mp.isVirtual()) {
if(debug)
System.out.println("Adding virtual dir: '" + mp + "'");
VfsDirEntry vDirEnt = new VfsDirEntry();
vDirEnt.setVirtualName(mp.getOpenPath());
vDirEnt.setDirectory(true);
vDirEnt.setHandler(new ps2vfs.plugin.VfsHandler(mp.getOpenPath(),
new MountPointHandler(mp)));
fileVec.add(vDirEnt);
} else {
java.io.File dir = new java.io.File(mp.getOpenPath());
if(dir.isDirectory()) {
if(debug) {
System.out.println("Reading content of dir: " +
dir.getAbsolutePath() + " mp: " + mp);
}
java.io.File[] files = dir.listFiles();
java.util.List content = VfsDirEntry.toList(files, mp.isRecursive());
if(content != null)
fileVec.addAll(content);
} else {
Ps2Vfs vfs = Ps2Vfs.getVfs();
java.util.List content = vfs.resolveURI(mp.getOpenPath());
if(content != null)
fileVec.addAll(content);
/*
java.util.logging.Logger.getLogger("ps2vfs").warning("Not a supported path: " +
dir.getAbsolutePath() + " mp: " + mp);
*/
}
}
}
}
return fileVec;
}
public String getOpenPath() { return openPath; }
public boolean isRecursive() { return recursive; }
public boolean isVirtual() { return virtual; }
public boolean isHidden() { return hidden; }
};
| 24.210526 | 85 | 0.608213 |
fdeb9bf978217b1bcf95637c9792949b18ca8115 | 8,673 | /*
* Copyright 2017-2021 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micronaut.inject.provider;
import io.micronaut.context.BeanContext;
import io.micronaut.context.BeanResolutionContext;
import io.micronaut.context.Qualifier;
import io.micronaut.context.annotation.Any;
import io.micronaut.context.annotation.BootstrapContextCompatible;
import io.micronaut.context.exceptions.BeanInstantiationException;
import io.micronaut.context.exceptions.DisabledBeanException;
import io.micronaut.context.exceptions.NoSuchBeanException;
import io.micronaut.core.annotation.*;
import io.micronaut.core.naming.Named;
import io.micronaut.core.type.Argument;
import io.micronaut.core.type.ArgumentCoercible;
import io.micronaut.inject.BeanDefinition;
import io.micronaut.inject.BeanDefinitionReference;
import io.micronaut.inject.BeanFactory;
import io.micronaut.inject.InjectionPoint;
import io.micronaut.inject.annotation.MutableAnnotationMetadata;
import io.micronaut.inject.qualifiers.AnyQualifier;
import io.micronaut.inject.qualifiers.Qualifiers;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
/**
* Abstract bean definition for other providers to extend from.
*
* @param <T> The generic type
* @since 3.0.0
* @author graemerocher
*/
public abstract class AbstractProviderDefinition<T> implements BeanDefinition<T>, BeanFactory<T>, BeanDefinitionReference<T> {
private static final Argument<Object> TYPE_VARIABLE = Argument.ofTypeVariable(Object.class, "T");
private final AnnotationMetadata annotationMetadata;
public AbstractProviderDefinition() {
MutableAnnotationMetadata metadata = new MutableAnnotationMetadata();
metadata.addDeclaredAnnotation(Any.class.getName(), Collections.emptyMap());
metadata.addDeclaredStereotype(
Collections.singletonList(Any.class.getName()),
AnnotationUtil.QUALIFIER,
Collections.emptyMap()
);
metadata.addDeclaredAnnotation(BootstrapContextCompatible.class.getName(), Collections.emptyMap());
try {
metadata.addDeclaredAnnotation(Indexes.class.getName(), Collections.singletonMap(AnnotationMetadata.VALUE_MEMBER, getBeanType()));
} catch (NoClassDefFoundError e) {
            // ignore, might happen if javax.inject is not on the classpath
}
annotationMetadata = metadata;
}
@Override
public boolean isContainerType() {
return false;
}
@Override
public boolean isEnabled(@NonNull BeanContext context, @Nullable BeanResolutionContext resolutionContext) {
return isPresent();
}
@Override
public String getBeanDefinitionName() {
return getClass().getName();
}
@Override
public BeanDefinition<T> load() {
return this;
}
@Override
public boolean isPresent() {
return false;
}
/**
* Builds a provider implementation.
*
* @param resolutionContext The resolution context
* @param context The context
* @param argument The argument
* @param qualifier The qualifier
* @param singleton Whether the bean is a singleton
* @return The provider
*/
protected abstract @NonNull T buildProvider(
@NonNull BeanResolutionContext resolutionContext,
@NonNull BeanContext context,
@NonNull Argument<Object> argument,
@Nullable Qualifier<Object> qualifier,
boolean singleton);
@Override
public T build(
BeanResolutionContext resolutionContext,
BeanContext context,
BeanDefinition<T> definition) throws BeanInstantiationException {
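        // Resolve the provider's type argument (T in Provider<T>) from the current injection point,
        // then build a provider for that type, honouring any qualifier present at the injection point.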
final BeanResolutionContext.Segment<?> segment = resolutionContext.getPath().currentSegment().orElse(null);
if (segment != null) {
final InjectionPoint<?> injectionPoint = segment.getInjectionPoint();
if (injectionPoint instanceof ArgumentCoercible) {
Argument<?> injectionPointArgument = ((ArgumentCoercible<?>) injectionPoint)
.asArgument();
Argument<?> resolveArgument = injectionPointArgument;
if (resolveArgument.isOptional()) {
resolveArgument = resolveArgument.getFirstTypeVariable().orElse(Argument.OBJECT_ARGUMENT);
}
@SuppressWarnings("unchecked") Argument<Object> argument =
(Argument<Object>) resolveArgument
.getFirstTypeVariable()
.orElse(null);
if (argument != null) {
Qualifier<Object> qualifier = (Qualifier<Object>) resolutionContext.getCurrentQualifier();
if (qualifier == null && segment.getDeclaringType().isIterable()) {
final Object n = resolutionContext.getAttribute(Named.class.getName());
if (n != null) {
qualifier = Qualifiers.byName(n.toString());
}
}
boolean hasBean = context.containsBean(argument, qualifier);
if (hasBean) {
return buildProvider(
resolutionContext,
context,
argument,
qualifier,
definition.isSingleton()
);
} else {
if (injectionPointArgument.isOptional()) {
return (T) Optional.empty();
} else if (injectionPointArgument.isNullable()) {
throw new DisabledBeanException("Nullable bean doesn't exist");
} else {
if (qualifier instanceof AnyQualifier || isAllowEmptyProviders(context)) {
return buildProvider(
resolutionContext,
context,
argument,
qualifier,
definition.isSingleton()
);
} else {
throw new NoSuchBeanException(argument, qualifier);
}
}
}
}
}
}
throw new UnsupportedOperationException("Cannot inject provider for Object type");
}
/**
* Return whether missing providers are allowed for this implementation. If {@code false} a {@link io.micronaut.context.exceptions.NoSuchBeanException} is thrown.
* @param context The context
* @return Returns {@code true} if missing providers are allowed
*/
protected boolean isAllowEmptyProviders(BeanContext context) {
return context.getContextConfiguration().isAllowEmptyProviders();
}
@Override
public final boolean isAbstract() {
return false;
}
@Override
public final boolean isSingleton() {
return false;
}
@Override
@NonNull
public final List<Argument<?>> getTypeArguments(Class<?> type) {
if (type == getBeanType()) {
return getTypeArguments();
}
return Collections.emptyList();
}
@Override
@NonNull
public final List<Argument<?>> getTypeArguments() {
return Collections.singletonList(TYPE_VARIABLE);
}
@Override
public AnnotationMetadata getAnnotationMetadata() {
return annotationMetadata;
}
@Override
public Qualifier<T> getDeclaredQualifier() {
return null;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
return o != null && getClass() == o.getClass();
}
@Override
public int hashCode() {
return getClass().hashCode();
}
}
| 36.906383 | 166 | 0.6104 |
665815388270acc3b03b7801dd2543909ac0f97d | 681 | package me.konskon.explodingbed.events;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerItemConsumeEvent;
public class TheConsoleResponds implements Listener {
@EventHandler
public void theConsoleResponds(PlayerItemConsumeEvent e) {
Player p = e.getPlayer();
if(e.getItem().getType()== Material.MUSHROOM_STEW) {
p.setHealth(20.0);
}
}
}
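// A minimal registration sketch (assumption, not part of the plugin shown above): the listener
// only receives PlayerItemConsumeEvent once it is registered with Bukkit's PluginManager,
// typically from the owning plugin's onEnable(). The plugin class name is hypothetical.
//
//     public final class ExplodingBedPlugin extends org.bukkit.plugin.java.JavaPlugin {
//         @Override
//         public void onEnable() {
//             getServer().getPluginManager().registerEvents(new TheConsoleResponds(), this);
//         }
//     }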
| 21.967742 | 63 | 0.709251 |
1b218443f709413b3eb27ac2a472f61ac6ba4b13 | 1,785 | package dzaima.ui.node.types;
import dzaima.ui.gui.*;
import dzaima.ui.node.*;
import dzaima.ui.node.ctx.Ctx;
import dzaima.ui.node.prop.Prop;
import dzaima.utils.*;
public class VNode extends FrameNode {
private int pad;
public VNode(Ctx ctx, String[] ks, Prop[] vs) {
super(ctx, ks, vs);
}
public void propsUpd() { super.propsUpd();
pad = gc.pxD(this, "pad", 0);
}
public int fillW() {
if (ch.sz==0) return 0;
return Solve.vMinW(ch);
}
public int fillH(int w) {
if (ch.sz==0) return 0;
return Solve.vMinH(ch, w) + pad*(ch.sz-1);
}
public void drawCh(Graphics g, boolean full) {
if (g.clip==null || ch.sz<10) { super.drawCh(g, full); return; }
for (int i = Solve.vBinSearch(ch, g.clip.sy); i<ch.sz; i++) {
Node c = ch.get(i);
if (c.dy+c.h < g.clip.sy) continue;
if (c.dy > g.clip.ey) break;
c.draw(g, full);
}
}
public Node findCh(int x, int y) {
if (ch.sz<20) return super.findCh(x, y);
Node c = ch.get(Solve.vBinSearch(ch, y));
if (XY.inWH(x, y, c.dx, c.dy, c.w, c.h)) return c;
return null;
}
public Node nearestCh(int x, int y) {
return Solve.vFindNearest(ch, x, y);
}
public void resized() {
if (ch.sz==0) return;
int xal = xalign();
int yal = yalign();
int padTotal = pad*(ch.sz - 1);
int[] div = Solve.solve(ch, h-padTotal, w, true);
int th = padTotal; for (int i = 0; i<ch.sz; i++) th+= div[i];
int y = align(yal, h, th);
boolean r = th!=h;
for (int i = 0; i < ch.sz; i++) {
Node c = ch.get(i);
int cminH = div[i];
int nw = Math.min(c.maxW(), w);
int x = align(xal, w, nw);
c.resize(nw, cminH, x, y);
r|= nw!=w;
y+= cminH+pad;
}
if (r) mRedraw();
}
}
| 25.140845 | 68 | 0.54958 |
49d51268b0ccb259b180c025175d16079b0ab500 | 3,459 | package com.easysoft.sma;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.PropertySource;
import org.springframework.stereotype.Component;
@Component
@PropertySource(value = "classpath:custome.properties", ignoreResourceNotFound = true, encoding = "UTF-8")
@ConfigurationProperties(prefix = "com.easysoft.sma")
public class CustomProperties {
private int jpaBatchPageSize;
private int excelRowAccessWindowSize;
private int importPageSize;
private String importPostIdColumn;
private String importConsigneeTelephoneColumn;
private String importSpecColumn;
private int exportPageSize;
private String defaultSender;
private String defaultSenderTelephone;
private String defaultSenderAddress;
private String defaultConsignee;
private String defaultConsigneeTelephone;
private String defaultConsigneeAddress;
public int getJpaBatchPageSize() {
return jpaBatchPageSize;
}
public void setJpaBatchPageSize(int jpaBatchPageSize) {
this.jpaBatchPageSize = jpaBatchPageSize;
}
public int getImportPageSize() {
return importPageSize;
}
public void setImportPageSize(int importPageSize) {
this.importPageSize = importPageSize;
}
public int getExportPageSize() {
return exportPageSize;
}
public void setExportPageSize(int exportPageSize) {
this.exportPageSize = exportPageSize;
}
public int getExcelRowAccessWindowSize() {
return excelRowAccessWindowSize;
}
    public void setExcelRowAccessWindowSize(int excelRowAccessWindowSize) {
        this.excelRowAccessWindowSize = excelRowAccessWindowSize;
}
public String getDefaultSender() {
return defaultSender;
}
public void setDefaultSender(String defaultSender) {
this.defaultSender = defaultSender;
}
public String getDefaultSenderTelephone() {
return defaultSenderTelephone;
}
public void setDefaultSenderTelephone(String defaultSenderTelephone) {
this.defaultSenderTelephone = defaultSenderTelephone;
}
public String getDefaultConsignee() {
return defaultConsignee;
}
public void setDefaultConsignee(String defaultConsignee) {
this.defaultConsignee = defaultConsignee;
}
public String getDefaultConsigneeTelephone() {
return defaultConsigneeTelephone;
}
public void setDefaultConsigneeTelephone(String defaultConsigneeTelephone) {
this.defaultConsigneeTelephone = defaultConsigneeTelephone;
}
public String getDefaultConsigneeAddress() {
return defaultConsigneeAddress;
}
public void setDefaultConsigneeAddress(String defaultConsigneeAddress) {
this.defaultConsigneeAddress = defaultConsigneeAddress;
}
public String getDefaultSenderAddress() {
return defaultSenderAddress;
}
public void setDefaultSenderAddress(String defaultSenderAddress) {
this.defaultSenderAddress = defaultSenderAddress;
}
public String getImportPostIdColumn() {
return importPostIdColumn;
}
public void setImportPostIdColumn(String importPostIdColumn) {
this.importPostIdColumn = importPostIdColumn;
}
public String getImportConsigneeTelephoneColumn() {
return importConsigneeTelephoneColumn;
}
public void setImportConsigneeTelephoneColumn(String importConsigneeTelephoneColumn) {
this.importConsigneeTelephoneColumn = importConsigneeTelephoneColumn;
}
public String getImportSpecColumn() {
return importSpecColumn;
}
public void setImportSpecColumn(String importSpecColumn) {
this.importSpecColumn = importSpecColumn;
}
}
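// A minimal configuration sketch, assuming Spring Boot's relaxed binding: with the prefix
// "com.easysoft.sma", entries in custome.properties map onto the fields above. The values
// below are illustrative only, not taken from the project.
//
//     com.easysoft.sma.jpa-batch-page-size=500
//     com.easysoft.sma.excel-row-access-window-size=100
//     com.easysoft.sma.import-page-size=200
//     com.easysoft.sma.export-page-size=200
//     com.easysoft.sma.default-sender=Warehouse A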
| 24.188811 | 106 | 0.813241 |
157e6694c4f028dd8255cce510158a83ba02f351 | 4,365 | /**
* Copyright 2019 ForgeRock AS.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.forgerock.openbanking.aspsp.as.migration.tppschema;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.forgerock.openbanking.aspsp.as.configuration.OpenBankingDirectoryConfiguration;
import com.forgerock.openbanking.aspsp.as.service.registrationrequest.DirectorySoftwareStatementFactory;
import com.forgerock.openbanking.model.DirectorySoftwareStatement;
import com.forgerock.openbanking.model.Tpp;
import com.github.mongobee.changeset.ChangeLog;
import com.github.mongobee.changeset.ChangeSet;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.util.StopWatch;
import java.io.IOException;
import java.util.List;
@ChangeLog
@Slf4j
public class MongoTppSchemaChangeLog {
private ObjectMapper objectMapper;
@Autowired
public MongoTppSchemaChangeLog(){
this.objectMapper = new ObjectMapper();
objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
}
@ChangeSet(order = "001", id = "tpp-to-multi-software-statement-tpp", author = "Jamie Bowen")
public void migrateTpps(MongoTemplate mongoTemplate) throws IOException {
StopWatch elapsedTime = new StopWatch();
elapsedTime.start();
long docsUpdated = 0;
long docsWithNoAuthorisationNumber = 0;
log.info("-----------------------------------------------------------------------");
log.info("Migrating Tpp data to have full softwareStatement info");
OpenBankingDirectoryConfiguration openBankingDirectoryConfiguration =
new OpenBankingDirectoryConfiguration();
openBankingDirectoryConfiguration.issuerId = "OpenBanking Ltd";
DirectorySoftwareStatementFactory directorySoftwareStatementFactory =
new DirectorySoftwareStatementFactory(openBankingDirectoryConfiguration);
Query query = new Query();
List<Tpp> tpps = mongoTemplate.find(query, Tpp.class);
log.info("Found {} tpps", tpps.size());
for(Tpp tpp: tpps){
String ssa = tpp.getSsa();
DirectorySoftwareStatement directorySoftwareStatement =
directorySoftwareStatementFactory.getSoftwareStatementFromJsonString(ssa, objectMapper);
String authorisationNumber = directorySoftwareStatement.getAuthorisationNumber();
if(authorisationNumber == null || authorisationNumber.isBlank()){
log.error("Failed to set authorisation number of document id '{}'", tpp.getId());
docsWithNoAuthorisationNumber++;
} else {
tpp.setAuthorisationNumber(authorisationNumber);
}
tpp.setSoftwareId(directorySoftwareStatement.getSoftware_client_id());
tpp.setDirectorySoftwareStatement(directorySoftwareStatement);
mongoTemplate.save(tpp);
docsUpdated++;
}
elapsedTime.stop();
log.info("Upgraded {} documents in {} seconds.", docsUpdated, elapsedTime.getTotalTimeSeconds());
log.info("Failed to create authorisationNumbers for {} documents", docsWithNoAuthorisationNumber);
log.info("-----------------------------------------------------------------------");
log.info("Finished updating Tpps to have full software statement information");
}
}
| 45 | 108 | 0.707446 |
b182b40c60a247c42a780a462d99907121eaa6d5 | 262 | package org.bricolages.streaming.event;
import lombok.*;
import lombok.extern.slf4j.Slf4j;
@AllArgsConstructor
@Slf4j
public class LogQueue {
final SQSQueue queue;
public void send(LogQueueEvent e) {
queue.sendMessage(e.messageBody());
}
}
| 18.714286 | 43 | 0.725191 |
dd1bc24eb8ba7fde824a6b4876e62706cfe0435e | 2,330 | /*
* Copyright 2012 Benjamin Glatzel <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.miniion.pathfinder;
/**
* Created with IntelliJ IDEA.
* User: Overdhose
* Date: 10/05/12
* Time: 23:00
* To change this template use File | Settings | File Templates.
*/
import java.util.List;
import javax.vecmath.Vector3f;
import org.terasology.world.WorldProvider;
import org.terasology.world.block.Block;
public abstract class Pathfinder {
protected WorldProvider world;
protected boolean pathSmoothing = false;
public Pathfinder(WorldProvider provider) {
this.world = provider;
}
public boolean isPathSmoothing() {
return pathSmoothing;
}
public void setPathSmoothing(boolean pathSmoothing) {
this.pathSmoothing = pathSmoothing;
}
protected List<Vector3f> smoothPath(List<Vector3f> path) {
// @TODO Path smoothing algorithm
return path;
}
protected boolean isPassable(Block block) {
// @todo add more complicated check
return block.isPenetrable();
}
protected boolean isPassable(int x, int y, int z) {
// @todo add more complicated check
return isPassable(world.getBlock(x, y, z));
}
protected boolean isPassable(Vector3f vec) {
// @todo add more complicated check
return isPassable(world.getBlock(vec));
}
protected boolean isWalkable(int x, int y, int z) {
// @todo consider creature height
return (isPassable(x, y + 1, z)) && (!isPassable(x, y, z));
}
protected boolean isWalkable(Vector3f vec) {
return (isPassable(new Vector3f(vec.x, vec.y + 1, vec.z))) && (!isPassable(vec));
}
public abstract List<Vector3f> findPath(Vector3f from, Vector3f to);
}
| 27.738095 | 89 | 0.682403 |
7e80b495ccc6309ff2926e9de98cd08274c3cb0b | 1,249 | package com.wjiec.springaio.rpc;
import com.wjiec.springaio.rpc.service.EchoService;
import com.wjiec.springaio.rpc.service.EchoServiceImpl;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.remoting.rmi.RmiServiceExporter;
@Configuration
@SuppressWarnings("deprecation")
public class RmiServerApplication {
public static void main(String[] args) {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.register(RmiServerApplication.class);
context.register(EchoServiceImpl.class);
context.refresh();
// do nothing
}
@Bean
public RmiServiceExporter rmiServiceExporter(EchoService echoService) {
RmiServiceExporter rmiServiceExporter = new RmiServiceExporter();
rmiServiceExporter.setService(echoService);
rmiServiceExporter.setServiceName("EchoService");
rmiServiceExporter.setServiceInterface(EchoService.class);
rmiServiceExporter.setRegistryPort(1088);
return rmiServiceExporter;
}
}
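// A minimal client-side sketch, assuming the exporter above is reachable on localhost: Spring's
// (equally deprecated) RmiProxyFactoryBean is the usual counterpart to RmiServiceExporter.
// The class name and service URL host are assumptions.
@Configuration
@SuppressWarnings("deprecation")
class RmiClientConfigSketch {
    @Bean
    public org.springframework.remoting.rmi.RmiProxyFactoryBean echoServiceProxy() {
        org.springframework.remoting.rmi.RmiProxyFactoryBean proxy =
                new org.springframework.remoting.rmi.RmiProxyFactoryBean();
        // Must match the serviceName and registryPort configured by the exporter above
        proxy.setServiceUrl("rmi://localhost:1088/EchoService");
        proxy.setServiceInterface(EchoService.class);
        return proxy;
    }
}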
| 35.685714 | 95 | 0.755805 |
725edaf5632dc40e1409a0c461edc905e8b3d542 | 1,093 | package com.tasly.score.core;
import com.tasly.score.core.api.TCCScoreService;
import org.bytesoft.compensable.Compensable;
import org.bytesoft.compensable.CompensableCancel;
import org.bytesoft.compensable.CompensableConfirm;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.bind.annotation.RestController;
import lombok.extern.slf4j.Slf4j;
/**
* Created by dulei on 18/1/9.
*/
@Slf4j
@Compensable(interfaceClass = TCCScoreService.class,simplified = true)
@RestController
public class TCCScoreServiceImpl implements TCCScoreService {
@Override
@Transactional
public String reduceScore(Integer score) {
log.info("try reduce Score ");
return "3";
}
@CompensableConfirm
@Transactional
public String reduceScoreConfirm(Integer score) {
log.info("reduceScoreConfirm : [{}]",score);
return "4";
}
@CompensableCancel
@Transactional
public String reduceScoreCancle(Integer score) {
log.info("reduceScoreCancle : [{}]",score);
return "5";
}
}
| 22.770833 | 70 | 0.722781 |
8806dc904f07ea5e4c533baa2c0bb8fe951246a2 | 6,126 | package com.folioreader.ui.adapter;
import android.content.Context;
import android.graphics.Color;
import android.graphics.Typeface;
import android.text.SpannableString;
import android.text.style.StyleSpan;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.core.content.ContextCompat;
import androidx.recyclerview.widget.RecyclerView;
import com.folioreader.Config;
import com.folioreader.R;
import com.folioreader.model.dictionary.*;
import com.folioreader.ui.base.DictionaryCallBack;
import com.folioreader.util.AppUtil;
import java.util.ArrayList;
import java.util.List;
/**
* @author gautam chibde on 4/7/17.
*/
public class DictionaryAdapter extends RecyclerView.Adapter<DictionaryAdapter.DictionaryHolder> {
private List<DictionaryResults> results;
private Context context;
private DictionaryCallBack callBack;
private static Config config;
public DictionaryAdapter(Context context, DictionaryCallBack callBack) {
this.results = new ArrayList<>();
this.context = context;
this.callBack = callBack;
config = AppUtil.getSavedConfig(context);
}
@Override
public DictionaryHolder onCreateViewHolder(ViewGroup parent, int viewType) {
return new DictionaryHolder(LayoutInflater.from(parent.getContext())
.inflate(R.layout.item_dictionary, parent, false));
}
@Override
@SuppressWarnings("PMD.InefficientEmptyStringCheck")
public void onBindViewHolder(DictionaryHolder holder, int position) {
final DictionaryResults res = results.get(position);
if (res.getPartOfSpeech() != null) {
int wordLength = res.getHeadword().length();
SpannableString spannableString = new SpannableString(res.getHeadword() + " - " + res.getPartOfSpeech());
spannableString.setSpan(new StyleSpan(Typeface.BOLD), 0, wordLength, 0);
spannableString.setSpan(new StyleSpan(Typeface.ITALIC), wordLength + 2, spannableString.length(), 0);
holder.name.setText(spannableString);
} else {
holder.name.setTypeface(Typeface.DEFAULT_BOLD);
holder.name.setText(res.getHeadword());
}
StringBuilder def = new StringBuilder();
StringBuilder exp = new StringBuilder();
if (res.getSenses() != null) {
for (Senses senses : res.getSenses()) {
if (senses.getDefinition() != null) {
for (String s : senses.getDefinition()) {
def.append("\u2022 ").append(s).append('\n');
}
}
}
for (Senses senses : res.getSenses()) {
if (senses.getExamples() != null) {
for (Example s : senses.getExamples()) {
exp.append("\u2022 ").append(s.getText()).append('\n');
}
}
}
}
if (!def.toString().trim().isEmpty()) {
def.insert(0, "Definition\n");
holder.definition.setText(def.toString());
} else {
holder.definition.setVisibility(View.GONE);
}
if (!exp.toString().trim().isEmpty()) {
exp.insert(0, "Example\n");
holder.example.setText(exp.toString());
} else {
holder.example.setVisibility(View.GONE);
}
// if (res.getPronunciations() != null) {
// final String url = getAudioUrl(res.getPronunciations());
// if (url == null) {
// holder.sound.setVisibility(View.GONE);
// }
// }
// holder.sound.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// Log.i("DictionaryAdapter", "clicked");
// if (res.getPronunciations() != null) {
// final String url = getAudioUrl(res.getPronunciations());
// callBack.playMedia(url);
// }
// }
// });
}
private String getAudioUrl(List<Pronunciations> pronunciations) {
if (!pronunciations.isEmpty()
&& pronunciations.get(0).getAudio() != null
&& !pronunciations.get(0).getAudio().isEmpty()) {
Audio audio = pronunciations.get(0).getAudio().get(0);
if (audio.getUrl() != null) {
return audio.getUrl();
}
}
return null;
}
public void setResults(List<DictionaryResults> resultsList) {
if (resultsList != null && !resultsList.isEmpty()) {
results.clear();
results.addAll(resultsList);
notifyDataSetChanged();
}
}
public void clear() {
results.clear();
notifyItemRangeRemoved(0, results.size());
}
@Override
public int getItemCount() {
return results.size();
}
public static class DictionaryHolder extends RecyclerView.ViewHolder {
private TextView name, definition, example;
//TODO private ImageButton sound;
public DictionaryHolder(View itemView) {
super(itemView);
/* name = (TextView) itemView.findViewById(R.id.tv_word);
//sound = (ImageButton) itemView.findViewById(R.id.ib_speak);
definition = (TextView) itemView.findViewById(R.id.tv_definition);
example = (TextView) itemView.findViewById(R.id.tv_examples);
View rootView = itemView.findViewById(R.id.rootView);
if (config.isNightMode()) {
rootView.setBackgroundColor(Color.BLACK);
int nightTextColor = ContextCompat.getColor(itemView.getContext(),
R.color.night_text_color);
name.setTextColor(nightTextColor);
definition.setTextColor(nightTextColor);
example.setTextColor(nightTextColor);
} else {
rootView.setBackgroundColor(Color.WHITE);
}*/
}
}
}
| 36.248521 | 117 | 0.597127 |
d51bba04a0e751047020d541c7788602a0758f48 | 2,681 | //
// ========================================================================
// Copyright (c) 1995-2020 Mort Bay Consulting Pty Ltd and others.
//
// This program and the accompanying materials are made available under
// the terms of the Eclipse Public License 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0
//
// This Source Code may also be made available under the following
// Secondary Licenses when the conditions for such availability set
// forth in the Eclipse Public License, v. 2.0 are satisfied:
// the Apache License v2.0 which is available at
// https://www.apache.org/licenses/LICENSE-2.0
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
// ========================================================================
//
package org.eclipse.jetty.util.log;
import java.util.HashSet;
import java.util.Set;
/**
* A try-with-resources compatible layer for {@link StdErrLog#setHideStacks(boolean) hiding stacktraces} within the scope of the <code>try</code> block when
* logging with {@link StdErrLog} implementation.
* <p>
* Use of other logging implementation cause no effect when using this class
* <p>
* Example:
*
* <pre>
* try (StacklessLogging scope = new StacklessLogging(EventDriver.class,Noisy.class))
* {
* doActionThatCausesStackTraces();
* }
* </pre>
*/
public class StacklessLogging implements AutoCloseable
{
private final Set<StdErrLog> squelched = new HashSet<>();
public StacklessLogging(Class<?>... classesToSquelch)
{
for (Class<?> clazz : classesToSquelch)
{
Logger log = Log.getLogger(clazz);
// only operate on loggers that are of type StdErrLog
if (log instanceof StdErrLog && !log.isDebugEnabled())
{
StdErrLog stdErrLog = ((StdErrLog)log);
if (!stdErrLog.isHideStacks())
{
stdErrLog.setHideStacks(true);
squelched.add(stdErrLog);
}
}
}
}
public StacklessLogging(Logger... logs)
{
for (Logger log : logs)
{
// only operate on loggers that are of type StdErrLog
if (log instanceof StdErrLog && !log.isDebugEnabled())
{
StdErrLog stdErrLog = ((StdErrLog)log);
if (!stdErrLog.isHideStacks())
{
stdErrLog.setHideStacks(true);
squelched.add(stdErrLog);
}
}
}
}
@Override
public void close()
{
for (StdErrLog log : squelched)
{
log.setHideStacks(false);
}
}
}
| 30.816092 | 156 | 0.569937 |
e99c8e44d9986d5f81df5555ca41603282124775 | 5,726 | /*
* Copyright 2020 HPB Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hpb.bc.entity;
public class ParamInfo extends BaseEntity {
/**
*
* This field was generated by MyBatis Generator.
* This field corresponds to the database column param_info.param_id
*
* @mbg.generated
*/
private String paramId;
/**
*
* This field was generated by MyBatis Generator.
* This field corresponds to the database column param_info.gas_limit
*
* @mbg.generated
*/
private String gasLimit;
/**
*
* This field was generated by MyBatis Generator.
* This field corresponds to the database column param_info.gas_price
*
* @mbg.generated
*/
private String gasPrice;
/**
*
* This field was generated by MyBatis Generator.
* This field corresponds to the database column param_info.value
*
* @mbg.generated
*/
private String value;
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column param_info.param_id
*
* @return the value of param_info.param_id
*
* @mbg.generated
*/
public String getParamId() {
return paramId;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column param_info.param_id
*
* @param paramId the value for param_info.param_id
*
* @mbg.generated
*/
public void setParamId(String paramId) {
this.paramId = paramId == null ? null : paramId.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column param_info.gas_limit
*
* @return the value of param_info.gas_limit
*
* @mbg.generated
*/
public String getGasLimit() {
return gasLimit;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column param_info.gas_limit
*
* @param gasLimit the value for param_info.gas_limit
*
* @mbg.generated
*/
public void setGasLimit(String gasLimit) {
this.gasLimit = gasLimit == null ? null : gasLimit.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column param_info.gas_price
*
* @return the value of param_info.gas_price
*
* @mbg.generated
*/
public String getGasPrice() {
return gasPrice;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column param_info.gas_price
*
* @param gasPrice the value for param_info.gas_price
*
* @mbg.generated
*/
public void setGasPrice(String gasPrice) {
this.gasPrice = gasPrice == null ? null : gasPrice.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column param_info.value
*
* @return the value of param_info.value
*
* @mbg.generated
*/
public String getValue() {
return value;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column param_info.value
*
* @param value the value for param_info.value
*
* @mbg.generated
*/
public void setValue(String value) {
this.value = value == null ? null : value.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table param_info
*
* @mbg.generated
*/
@Override
public boolean equals(Object that) {
if (this == that) {
return true;
}
if (that == null) {
return false;
}
if (getClass() != that.getClass()) {
return false;
}
ParamInfo other = (ParamInfo) that;
return (this.getParamId() == null ? other.getParamId() == null : this.getParamId().equals(other.getParamId()))
&& (this.getGasLimit() == null ? other.getGasLimit() == null : this.getGasLimit().equals(other.getGasLimit()))
&& (this.getGasPrice() == null ? other.getGasPrice() == null : this.getGasPrice().equals(other.getGasPrice()))
&& (this.getValue() == null ? other.getValue() == null : this.getValue().equals(other.getValue()));
}
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table param_info
*
* @mbg.generated
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((getParamId() == null) ? 0 : getParamId().hashCode());
result = prime * result + ((getGasLimit() == null) ? 0 : getGasLimit().hashCode());
result = prime * result + ((getGasPrice() == null) ? 0 : getGasPrice().hashCode());
result = prime * result + ((getValue() == null) ? 0 : getValue().hashCode());
return result;
}
} | 29.822917 | 122 | 0.615264 |
e3e6e090ad48a7bba4b27f98ff11979f847cad4d | 102 | package com.app.listener;
public interface ListenerAddToDabase {
void notesAddedToDatabase();
}
| 14.571429 | 38 | 0.77451 |
57d42f4c73885731528346a78993e5ce5a6a52ac | 3,105 | package com.iankoulski.problems.ccibook.tg;
import com.iankoulski.problems.ccibook.tg.*;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runners.model.TestClass;
/**
* Unit test for simple App.
*/
public class BuildOrderTest extends TestClass
{
public BuildOrderTest( )
{
super( BuildOrderTest.class );
}
// test main
@Test
public void testApp()
{
System.out.println("App test:");
com.iankoulski.problems.ccibook.tg.BuildOrder.main(null);
Assert.assertTrue(true);
}
public Graph<String> getTestGraph(boolean loop){
/*
loop = false
a k l
/ | \ / |
b c d m
/\ | /
e f-g h
/ \
i j
loop = true
a k l
/ | \ / |
b c d m
/\ | /
e f-g h
/ \
i <-> j
*/
String[] projects = new String[] {"a","b","c","d","e","f","g","h","i","j","k","l","m"};
String[][] dependencies = null;
if (loop==false){
dependencies = new String[][] {{"a","b"},{"a","c"},{"a","d"},{"b","e"},
{"b","f"},{"f","g"},{"c","g"},{"k","d"},
{"l","m"},{"d","h"},{"h","i"},{"h","j"}};
}else{
dependencies = new String[][] {{"a","b"},{"a","c"},{"a","d"},{"b","e"},
{"b","f"},{"f","g"},{"c","g"},{"k","d"},
{"l","m"},{"d","h"},{"h","i"},{"h","j"},
{"i","j"},{"j","i"}};
}
BuildOrder bo = new BuildOrder();
Graph<String> graph = bo.buildGraph(projects,dependencies);
return graph;
}
@Test
public void testNoLoopByRoot()
{
//Graph<String> g = getTestData(false);
BuildOrder bo = new BuildOrder();
Graph<String> g = getTestGraph(false);
Queue<String> buildOrder = bo.getBuildOrderByRoot(g);
Assert.assertEquals("a<-k<-l<-b<-c<-d<-m<-e<-f<-h<-g<-i<-j",buildOrder.getString());
}
@Test
public void testLoopByRoot()
{
Graph<String> g = getTestGraph(true);
BuildOrder bo = new BuildOrder();
Queue<String> buildOrder = bo.getBuildOrderByRoot(g);
Assert.assertNull(buildOrder);
}
@Test
public void testNoLoopByDFS()
{
Graph<String> g = getTestGraph(false);
BuildOrder bo = new BuildOrder();
Stack<String> buildOrder = bo.getBuildOrderByDFS(g);
Assert.assertEquals("l->m->k->a->d->h->j->i->c->b->f->g->e",buildOrder.getString());
}
@Test
public void testLoopByDFS()
{
Graph<String> g = getTestGraph(true);
BuildOrder bo = new BuildOrder();
Stack<String> buildOrder = bo.getBuildOrderByDFS(g);
Assert.assertNull(buildOrder);
}
}
| 29.018692 | 95 | 0.445411 |
509ced21059b3bc8a1debb67b3113bcf5e54fc25 | 1,009 | package uk.gov.justice.digital.hmpps.keyworker.model;
import java.util.HashMap;
import java.util.Map;
public enum DeallocationReason {
OVERRIDE("OVERRIDE"),
RELEASED("RELEASED"),
KEYWORKER_STATUS_CHANGE("KEYWORKER_STATUS_CHANGE"),
TRANSFER("TRANSFER"),
MERGED("MERGED"),
MISSING("MISSING"),
DUP("DUPLICATE"),
MANUAL("MANUAL");
private final String reasonCode;
DeallocationReason(final String reasonCode) {
this.reasonCode = reasonCode;
}
public String getReasonCode() {
return reasonCode;
}
@Override
public String toString() {
return reasonCode;
}
// Reverse lookup
private static final Map<String, DeallocationReason> lookup = new HashMap<>();
static {
for (final var reason : DeallocationReason.values()) {
lookup.put(reason.reasonCode, reason);
}
}
public static DeallocationReason get(final String reasonCode) {
return lookup.get(reasonCode);
}
}
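// A minimal usage sketch: the static lookup resolves by reason code, which differs from the
// enum constant name for DUP ("DUPLICATE"). The class name is illustrative only.
class DeallocationReasonLookupSketch {
    static void demo() {
        DeallocationReason byCode = DeallocationReason.get("DUPLICATE"); // -> DUP
        DeallocationReason byName = DeallocationReason.valueOf("DUP");   // -> DUP
        System.out.println(byCode == byName);                            // true
    }
}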
| 22.931818 | 82 | 0.656095 |
1fb7a8d80c9ddba959388e01020a634ae8655025 | 2,358 | package id.placeholderlabs.laundry.util;
import android.content.Context;
import android.content.SharedPreferences;
/**
* Created by alfredo on 4/8/2018.
*/
public class SharedPreferenceManager {
public static final String APP_NAME = "Laundryan";
public static final String APP_USERNAME = "AppUsername";
public static final String APP_EMAIL = "AppEmail";
public static final String APP_DEVICE_TOKEN = "AppDeviceToken";
public static final String APP_ACCESS_TOKEN = "AppAccessToken";
public static final String APP_SALDO = "AppSaldo";
public static final String APP_LOGIN_STATE = "AppLoginState";
SharedPreferences sharedPreference;
SharedPreferences.Editor sharedPreferenceEditor;
public SharedPreferenceManager(Context context){
sharedPreference = context.getSharedPreferences(APP_NAME, Context.MODE_PRIVATE);
sharedPreferenceEditor = sharedPreference.edit();
}
public void saveString(String key, String value){
sharedPreferenceEditor.putString(key, value);
sharedPreferenceEditor.commit();
}
public void saveInt(String key, int value){
sharedPreferenceEditor.putInt(key, value);
sharedPreferenceEditor.commit();
}
public void saveBoolean(String key, boolean value){
sharedPreferenceEditor.putBoolean(key, value);
sharedPreferenceEditor.commit();
}
public void saveFloat(String key, float value){
sharedPreferenceEditor.putFloat(key, value);
sharedPreferenceEditor.commit();
}
public String getAppUsername(){
return sharedPreference.getString(APP_USERNAME, "");
}
public String getAppEmail(){
return sharedPreference.getString(APP_EMAIL, "");
}
public Boolean getLoginState(){
return sharedPreference.getBoolean(APP_LOGIN_STATE, false);
}
public String getAppAccessToken() {
return sharedPreference.getString(APP_ACCESS_TOKEN, "");
}
public Float getAppSaldo() {
return sharedPreference.getFloat(APP_SALDO, (float) 0.0);
}
public void clear() {
saveString(APP_USERNAME, "");
saveString(APP_ACCESS_TOKEN, "");
saveString(APP_DEVICE_TOKEN, "");
saveString(APP_EMAIL, "");
saveBoolean(APP_LOGIN_STATE, false);
saveFloat(APP_SALDO, (float) 0.0);
}
} | 29.848101 | 88 | 0.699746 |
6288ecd423e405e9922ddcbe743b3fda1695a91a | 3,260 | package io.github.daichim.jach.internal;
import io.github.daichim.jach.channel.BufferedChannel;
import io.github.daichim.jach.exception.ClosedChannelException;
import io.github.daichim.jach.exception.NoSuchChannelElementException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.function.Consumer;
/**
* An {@link Iterator} for {@link BufferedChannel}. It is a blocking iterator, where if there is no
* message on the channel, the iterator will block until a new message is read in that thread or the
* channel is closed.
*/
public class ChannelIterator<T> implements Iterator<T> {
private final BufferedChannel<T> parentChannel;
private volatile boolean done;
public ChannelIterator(BufferedChannel<T> parentChannel) {
this.parentChannel = parentChannel;
this.done = false;
}
/**
* Returns {@literal true} if the parent channel has not been closed, {@literal false}
* otherwise.
*
* @return {@literal true} if the parent channel has not been closed, {@literal false}
* otherwise.
*/
@Override
synchronized public boolean hasNext() {
return !this.done || parentChannel.canRead();
}
/**
* Gets the next message for this thread from the underlying channel. If no message is
* available, this calls blocks until a new message is read in this thread or the channel is
* closed.
*
* @return The next message that is read from the channel.
*
* @throws NoSuchChannelElementException If the channel is closed before the next message could
* be read successfully.
*/
@Override
public T next() throws NoSuchChannelElementException {
if (!hasNext()) {
throw new NoSuchChannelElementException();
}
try {
return parentChannel.read();
} catch (ClosedChannelException | IllegalStateException ex) {
this.done = true;
throw new NoSuchChannelElementException();
} catch (NullPointerException ex) {
return next();
}
}
/**
* Remove is unsupported operation for a channel. This always throws an {@link
* UnsupportedOperationException}.
*
* @throws UnsupportedOperationException Always.
*/
@Override
public void remove() throws UnsupportedOperationException {
throw new UnsupportedOperationException("Cannot remove from channel");
}
/**
* Runs the give action for each of the remaining messages that are read in this thread from the
* underlying channel, until the channel is closed. When the channel is closed, this method
* returns without throwing the {@link NoSuchChannelElementException} to the caller.
*/
@Override
public void forEachRemaining(Consumer<? super T> action) {
try {
while (this.hasNext()) {
T val = this.next();
action.accept(val);
}
} catch (NoSuchElementException ex) {
}
}
/**
* Mark the iterator as done (i.e., the underlying channel has been closed).
*/
synchronized public void markDone() {
this.done = true;
}
}
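// A minimal usage sketch: draining a channel on a consumer thread. The BufferedChannel is
// assumed to be created, written to, and eventually closed elsewhere; the class name is
// illustrative only.
class ChannelIteratorUsageSketch {
    static void drain(BufferedChannel<String> channel) {
        ChannelIterator<String> messages = new ChannelIterator<>(channel);
        // Blocks between messages; returns normally once the channel is closed
        messages.forEachRemaining(System.out::println);
    }
}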
| 33.608247 | 100 | 0.651227 |
7b9c19669488c67360de49eecec2eb434a5d5abb | 3,318 |
package org.usfirst.frc.team619.subsystems.drive;
import org.usfirst.frc.team619.hardware.DigitalEncoder;
import org.usfirst.frc.team619.hardware.TalonCan;
/**
* Mecanum drive base that is based off the code for the tank drive base, just has extra motors and servos added
* @author admin
*/
public class SRXMecanumDriveBase{
protected TalonCan topLeftmotor, topRightmotor, bottomLeftmotor, bottomRightmotor;
protected DigitalEncoder tL, tR, bL, bR;
public SRXMecanumDriveBase(int topLeftmotorChannel, int topRightmotorChannel, int bottomLeftmotorChannel, int bottomRightmotorChannel){
topLeftmotor = new TalonCan(topLeftmotorChannel);
topRightmotor = new TalonCan(topRightmotorChannel);
bottomLeftmotor = new TalonCan(bottomLeftmotorChannel);
bottomRightmotor = new TalonCan(bottomRightmotorChannel);
}
public SRXMecanumDriveBase(TalonCan topLeftmotor, TalonCan topRightmotor, TalonCan bottomLeftmotor, TalonCan bottomRightmotor){
this.topLeftmotor = topLeftmotor;
this.topRightmotor = topRightmotor;
this.bottomLeftmotor = bottomLeftmotor;
this.bottomRightmotor = bottomRightmotor;
}
public SRXMecanumDriveBase(TalonCan topLeftmotor, TalonCan topRightmotor, TalonCan bottomLeftmotor, TalonCan bottomRightmotor,
DigitalEncoder tL, DigitalEncoder tR, DigitalEncoder bL, DigitalEncoder bR){
this.topLeftmotor = topLeftmotor;
this.topRightmotor = topRightmotor;
this.bottomLeftmotor = bottomLeftmotor;
this.bottomRightmotor = bottomRightmotor;
this.tL = tL;
this.tR = tR;
this.bL = bL;
this.bR = bR;
}
public TalonCan getTopleftTalon() {
return topLeftmotor;
}
public TalonCan getToprightTalon() {
return topRightmotor;
}
public TalonCan getBottomleftTalon() {
return bottomLeftmotor;
}
public TalonCan getBottomrightTalon() {
return bottomRightmotor;
}
public DigitalEncoder getTL(){
return tL;
}
public DigitalEncoder getTR(){
return tR;
}
public DigitalEncoder getBL(){
return bL;
}
public DigitalEncoder getBR(){
return bR;
}
    //drive forward and backwards by having all motors go in the same direction
public void drive(double percent){
topLeftmotor.set(percent);
topRightmotor.set(percent);
bottomLeftmotor.set(percent);
bottomRightmotor.set(percent);
}
//slide left to right by having front motors go in the opposite direction of the back motors
public void slide(double sidepercent){
topLeftmotor.set(-sidepercent);
topRightmotor.set(-sidepercent);
bottomLeftmotor.set(sidepercent);
bottomRightmotor.set(sidepercent);
}
//turn left and right by having left-side motors go in opposite direction of right-side motors
public void turn(double turnpercent){
topLeftmotor.set(-turnpercent);
topRightmotor.set(turnpercent);
bottomLeftmotor.set(-turnpercent);
bottomRightmotor.set(turnpercent);
}
public void stop(){
topLeftmotor.set(0);
topRightmotor.set(0);
bottomLeftmotor.set(0);
bottomRightmotor.set(0);
}
}
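// A minimal teleop sketch, assuming joystick inputs named forward/strafe/turn: this base applies
// one motion per call rather than mixing them, so a simple loop picks a single mode at a time.
//
//     if (Math.abs(turn) > deadband)        base.turn(turn);
//     else if (Math.abs(strafe) > deadband) base.slide(strafe);
//     else if (Math.abs(forward) > deadband) base.drive(forward);
//     else                                  base.stop();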
| 30.722222 | 139 | 0.689873 |
d5d5b58e1a73e55251369fc972e026a5341e946c | 3,873 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example;
import java.util.Map;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.actions.api.smarthome.SmartHomeApp;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.home.graph.v1.HomeGraphApiServiceProto;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import com.google.protobuf.util.JsonFormat;
/**
* A singleton class to encapsulate state reporting behavior with changing ColorSetting state
* values.
*/
final class ReportState {
private static final Logger LOGGER = LoggerFactory.getLogger(MySmartHomeApp.class);
private ReportState() {
}
/**
* Creates and completes a ReportStateAndNotification request
*
* @param actionsApp The SmartHomeApp instance to use to make the gRPC request
* @param userId The agent user ID
* @param deviceId The device ID
* @param states A Map of state keys and their values for the provided device ID
*/
public static void makeRequest(
SmartHomeApp actionsApp, String userId, String deviceId, Map<String, Object> states) {
// Convert a Map of states to a JsonObject
JsonObject jsonStates = (JsonObject) JsonParser.parseString(new Gson().toJson(states));
ReportState.makeRequest(actionsApp, userId, deviceId, jsonStates);
}
/**
* Creates and completes a ReportStateAndNotification request
*
* @param actionsApp The SmartHomeApp instance to use to make the gRPC request
* @param userId The agent user ID
* @param deviceId The device ID
* @param states A JSON object of state keys and their values for the provided device ID
*/
public static void makeRequest(
SmartHomeApp actionsApp, String userId, String deviceId, JsonObject states) {
// Do state name replacement for ColorSetting trait
// See https://developers.google.com/assistant/smarthome/traits/colorsetting#device-states
JsonObject colorJson = states.getAsJsonObject("color");
if (colorJson != null && colorJson.has("spectrumRgb")) {
colorJson.add("spectrumRGB", colorJson.get("spectrumRgb"));
colorJson.remove("spectrumRgb");
}
Struct.Builder statesStruct = Struct.newBuilder();
try {
JsonFormat.parser().ignoringUnknownFields().merge(new Gson().toJson(states), statesStruct);
} catch (Exception e) {
LOGGER.error("FAILED TO BUILD");
}
HomeGraphApiServiceProto.ReportStateAndNotificationDevice.Builder deviceBuilder =
HomeGraphApiServiceProto.ReportStateAndNotificationDevice.newBuilder()
.setStates(
Struct.newBuilder()
.putFields(deviceId, Value.newBuilder().setStructValue(statesStruct).build()));
HomeGraphApiServiceProto.ReportStateAndNotificationRequest request =
HomeGraphApiServiceProto.ReportStateAndNotificationRequest.newBuilder()
.setRequestId(String.valueOf(Math.random()))
.setAgentUserId(userId) // our single user's id
.setPayload(
HomeGraphApiServiceProto.StateAndNotificationPayload.newBuilder()
.setDevices(deviceBuilder))
.build();
actionsApp.reportState(request);
}
}
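// A minimal usage sketch: reporting a state change for a single device via the Map overload
// above. The SmartHomeApp instance, user ID, and device ID are assumptions.
class ReportStateUsageSketch {
    static void reportLightTurnedOn(SmartHomeApp actionsApp) {
        Map<String, Object> states = new java.util.HashMap<>();
        states.put("on", true);
        ReportState.makeRequest(actionsApp, "agent-user-id", "light-device-id", states);
    }
}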
| 38.346535 | 99 | 0.729151 |
74e66416344250ea851e04fab4d14941c47a767b | 13,233 | /*
* Copyright 2021 OPPO ESA Stack Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.esastack.httpclient.core;
import io.esastack.commons.net.http.HttpHeaderValues;
import io.esastack.commons.net.http.HttpMethod;
import io.esastack.commons.net.netty.buffer.BufferImpl;
import io.esastack.httpclient.core.netty.NettyHttpClient;
import io.esastack.httpclient.core.util.Futures;
import org.junit.jupiter.api.Test;
import java.io.File;
import java.util.Collections;
import static org.assertj.core.api.Java6BDDAssertions.then;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
class CompositeRequestTest {
@Test
void testBasic() {
final String uri = "http://127.0.0.1:8080/abc";
final HttpClientBuilder builder = HttpClient.create();
final NettyHttpClient client = mock(NettyHttpClient.class);
final HttpMethod method = HttpMethod.PUT;
final SegmentRequest chunk0 = mock(SegmentRequest.class);
final CompositeRequest request = new CompositeRequest(builder,
client, () -> chunk0, method, uri);
final MultipartRequest multipart = request.multipart();
then(multipart.isMultipart()).isTrue();
then(multipart.isSegmented()).isFalse();
then(multipart.buffer()).isNull();
then(multipart.file()).isNull();
then(multipart.attrs()).isEmpty();
then(multipart.files()).isEmpty();
multipart.attr("a", "b");
multipart.attr("x", "y");
multipart.attr("xxxxx", "mmmmmmm");
final File file = new File("/abc");
multipart.file("a1", file);
multipart.file("a2", file, HttpHeaderValues.APPLICATION_OCTET_STREAM);
multipart.file("a3", file, "xxx", true);
multipart.file("a4", "file1", file, HttpHeaderValues.TEXT_PLAIN, true);
multipart.file("xxxx", new File("/def"), "mmm", true);
then(multipart.multipartEncode()).isTrue();
multipart.multipartEncode(false);
then(multipart.multipartEncode()).isFalse();
then(request.attrs().size()).isEqualTo(3);
then(request.files().size()).isEqualTo(5);
assertThrows(IllegalStateException.class, request::segment);
assertThrows(IllegalStateException.class, () -> request.body(new File("")));
then(new CompositeRequest(builder, client, () -> chunk0, method, uri).body(new File(""))
.file()).isNotNull();
assertThrows(IllegalStateException.class, () -> request.body(new BufferImpl().writeBytes("Hello".getBytes())));
then(new CompositeRequest(builder, client, () -> chunk0, method, uri)
.body(new BufferImpl().writeBytes("Hello".getBytes()))).isNotNull();
}
@Test
void testType() {
final String uri = "http://127.0.0.1:8080/abc";
final HttpClientBuilder builder = HttpClient.create();
final NettyHttpClient client = mock(NettyHttpClient.class);
when(client.execute(any(HttpRequest.class), any(Context.class), any(), any()))
.thenReturn(Futures.completed());
final HttpMethod method = HttpMethod.PUT;
final SegmentRequest chunk0 = mock(SegmentRequest.class);
final CompositeRequest request0 = new CompositeRequest(builder,
client, () -> chunk0, method, uri);
then(request0.isMultipart()).isFalse();
then(request0.isFile()).isFalse();
then(request0.isMultipart()).isFalse();
request0.multipart();
then(request0.isMultipart()).isTrue();
then(request0.isFile()).isFalse();
then(request0.isSegmented()).isFalse();
final CompositeRequest request1 = new CompositeRequest(builder,
client, () -> chunk0, method, uri);
request1.body(mock(File.class));
then(request1.isMultipart()).isFalse();
then(request1.isFile()).isTrue();
then(request1.isSegmented()).isFalse();
}
@Test
void testCopy() throws Exception {
final String uri = "http://127.0.0.1:8080/abc";
final HttpClientBuilder builder = HttpClient.create();
final NettyHttpClient client = mock(NettyHttpClient.class);
when(client.execute(any(HttpRequest.class), any(Context.class), any(), any()))
.thenReturn(Futures.completed());
final HttpMethod method = HttpMethod.PUT;
final SegmentRequest chunk0 = mock(SegmentRequest.class);
final boolean multipartEncode = true;
final CompositeRequest request = new CompositeRequest(builder,
client, () -> chunk0, method, uri);
final MultipartRequest multipart = request.multipart();
multipart.attr("a", "b");
multipart.attr("x", "y");
multipart.attr("xxxxx", "mmmmmmm");
final File file = new File("/abc");
multipart.file("a1", file);
multipart.file("a2", file, HttpHeaderValues.APPLICATION_OCTET_STREAM);
multipart.file("a3", file, "xxx", true);
multipart.file("a4", "file1", file, HttpHeaderValues.TEXT_PLAIN, true);
multipart.file("xxxx", new File("/def"), "mmm", true);
multipart.multipartEncode(multipartEncode);
then(request.attrs().size()).isEqualTo(3);
then(request.files().size()).isEqualTo(5);
final CompositeRequest copied = request.copy();
then(copied.multipartEncode()).isEqualTo(multipartEncode);
then(copied.attrs().size()).isEqualTo(3);
then(copied.files().size()).isEqualTo(5);
then(copied.file()).isNull();
then(copied.buffer()).isNull();
then(copied.isSegmented()).isFalse();
then(copied.isFile()).isFalse();
then(copied.isMultipart()).isTrue();
// Test isolation
request.file("m", file);
request.attr("mm", "nn");
then(request.attrs().size()).isEqualTo(4);
then(request.files().size()).isEqualTo(6);
then(copied.attrs().size()).isEqualTo(3);
then(copied.files().size()).isEqualTo(5);
then(request.execute().get()).isNull();
assertThrows(IllegalStateException.class, request::execute);
then(copied.execute().get()).isNull();
assertThrows(IllegalStateException.class, copied::execute);
}
@Test
void testUnmodifiableAfterStarted() {
final String uri = "http://127.0.0.1:8080/abc";
final HttpClientBuilder builder = HttpClient.create();
final NettyHttpClient client = mock(NettyHttpClient.class);
final HttpMethod method = HttpMethod.PUT;
final SegmentRequest chunk0 = mock(SegmentRequest.class);
final byte[] data = "Hello".getBytes();
final CompositeRequest request = new CompositeRequest(builder,
client, () -> chunk0, method, uri);
when(client.execute(any(), any(), any(), any()))
.thenReturn(Futures.completed(mock(HttpResponse.class)));
// Before writing
request.enableUriEncode();
then(request.uriEncode()).isTrue();
request.maxRedirects(10);
request.maxRetries(10);
request.readTimeout(100);
then(request.readTimeout()).isEqualTo(100);
request.addHeaders(Collections.singletonMap("a", "b"));
then(request.getHeader("a")).isEqualTo("b");
request.addParams(Collections.singletonMap("m", "n"));
then(request.getParam("m")).isEqualTo("n");
request.handle((h) -> {
});
request.handler(mock(Handler.class));
request.addHeader("x", "y");
then(request.getHeader("x")).isEqualTo("y");
request.setHeader("a", "bb");
then(request.getHeader("a")).isEqualTo("bb");
request.removeHeader("a");
then(request.getHeader("a")).isNullOrEmpty();
request.addParam("p", "q");
then(request.getParam("p")).isEqualTo("q");
final File file = new File("/abc");
assertThrows(IllegalStateException.class, () -> request.file("a1", file));
assertThrows(IllegalStateException.class, () -> request.attr("a", "b"));
request.multipart();
request.file("a1", file);
request.file("a2", file, HttpHeaderValues.APPLICATION_OCTET_STREAM);
request.file("a3", file, "xxx", true);
request.file("a4", "file1", file, HttpHeaderValues.TEXT_PLAIN, true);
request.attr("a", "b");
request.multipartEncode(true);
assertThrows(IllegalStateException.class, () -> request.body(new BufferImpl().writeBytes(data)));
assertThrows(IllegalStateException.class, () -> request.body(file));
request.execute();
// After writing
// Header ops are allowed
request.addHeaders(Collections.singletonMap("a", "b"));
request.addHeader("x", "y");
request.setHeader("a", "bb");
request.removeHeader("a");
assertThrows(IllegalStateException.class, request::disableExpectContinue);
assertThrows(IllegalStateException.class, request::enableUriEncode);
assertThrows(IllegalStateException.class, () -> request.maxRedirects(10));
assertThrows(IllegalStateException.class, () -> request.maxRetries(10));
assertThrows(IllegalStateException.class, () -> request.readTimeout(100));
assertThrows(IllegalStateException.class, () -> request.addParams(Collections.singletonMap("m", "n")));
assertThrows(IllegalStateException.class, () -> request.handle((h) -> {
}));
assertThrows(IllegalStateException.class, () -> request.handler(mock(Handler.class)));
assertThrows(IllegalStateException.class, () -> request.addParam("p", "q"));
assertThrows(IllegalStateException.class, () -> request.file("a1", file));
assertThrows(IllegalStateException.class,
() -> request.file("a2", file, HttpHeaderValues.APPLICATION_OCTET_STREAM));
assertThrows(IllegalStateException.class,
() -> request.file("a3", file, "xxx", true));
assertThrows(IllegalStateException.class,
() -> request.file("a4", "file1", file, HttpHeaderValues.TEXT_PLAIN, true));
assertThrows(IllegalStateException.class, () -> request.attr("a", "b"));
assertThrows(IllegalStateException.class, () -> request.multipartEncode(true));
assertThrows(IllegalStateException.class, () -> request.body(file));
assertThrows(IllegalStateException.class, () -> request.body(new BufferImpl().writeBytes(data)));
assertThrows(IllegalStateException.class, request::execute);
}
@Test
void testSegmentRequest() {
final String key = "key";
final String value = "value";
final String value1 = "value1";
final String uri = "http://127.0.0.1:8080/abc";
final HttpClient httpClient = HttpClient.ofDefault();
final HttpRequestFacade httpRequestFacade = httpClient.post(uri)
.addHeader(key, value)
.addParam(key, value)
.addParam(key, value1)
.readTimeout(10)
.maxRedirects(18)
.maxRetries(18)
.enableUriEncode();
final SegmentRequest segmentRequest = httpRequestFacade.segment();
assertEquals(httpRequestFacade.uri(), segmentRequest.uri());
assertEquals(httpRequestFacade.headers().toString(), segmentRequest.headers().toString());
assertEquals(httpRequestFacade.paramNames().toString(), segmentRequest.paramNames().toString());
assertEquals(httpRequestFacade.getParams(key).toString(), segmentRequest.getParams(key).toString());
assertEquals(httpRequestFacade.readTimeout(), segmentRequest.readTimeout());
assertEquals(httpRequestFacade.uriEncode(), segmentRequest.uriEncode());
assertEquals(((HttpRequestBaseImpl) httpRequestFacade).ctx.maxRetries(),
((HttpRequestBaseImpl) segmentRequest).ctx.maxRetries());
assertEquals(((HttpRequestBaseImpl) httpRequestFacade).ctx.maxRedirects(),
((HttpRequestBaseImpl) segmentRequest).ctx.maxRedirects());
assertEquals(((HttpRequestBaseImpl) httpRequestFacade).handle,
((HttpRequestBaseImpl) segmentRequest).handle);
assertEquals(((HttpRequestBaseImpl) httpRequestFacade).handler,
((HttpRequestBaseImpl) segmentRequest).handler);
then(segmentRequest.isMultipart()).isFalse();
then(segmentRequest.isFile()).isFalse();
then(segmentRequest.isSegmented()).isTrue();
}
}
| 43.386885 | 119 | 0.647094 |
5e7957e712b0c35857cd9474ba673f1c18ec03e6 | 2,222 | package org.researchstack.backbone.model;
import java.io.Serializable;
import java.util.Date;
/*
Created by bradleymcdermott on 10/22/15.
*/
public class User implements Serializable
{
private String name;
private String email;
private Date birthDate;
private UserHealth userHealth;
/**
* See description above DataSharingScope inner enum below
*/
private DataSharingScope dataSharingScope;
/** Default constructor for Serializable */
public User()
{
}
public String getName()
{
return name;
}
public void setName(String name)
{
this.name = name;
}
public String getEmail()
{
return email;
}
public void setEmail(String email)
{
this.email = email;
}
public Date getBirthDate()
{
return birthDate;
}
public void setBirthDate(Date birthDate)
{
this.birthDate = birthDate;
}
public DataSharingScope getDataSharingScope() {
return dataSharingScope;
}
public void setDataSharingScope(DataSharingScope dataSharingScope) {
this.dataSharingScope = dataSharingScope;
}
public UserHealth getUserHealth() {
return userHealth;
}
public void setUserHealth(UserHealth userHealth) {
this.userHealth = userHealth;
}
/*!
* DataSharingScope is an enumeration of the choices for the scope of sharing collected data.
* NONE - The user has not consented to sharing their data.
* STUDY - The user has consented only to sharing their de-identified data with the sponsors and partners of the current research study.
* ALL - The user has consented to sharing their de-identified data for current and future research, which may or may not involve the same institutions or investigators.
*/
public enum DataSharingScope {
NONE("no_sharing"),
STUDY("sponsors_and_partners"),
ALL("all_qualified_researchers");
private String identifier;
DataSharingScope(String identifier) {
this.identifier = identifier;
}
public String getIdentifier() {
return identifier;
}
}
}
| 22.907216 | 175 | 0.651665 |
bae92f4ad8667740c6bdc62797238c6f13915da6 | 388 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.martin.defaultServer.system;
/**
*
* @author martin
*/
public class SysInfo {
public static final short DEFAULT_SLEEP_TIME = 300;
public static final short DEFAULT_PORT = 2400;
}
| 24.25 | 79 | 0.729381 |
83734df91a8d694fdcb48000e706d60a6c651a6f | 726 | /*
* Copyright (c) 2020 Markus Neifer
* Licensed under the MIT License.
* See file LICENSE in parent directory of project root.
*/
package de.mneifercons.examples;
/**
* Bit fiddler.
*/
final class BitFiddler {
private BitFiddler() {
}
/**
* Application entry point.
*
* @param args the application arguments
*/
public static void main(final String[] args) {
if (args.length > 0) {
int i = 0;
int noOfBytes = args.length;
byte[] bytes = new byte[noOfBytes];
for (i = 0; i < noOfBytes; ++i) {
                bytes[i] = Byte.parseByte(args[i], 2);
}
String string = new String(bytes);
System.out.println(string);
}
}
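    /*
     * Usage sketch (added for illustration, not in the original source): each
     * argument is expected to be the binary representation of one byte, e.g.
     *
     *   java de.mneifercons.examples.BitFiddler 01001000 01101001
     *
     * parses 01001000 -> 72 ('H') and 01101001 -> 105 ('i') and prints "Hi".
     * Byte.parseByte(s, 2) only accepts values up to 127, so bytes with the high
     * bit set would need a different parsing approach.
     */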
}
| 20.742857 | 57 | 0.568871 |
c3a3e675d7742fa8c9a90db82e27cbcdc1511747 | 1,256 | package edu.pitt.isg.deserializers;
import com.google.gson.*;
import edu.pitt.isg.mdc.dats2_2.Organization;
import edu.pitt.isg.mdc.dats2_2.Person;
import edu.pitt.isg.mdc.dats2_2.PersonComprisedEntity;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class PersonComprisedEntityDeserializer extends CustomDeserializer<PersonComprisedEntity> {
Gson gson = new GsonBuilder().serializeNulls().enableComplexMapKeySerialization().create();
@Override
public PersonComprisedEntity deserialize(JsonElement json, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException {
List<String> personQualifiers = Arrays.asList("fullName", "firstName", "lastName", "affiliations", "email");
List<String> organizationQualifiers = Arrays.asList("name", "abbreviation", "location");
List<String> jsonMembers = getJsonMembers(json);
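        // If the JSON object shares any member name with the organization-specific
        // fields, deserialize it as an Organization; otherwise person-specific
        // fields mark it as a Person. Unrecognized shapes fall through to null.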
if (!Collections.disjoint(jsonMembers, organizationQualifiers)) {
return gson.fromJson(json, Organization.class);
} else if (!Collections.disjoint(jsonMembers, personQualifiers)) {
return gson.fromJson(json, Person.class);
}
return null;
}
}
| 41.866667 | 156 | 0.746815 |
2db13bda298c56628cfa4d0b082846d544cbd78c | 484 | package com.gravityray.examples.mvvm.database;
import android.arch.persistence.room.Database;
import android.arch.persistence.room.RoomDatabase;
import com.gravityray.examples.mvvm.models.database.MessageEntity;
@Database(
entities = {
MessageEntity.class
},
version = 1
)
public abstract class MessageDatabase extends RoomDatabase {
public static final String NAME = "MessageDatabase";
public abstract MessageDao messageDao();
}
| 23.047619 | 66 | 0.729339 |
bfab3421aaf3d7019105ed33ed71c471e2c3ac8a | 1,345 | package sndml.servicenow;
import org.junit.*;
import org.slf4j.Logger;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.Properties;
public class SessionVerificationTest {
Logger logger = TestManager.getLogger(this.getClass());
@Test
public void testValidate() throws Exception {
Session session = TestManager.getDefaultProfile().getSession();
session.verifyUser();
Table user = session.table("sys_user");
TableWSDL wsdl = user.getWSDL();
int wsdlCount = wsdl.getReadFieldNames().size();
logger.info("wsdl fields=" + wsdlCount);
TableSchema schema = user.getSchema();
int schemaCount = schema.getFieldNames().size();
logger.info("schema fields=" + schemaCount);
session.verifyUser();
assertEquals(wsdlCount, schemaCount);
}
@Test
public void testAutoVerify() throws Exception {
Properties props = new Properties();
props.setProperty("servicenow.instance", "dev00000");
props.setProperty("servicenow.username", "admin");
props.setProperty("servicenow.password", "secret");
Session session1 = new Session(props);
assertNotNull(session1);
props.setProperty("servicenow.verify_session", "true");
Session session2 = null;
try {
session2 = new Session(props);
}
catch (IOException e) {
logger.info(Log.TEST, e.getMessage());
}
assertNull(session2);
}
}
| 27.44898 | 65 | 0.728625 |
efcf3108df03d24ba159599ed20bb841f2bd8d4e | 5,980 | package com.dianrong.common.uniauth.common.bean.request;
import io.swagger.annotations.ApiModel;
import lombok.ToString;
import java.util.List;
@ToString
@ApiModel("Group operation request parameters")
public class GroupParam extends PageParam {
private static final long serialVersionUID = -4261321484001881493L;
private Integer id;
private String code;
private String name;
private String description;
private Byte status;
private Byte userStatus;
  // target group id, used when adding
  private Integer targetGroupId;
  // target group ids, used when deleting
  private List<Integer> targetGroupIds;
  // if true, return only the groups and ignore the members under each group;
  // if false, return both the groups and their members
  private Boolean onlyShowGroup;
  // if onlyShowGroup=false and userGroupType=0, return members;
  // if onlyShowGroup=false and userGroupType=1, return owners
  private Byte userGroupType;
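  /*
   * Illustrative sketch (not part of the original source): a hypothetical query
   * for the groups together with the ordinary members under them.
   *
   *   GroupParam param = new GroupParam();
   *   param.setOnlyShowGroup(false);      // include members, not just the group tree
   *   param.setUserGroupType((byte) 0);   // 0 = members, 1 = owners
   *   param.setNeedGrpUser(true);
   */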
private Boolean needOwnerMarkup;
private Integer roleId;
private Integer tagId;
private Integer domainId;
private List<Integer> roleIds;
private List<Integer> tagIds;
private Boolean onlyNeedGrpInfo;
  // whether to include the group's role information
  private Boolean needGrpRole;
  // whether to include the group's extended attribute information
  private Boolean needGrpExtendVal;
  // whether to include the group's tag information
  private Boolean needGrpTag;
  // whether to include the users associated with the group
  private Boolean needGrpUser;
  // whether the returned user list includes disabled users
  private Boolean includeDisableUser;
  // whether to include role information for the group's users
  private Boolean needGrpUserRole;
  // whether to include tag information for the group's users
  private Boolean needGrpUserTag;
  // whether to include extended information for the group's users
  private Boolean needGrpUserExtendVal;
public Boolean getNeedGrpRole() {
return needGrpRole;
}
public GroupParam setNeedGrpRole(Boolean needGrpRole) {
this.needGrpRole = needGrpRole;
return this;
}
public Boolean getNeedGrpExtendVal() {
return needGrpExtendVal;
}
public GroupParam setNeedGrpExtendVal(Boolean needGrpExtendVal) {
this.needGrpExtendVal = needGrpExtendVal;
return this;
}
public Boolean getNeedGrpTag() {
return needGrpTag;
}
public GroupParam setNeedGrpTag(Boolean needGrpTag) {
this.needGrpTag = needGrpTag;
return this;
}
public Boolean getNeedGrpUser() {
return needGrpUser;
}
public GroupParam setNeedGrpUser(Boolean needGrpUser) {
this.needGrpUser = needGrpUser;
return this;
}
public Boolean getNeedGrpUserRole() {
return needGrpUserRole;
}
public GroupParam setNeedGrpUserRole(Boolean needGrpUserRole) {
this.needGrpUserRole = needGrpUserRole;
return this;
}
public Boolean getNeedGrpUserTag() {
return needGrpUserTag;
}
public GroupParam setNeedGrpUserTag(Boolean needGrpUserTag) {
this.needGrpUserTag = needGrpUserTag;
return this;
}
public Boolean getNeedGrpUserExtendVal() {
return needGrpUserExtendVal;
}
public GroupParam setNeedGrpUserExtendVal(Boolean needGrpUserExtendVal) {
this.needGrpUserExtendVal = needGrpUserExtendVal;
return this;
}
public Integer getId() {
return id;
}
public GroupParam setId(Integer id) {
this.id = id;
return this;
}
public String getCode() {
return code;
}
public GroupParam setCode(String code) {
this.code = code;
return this;
}
public String getName() {
return name;
}
public GroupParam setName(String name) {
this.name = name;
return this;
}
public String getDescription() {
return description;
}
public GroupParam setDescription(String description) {
this.description = description;
return this;
}
public Byte getStatus() {
return status;
}
public GroupParam setStatus(Byte status) {
this.status = status;
return this;
}
public Integer getTargetGroupId() {
return targetGroupId;
}
public GroupParam setTargetGroupId(Integer targetGroupId) {
this.targetGroupId = targetGroupId;
return this;
}
public Boolean getOnlyShowGroup() {
return onlyShowGroup;
}
public GroupParam setOnlyShowGroup(Boolean onlyShowGroup) {
this.onlyShowGroup = onlyShowGroup;
return this;
}
public Integer getRoleId() {
return roleId;
}
public GroupParam setRoleId(Integer roleId) {
this.roleId = roleId;
return this;
}
public Integer getDomainId() {
return domainId;
}
public GroupParam setDomainId(Integer domainId) {
this.domainId = domainId;
return this;
}
public List<Integer> getRoleIds() {
return roleIds;
}
public GroupParam setRoleIds(List<Integer> roleIds) {
this.roleIds = roleIds;
return this;
}
public Byte getUserGroupType() {
return userGroupType;
}
public GroupParam setUserGroupType(Byte userGroupType) {
this.userGroupType = userGroupType;
return this;
}
public Boolean getNeedOwnerMarkup() {
return needOwnerMarkup;
}
public GroupParam setNeedOwnerMarkup(Boolean needOwnerMarkup) {
this.needOwnerMarkup = needOwnerMarkup;
return this;
}
public List<Integer> getTargetGroupIds() {
return targetGroupIds;
}
public GroupParam setTargetGroupIds(List<Integer> targetGroupIds) {
this.targetGroupIds = targetGroupIds;
return this;
}
public Integer getTagId() {
return tagId;
}
public GroupParam setTagId(Integer tagId) {
this.tagId = tagId;
return this;
}
public List<Integer> getTagIds() {
return tagIds;
}
public GroupParam setTagIds(List<Integer> tagIds) {
this.tagIds = tagIds;
return this;
}
public Boolean getOnlyNeedGrpInfo() {
return onlyNeedGrpInfo;
}
public GroupParam setOnlyNeedGrpInfo(Boolean onlyNeedGrpInfo) {
this.onlyNeedGrpInfo = onlyNeedGrpInfo;
return this;
}
public Boolean getIncludeDisableUser() {
return includeDisableUser;
}
public GroupParam setIncludeDisableUser(Boolean includeDisableUser) {
this.includeDisableUser = includeDisableUser;
return this;
}
public Byte getUserStatus() {
return userStatus;
}
public void setUserStatus(Byte userStatus) {
this.userStatus = userStatus;
}
}
| 20.340136 | 95 | 0.718896 |
c5369ec8c056d12567771b62e1407f4def44e6e8 | 5,835 | /*
* Copyright 2015-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.task.repository.database.support;
import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.batch.item.database.Order;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.cloud.task.repository.database.PagingQueryProvider;
import org.springframework.cloud.task.repository.support.DatabaseType;
import org.springframework.jdbc.support.MetaDataAccessException;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import static org.springframework.cloud.task.repository.support.DatabaseType.DB2;
import static org.springframework.cloud.task.repository.support.DatabaseType.DB2AS400;
import static org.springframework.cloud.task.repository.support.DatabaseType.DB2VSE;
import static org.springframework.cloud.task.repository.support.DatabaseType.DB2ZOS;
import static org.springframework.cloud.task.repository.support.DatabaseType.H2;
import static org.springframework.cloud.task.repository.support.DatabaseType.HSQL;
import static org.springframework.cloud.task.repository.support.DatabaseType.MYSQL;
import static org.springframework.cloud.task.repository.support.DatabaseType.ORACLE;
import static org.springframework.cloud.task.repository.support.DatabaseType.POSTGRES;
import static org.springframework.cloud.task.repository.support.DatabaseType.SQLSERVER;
/**
* Factory bean for {@link PagingQueryProvider} interface. The database type will be
* determined from the data source if not provided explicitly. Valid types are given by
* the {@link DatabaseType} enum.
*
* @author Glenn Renfro
*/
public class SqlPagingQueryProviderFactoryBean
implements FactoryBean<PagingQueryProvider> {
private DataSource dataSource;
private String databaseType;
private String fromClause;
private String whereClause;
private String selectClause;
private Map<String, Order> sortKeys;
private Map<DatabaseType, AbstractSqlPagingQueryProvider> providers = new HashMap<>();
{
this.providers.put(HSQL, new HsqlPagingQueryProvider());
this.providers.put(H2, new H2PagingQueryProvider());
this.providers.put(MYSQL, new MySqlPagingQueryProvider());
this.providers.put(POSTGRES, new PostgresPagingQueryProvider());
this.providers.put(ORACLE, new OraclePagingQueryProvider());
this.providers.put(SQLSERVER, new SqlServerPagingQueryProvider());
this.providers.put(DB2, new Db2PagingQueryProvider());
this.providers.put(DB2VSE, new Db2PagingQueryProvider());
this.providers.put(DB2ZOS, new Db2PagingQueryProvider());
this.providers.put(DB2AS400, new Db2PagingQueryProvider());
}
/**
* @param databaseType the databaseType to set
*/
public void setDatabaseType(String databaseType) {
Assert.hasText(databaseType, "databaseType must not be empty nor null");
this.databaseType = databaseType;
}
/**
* @param dataSource the dataSource to set
*/
public void setDataSource(DataSource dataSource) {
Assert.notNull(dataSource, "dataSource must not be null");
this.dataSource = dataSource;
}
/**
* @param fromClause the fromClause to set
*/
public void setFromClause(String fromClause) {
Assert.hasText(fromClause, "fromClause must not be empty nor null");
this.fromClause = fromClause;
}
/**
* @param whereClause the whereClause to set
*/
public void setWhereClause(String whereClause) {
this.whereClause = whereClause;
}
/**
* @param selectClause the selectClause to set
*/
public void setSelectClause(String selectClause) {
Assert.hasText(selectClause, "selectClause must not be empty nor null");
this.selectClause = selectClause;
}
/**
* @param sortKeys the sortKeys to set
*/
public void setSortKeys(Map<String, Order> sortKeys) {
this.sortKeys = sortKeys;
}
/**
* Get a {@link PagingQueryProvider} instance using the provided properties and
* appropriate for the given database type.
*
* @see FactoryBean#getObject()
*/
@Override
public PagingQueryProvider getObject() throws Exception {
DatabaseType type;
try {
type = this.databaseType != null
? DatabaseType.valueOf(this.databaseType.toUpperCase())
: DatabaseType.fromMetaData(this.dataSource);
}
catch (MetaDataAccessException e) {
throw new IllegalArgumentException(
"Could not inspect meta data for database type. You have to supply it explicitly.",
e);
}
AbstractSqlPagingQueryProvider provider = this.providers.get(type);
Assert.state(provider != null,
"Should not happen: missing PagingQueryProvider for DatabaseType="
+ type);
provider.setFromClause(this.fromClause);
provider.setWhereClause(this.whereClause);
provider.setSortKeys(this.sortKeys);
if (StringUtils.hasText(this.selectClause)) {
provider.setSelectClause(this.selectClause);
}
provider.init(this.dataSource);
return provider;
}
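	/*
	 * Illustrative sketch (not part of the original source): minimal wiring of this
	 * factory bean. The DataSource variable, table and column names are hypothetical;
	 * the database type is inferred from the DataSource metadata when not set explicitly.
	 *
	 *   SqlPagingQueryProviderFactoryBean factory = new SqlPagingQueryProviderFactoryBean();
	 *   factory.setDataSource(dataSource);
	 *   factory.setSelectClause("TASK_EXECUTION_ID, TASK_NAME");
	 *   factory.setFromClause("TASK_EXECUTION");
	 *   Map<String, Order> sortKeys = new HashMap<>();
	 *   sortKeys.put("TASK_EXECUTION_ID", Order.DESCENDING);
	 *   factory.setSortKeys(sortKeys);
	 *   PagingQueryProvider pagingQueryProvider = factory.getObject();
	 */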
/**
* Always returns {@link PagingQueryProvider}.
*
* @see FactoryBean#getObjectType()
*/
@Override
public Class<PagingQueryProvider> getObjectType() {
return PagingQueryProvider.class;
}
/**
* Always returns true.
*
* @see FactoryBean#isSingleton()
*/
@Override
public boolean isSingleton() {
return true;
}
}
| 31.540541 | 89 | 0.768295 |
f7c9a47fee81ec4664c285098edd35c0d733555f | 241 | package org.cryptoapi.bench.http;
import java.net.MalformedURLException;
import java.net.URL;
public class HttpProtocolABMC1 {
public void go(String url) throws MalformedURLException {
System.out.println(new URL(url));
}
}
| 21.909091 | 61 | 0.742739 |
962981cf83c6abc70562c9081be1df99dd519376 | 2,334 | /*
* Copyright (c) 2002 Ernest Yu. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package pfc.export;
import java.io.File;
/**
* Class to create an Exporter object.
* @author Ernie Yu
*/
public class ExporterFactory {
private int type;
/** Constructor.
* @param type factory type - see constants in Exporter interface
*/
public ExporterFactory(int type) {
this.type = type;
}
/** Returns a new Exporter object based on the factory type. The
* Exporter is initialized to write to the specified file.
*/
public Exporter getExporter(File exportFile) {
Exporter exporter = null;
switch (type) {
case Exporter.MBOX:
exporter = new MboxMailExporter();
break;
case Exporter.MBOX_TOC:
exporter = new EudoraMailExporter();
break;
case Exporter.FAVE_HTML:
exporter = new FavoriteHtmlExporter();
break;
case Exporter.TEXT:
exporter = new TextMailExporter();
break;
default:
// Do nothing.
}
if (exporter != null) { exporter.setFile(exportFile); }
return exporter;
}
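    /*
     * Illustrative sketch (not part of the original source): obtaining an mbox
     * exporter for a hypothetical output file.
     *
     *   ExporterFactory factory = new ExporterFactory(Exporter.MBOX);
     *   Exporter exporter = factory.getExporter(new File("folder.mbx"));
     */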
}
| 34.323529 | 79 | 0.651671 |