content
stringlengths 10
4.9M
|
---|
<filename>__tests__/compressFiles.spec.ts
import fs from "fs";
import { join } from "path";
import compressFiles from "../compressFiles";
const root = process.cwd();
const fakeFile = join(root, "hello.js");
const fakeFile2 = join(root, "goodbye.js");
const code = `
(() => {
try {
console.log("Hello World!");
} catch(err) {
console.error(err.toString())
}
})()
`;
const compressedCode =
'(()=>{try{console.log("Hello World!")}catch(o){console.error(o.toString())}})();';
describe("Compress Files", () => {
beforeAll(() => {
fs.writeFileSync(fakeFile, code, {
encoding: "utf-8"
});
fs.writeFileSync(fakeFile2, "", {
encoding: "utf-8"
});
});
it("fails to find a file", async () => {
try {
await compressFiles(["nonexistantfile.js"]);
} catch (error) {
expect(error.toString()).toEqual(
"Error: Unable to locate nonexistantfile.js. The file doesn't appear to exist!"
);
}
});
it("fails to compress an empty file", async () => {
try {
await compressFiles(["goodbye.js"]);
} catch (error) {
fs.unlinkSync(fakeFile2);
expect(error.toString()).toEqual(
"Error: Unable to minify goodbye.js. No minified code was returned from terser!"
);
}
});
it("compresses the file", async () => {
await compressFiles(["hello.js"]);
const compressedFile = fs.readFileSync(fakeFile);
expect(compressedFile.toString()).toEqual(compressedCode);
fs.unlinkSync(fakeFile);
});
});
|
package com.task.phones.rest;
import com.task.phones.mapper.DeviceMapper;
import com.task.phones.rest.model.CreateDeviceDto;
import com.task.phones.rest.model.DeviceDto;
import com.task.phones.service.TestingDeviceService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
/**
 * REST resource exposing testing-device operations under
 * <code>/api/v1/devices</code>.
 */
@Slf4j
@RestController
@RequestMapping("/api/v1/devices")
public class DeviceResource {

    private final TestingDeviceService testingDeviceService;
    private final DeviceMapper deviceMapper;

    /**
     * Constructor injection of collaborators.
     *
     * @param testingDeviceService service used to register devices
     * @param deviceMapper         maps domain devices to transport DTOs
     */
    @Autowired
    public DeviceResource(TestingDeviceService testingDeviceService, DeviceMapper deviceMapper) {
        this.testingDeviceService = testingDeviceService;
        this.deviceMapper = deviceMapper;
    }

    /**
     * GET /api/v1/devices.
     *
     * NOTE(review): returns a freshly constructed, empty {@link DeviceDto}
     * rather than data from the service — looks like a placeholder; confirm
     * the intended behavior.
     */
    @GetMapping
    public ResponseEntity<DeviceDto> get() {
        return ResponseEntity.ok(new DeviceDto());
    }

    /**
     * POST /api/v1/devices — registers a new device from the request body's
     * model name and returns the created device as a DTO.
     *
     * @param newDevice creation payload; only {@code getModel()} is consumed
     */
    @PostMapping
    public ResponseEntity<DeviceDto> createDevice(@RequestBody CreateDeviceDto newDevice) {
        return ResponseEntity.ok(
                deviceMapper.toDto(
                        testingDeviceService.add(newDevice.getModel())
                )
        );
    }
}
|
# Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import os
from ..... import tensor as mt
from ..... import dataframe as md
from .....utils import lazy_import
from .. import gen_tensorflow_dataset, run_tensorflow_script
tf_installed = lazy_import("tensorflow") is not None
@pytest.mark.skipif(not tf_installed, reason="tensorflow not installed")
def test_mars_dataset(setup_cluster):
    """Exercise gen_tensorflow_dataset with every supported input container.

    For each container type (Mars tensor(s), numpy arrays, Mars/pandas
    DataFrames and Series, plain lists) the test checks that the returned
    object is a tf.data ``DatasetV2`` and that the first 10-element batch
    matches the first 10 rows of the source data.  Finally it verifies that
    an unsupported container (a tuple) raises ``TypeError``.

    Note: ``data``/``labels``/``label`` are deliberately reassigned between
    sections; later sections build on the values produced by earlier ones.
    """
    import numpy as np
    import pandas as pd

    tf_dataset_ops = lazy_import("tensorflow.python.data.ops.dataset_ops")

    # Mars tensor
    data = mt.random.rand(1000, 32, dtype="f4")
    data_verify = data[:10].execute().fetch()

    dataset = gen_tensorflow_dataset(data)
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    # take(1) yields exactly one 10-row batch to compare against data_verify
    for _, data_1batch in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)

    # Mars tensors
    data = mt.random.rand(1000, 32, dtype="f4")
    labels = mt.random.randint(0, 2, (1000, 10), dtype="f4")
    data_verify = data[:10].execute().fetch()
    labels_verify = labels[:10].execute().fetch()

    dataset = gen_tensorflow_dataset((data, labels))
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, labels_verify)

    # np ndarray
    data = np.random.rand(1000, 32)
    labels = np.random.randint(0, 2, (1000, 10))
    data_verify = data[:10]
    labels_verify = labels[:10]

    dataset = gen_tensorflow_dataset((data, labels))
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, labels_verify)

    # Mars dataframe (wraps the numpy arrays created in the previous section)
    data = md.DataFrame(data)
    labels = md.DataFrame(labels)
    data_verify = data.iloc[:10].execute().fetch().values
    labels_verify = labels.iloc[:10].execute().fetch().values

    dataset = gen_tensorflow_dataset(
        (data, labels), fetch_kwargs={"extra_config": {"check_series_name": False}}
    )
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, labels_verify)

    # Mars series (column 1 of the Mars labels DataFrame)
    label = labels[1]
    label_verify = label[:10].execute().fetch()

    dataset = gen_tensorflow_dataset(
        (data, label), fetch_kwargs={"extra_config": {"check_series_name": False}}
    )
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, label_verify)

    # pandas dataframe
    data = pd.DataFrame(np.random.rand(1000, 32))
    labels = pd.DataFrame(np.random.randint(0, 2, (1000, 10)), dtype="float32")
    data_verify = data.iloc[:10].values
    labels_verify = labels.iloc[:10].values

    dataset = gen_tensorflow_dataset((data, labels))
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, labels_verify)

    # pandas series
    label = labels[1]
    label_verify = label[:10]

    dataset = gen_tensorflow_dataset((data, label))
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, label_verify)

    # list (converted from the pandas series above)
    label = label.tolist()
    label_verify = label[:10]

    dataset = gen_tensorflow_dataset((data, label))
    assert isinstance(dataset, tf_dataset_ops.DatasetV2)
    for _, (data_1batch, label_1batch) in enumerate(dataset.repeat().batch(10).take(1)):
        np.testing.assert_array_equal(data_1batch, data_verify)
        np.testing.assert_array_equal(label_1batch, label_verify)

    # test TypeError: tuples are not an accepted dataset container
    label = tuple(range(1000))
    with pytest.raises(TypeError) as e:
        dataset = gen_tensorflow_dataset((data, label))
    exec_msg = e.value.args[0]
    assert exec_msg == "Unexpected dataset type: <class 'tuple'>"
@pytest.mark.skipif(not tf_installed, reason="tensorflow not installed")
def test_mars_dataset_script(setup_cluster):
    """Smoke-test run_tensorflow_script: the bundled tf_dataset.py script,
    run on two workers with Mars tensors as input, must report "ok"."""
    session = setup_cluster
    script = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "tf_dataset.py"
    )

    feature_data = mt.random.rand(1000, 32, dtype="f4")
    labels = mt.random.randint(0, 2, (1000, 10), dtype="f4")

    result = run_tensorflow_script(
        script,
        n_workers=2,
        data={"feature_data": feature_data, "labels": labels},
        command_argv=["multiple"],
        session=session,
    )
    assert result.fetch()["status"] == "ok"
|
/**
* The <tt>UserAgentPlugIn</tt> handles setting the global and per-feed
* HTTP user agent settings, overriding the default <i>curn</i> user agent
* setting. It intercepts the following configuration parameters:
*
* <table border="1">
* <tr valign="top" align="left">
* <th>Section</th>
* <th>Parameter</th>
* <th>Meaning</th>
* </tr>
* <tr valign="top">
* <td><tt>[curn]</tt></td>
* <td><tt>UserAgent</tt></td>
* <td>The default user agent, if none is supplied in individual feed
* sections.</td>
* </tr>
* <tr valign="top">
* <td><tt>[Feed<i>xxx</i>]</tt></td>
* <td><tt>UserAgent</tt></td>
* <td>User agent to use for a given feed.</td>
* </tr>
* </table>
*
* @version <tt>$Revision$</tt>
*/
public class UserAgentPlugIn
implements MainConfigItemPlugIn,
FeedConfigItemPlugIn,
PostConfigPlugIn,
PreFeedDownloadPlugIn
{
/*----------------------------------------------------------------------*\
Private Constants
\*----------------------------------------------------------------------*/
private static final String VAR_USER_AGENT = "UserAgent";
/*----------------------------------------------------------------------*\
Private Data Items
\*----------------------------------------------------------------------*/
/**
* Feed save data, by feed
*/
private Map<FeedInfo,String> perFeedUserAgentMap =
new HashMap<FeedInfo,String>();
/**
* Default user agent
*/
private String defaultUserAgent = null;
/**
* For log messages
*/
private static final Logger log = new Logger (UserAgentPlugIn.class);
/*----------------------------------------------------------------------*\
Constructor
\*----------------------------------------------------------------------*/
/**
* Default constructor (required).
*/
public UserAgentPlugIn()
{
// Nothing to do
}
/*----------------------------------------------------------------------*\
Public Methods Required by *PlugIn Interfaces
\*----------------------------------------------------------------------*/
/**
* Get a displayable name for the plug-in.
*
* @return the name
*/
public String getPlugInName()
{
return "User Agent";
}
/**
* Get the sort key for this plug-in.
*
* @return the sort key string.
*/
public String getPlugInSortKey()
{
return ClassUtil.getShortClassName (getClass().getName());
}
/**
* Initialize the plug-in. This method is called before any of the
* plug-in methods are called.
*
* @throws CurnException on error
*/
public void initPlugIn()
throws CurnException
{
}
/**
* Called immediately after <i>curn</i> has read and processed a
* configuration item in the main [curn] configuration section. All
* configuration items are passed, one by one, to each loaded plug-in.
* If a plug-in class is not interested in a particular configuration
* item, this method should simply return without doing anything. Note
* that some configuration items may simply be variable assignment;
* there's no real way to distinguish a variable assignment from a
* blessed configuration item.
*
* @param sectionName the name of the configuration section where
* the item was found
* @param paramName the name of the parameter
* @param config the {@link CurnConfig} object
*
* @throws CurnException on error
*
* @see CurnConfig
*/
public void runMainConfigItemPlugIn (String sectionName,
String paramName,
CurnConfig config)
throws CurnException
{
try
{
if (paramName.equals (VAR_USER_AGENT))
{
defaultUserAgent = config.getConfigurationValue (sectionName,
paramName);
}
}
catch (ConfigurationException ex)
{
throw new CurnException (ex);
}
}
/**
* Called immediately after <i>curn</i> has read and processed a
* configuration item in a "feed" configuration section. All
* configuration items are passed, one by one, to each loaded plug-in.
* If a plug-in class is not interested in a particular configuration
* item, this method should simply return without doing anything. Note
* that some configuration items may simply be variable assignment;
* there's no real way to distinguish a variable assignment from a
* blessed configuration item.
*
* @param sectionName the name of the configuration section where
* the item was found
* @param paramName the name of the parameter
* @param config the active configuration
* @param feedInfo partially complete <tt>FeedInfo</tt> object
* for the feed. The URL is guaranteed to be
* present, but no other fields are.
*
* @return <tt>true</tt> to continue processing the feed,
* <tt>false</tt> to skip it
*
* @throws CurnException on error
*
* @see CurnConfig
* @see FeedInfo
* @see FeedInfo#getURL
*/
public boolean runFeedConfigItemPlugIn (String sectionName,
String paramName,
CurnConfig config,
FeedInfo feedInfo)
throws CurnException
{
try
{
if (paramName.equals (VAR_USER_AGENT))
{
String value = config.getConfigurationValue (sectionName,
paramName);
perFeedUserAgentMap.put (feedInfo, value);
log.debug ("[" + sectionName + "]: UserAgent=" + value);
}
return true;
}
catch (ConfigurationException ex)
{
throw new CurnException (ex);
}
}
/**
* Called after the entire configuration has been read and parsed, but
* before any feeds are processed. Intercepting this event is useful
* for plug-ins that want to adjust the configuration. For instance,
* the <i>curn</i> command-line wrapper intercepts this plug-in event
* so it can adjust the configuration to account for command line
* options.
*
* @param config the parsed {@link CurnConfig} object
*
* @throws CurnException on error
*
* @see CurnConfig
*/
public void runPostConfigPlugIn (CurnConfig config)
throws CurnException
{
if (defaultUserAgent == null)
{
StringBuilder buf = new StringBuilder();
// Standard format seems to be:
//
// tool/version (+url)
//
// e.g.: Googlebot/2.1 (+http://www.google.com/bot.htm
Version version = Version.getInstance();
buf.append (version.getApplicationName());
buf.append ('/');
buf.append (version.getVersion());
buf.append (" (+");
buf.append (version.getWebSite());
buf.append (')');
defaultUserAgent = buf.toString();
}
}
/**
* <p>Called just before a feed is downloaded. This method can return
* <tt>false</tt> to signal <i>curn</i> that the feed should be
* skipped. The plug-in method can also set values on the
* <tt>URLConnection</tt> used to download the plug-in, via
* <tt>URL.setRequestProperty()</tt>. (Note that <i>all</i> URLs, even
* <tt>file:</tt> URLs, are passed into this method. Setting a request
* property on the <tt>URLConnection</tt> object for a <tt>file:</tt>
* URL will have no effect--though it isn't specifically harmful.)</p>
*
* <p>Possible uses for a pre-feed download plug-in include:</p>
*
* <ul>
* <li>filtering on feed URL to prevent downloading non-matching feeds
* <li>changing the default User-Agent value
* <li>setting a non-standard HTTP header field
* </ul>
*
* @param feedInfo the {@link FeedInfo} object for the feed to be
* downloaded
* @param urlConn the <tt>java.net.URLConnection</tt> object that will
* be used to download the feed's XML.
*
* @return <tt>true</tt> if <i>curn</i> should continue to process the
* feed, <tt>false</tt> to skip the feed
*
* @throws CurnException on error
*
* @see FeedInfo
*/
public boolean runPreFeedDownloadPlugIn (FeedInfo feedInfo,
URLConnection urlConn)
throws CurnException
{
String userAgent = perFeedUserAgentMap.get(feedInfo);
if (userAgent == null)
userAgent = defaultUserAgent;
// Set the user-agent header.
log.debug ("Using user agent \"" + userAgent + "\" for feed \"" +
feedInfo.getURL() + "\"");
urlConn.setRequestProperty ("User-Agent", userAgent);
return true;
}
} |
class JobList:
"""Generic job class."""
def __init__(self, data=None, **kw):
self._raw = data
@property
def values(self):
raise NotImplementedError()
def __len__(self):
return len(self.values())
def __iter__(self):
for job in self.values():
yield job
def schedulled_pods(self):
for key, value in self.items():
if len(value) > 0:
# XXX work with both nomad and k8s?
schedulled = list(filter(lambda x: x[0] == 'PodScheduled', value[0].events)) |
Evaluation of Urea-motility-indole medium for recognition and differentiation of Salmonella and Shigella species in stool cultures
A semisolid urea-motility-indole medium designed for detection in Enterobacteriaceae of urease activity, motility, and indole production in one tube was prepared and evaluated. The formulation of the medium was similar to that of Christensen urea agar, but the agar concentration was 0.2%, and 1% tryptone was added. Results with 687 strains of Enterobacteriaceae were the same as those obtained with standard test media (98% overall agreement). The urea-motility-indole medium was also used in combination with Kligler iron agar for the recognition and differentiation of Salmonella and Shigella species from colonies picked from plating media in fecal cultures. This combination was compared with the combination of Kligler iron agar and lysine iron agar with 507 strains of non-lactose-fermenting Enterobacteriaceae. Although both combinations enabled the presumptive recognition and differentiation of Salmonella and Shigella species, an analysis of data indicated that the combination of Kligler iron agar and urea-motility-indole medium performed better than the combination of Kligler iron agar and lysine iron agar in detecting Salmonella and Shigella species. |
Cherokee Townhouses: Architectural Adaptation to European Contact in the Southern Appalachians
Public structures known as townhouses were hubs of public life in Cherokee towns in the southern Appalachians during the seventeenth and eighteenth centuries A.D., and in towns predating European contact. Townhouses were sources of cultural stability and conservatism during periods of dramatic change, and they were an architectural medium through which Cherokee towns adapted to life in the postcontact Southeast. This article summarizes the characteristics of townhouses in the southern Appalachians dating from the thirteenth through the eighteenth centuries A.D., focusing on size and shape, the surfaces on which they were built, sequences of building and rebuilding, and the presence or absence of burials inside townhouses. The architectural form of townhouses rooted people to particular places, but Cherokee townhouses also enabled towns to move from one place to another, because a town could build a townhouse at any particular place, old or new. |
Scalable Application-Dependent Diagnosis of Interconnects of SRAM-Based FPGAs
This paper presents a new method for diagnosing (detection and location) multiple faults in an application-dependent interconnect of a SRAM-based FPGA. For fault detection, the proposed technique retains the original interconnect configuration and modifies the function of the LUTs using the new LUT programming function 1-Bit Sum Function (1-BSF); in addition, it utilizes features such as branches in the nets as well as the primary (unused) IOs of the FPGAs. The proposed method detects all possible stuck-at and bridging faults of all cardinalities in a single configuration; fault detection requires 1 + log2k test configurations for multiple stuck-at location and 2 + 2log2k additional test configurations to locate more than one pair-wise bridging faults (where k denotes the maximum combinational depth of the FPGA circuit). Following detection, the locations of multiple faults are hierarchically identified using the walking-1 test set and an adaptive approach for the interconnect structure. Net ordering independence is accomplished by utilizing features such as the presence of paths of nets that are either disjoint or joint between the primary input and at least one primary output. As validated by simulation on benchmark circuits, the proposed method scales extremely well for different Virtex FPGA families; this results in a significant reduction in the number of configurations for diagnosing multiple faults. |
<reponame>griwes/reaveros
/*
* Copyright © 2021 Michał 'Griwes' Dominiak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <boot-memmap.h>
#include <climits>
#ifndef EFIAPI
#ifdef _MSC_EXTENSIONS
#define EFIAPI __cdecl
#else
#define EFIAPI __attribute__((ms_abi))
#endif
#endif
#include "types.h"
// Loader-side UEFI definitions: system table revisions, status codes, and
// helpers for protocol access, ACPI discovery and the boot-time memory map.
namespace efi_loader
{
// Little-endian ASCII "IBI SYST" — the EFI system table header signature.
constexpr auto EFI_SYSTEM_TABLE_SIGNATURE = 0x5453595320494249;

// Revision numbers: major version in the high 16 bits, minor in the low.
constexpr auto EFI_2_60_SYSTEM_TABLE_REVISION = ((2 << 16) | (60));
constexpr auto EFI_2_50_SYSTEM_TABLE_REVISION = ((2 << 16) | (50));
constexpr auto EFI_2_40_SYSTEM_TABLE_REVISION = ((2 << 16) | (40));
constexpr auto EFI_2_31_SYSTEM_TABLE_REVISION = ((2 << 16) | (31));
constexpr auto EFI_2_30_SYSTEM_TABLE_REVISION = ((2 << 16) | (30));
constexpr auto EFI_2_20_SYSTEM_TABLE_REVISION = ((2 << 16) | (20));
constexpr auto EFI_2_10_SYSTEM_TABLE_REVISION = ((2 << 16) | (10));
constexpr auto EFI_2_00_SYSTEM_TABLE_REVISION = ((2 << 16) | (00));
constexpr auto EFI_1_10_SYSTEM_TABLE_REVISION = ((1 << 16) | (10));
constexpr auto EFI_1_02_SYSTEM_TABLE_REVISION = ((1 << 16) | (02));
constexpr auto EFI_SYSTEM_TABLE_REVISION = EFI_2_60_SYSTEM_TABLE_REVISION;
constexpr auto EFI_SPECIFICATION_VERSION = EFI_SYSTEM_TABLE_REVISION;

// EFI_STATUS error codes carry the platform word's top bit; success/warning
// codes do not.
constexpr auto high_bit = 1ull << (sizeof(EFI_STATUS) * CHAR_BIT - 1);
constexpr auto EFI_SUCCESS = 0;
constexpr auto EFI_LOAD_ERROR = 1 | high_bit;
constexpr auto EFI_INVALID_PARAMETER = 2 | high_bit;
constexpr auto EFI_UNSUPPORTED = 3 | high_bit;
constexpr auto EFI_BAD_BUFFER_SIZE = 4 | high_bit;
constexpr auto EFI_BUFFER_TOO_SMALL = 5 | high_bit;
constexpr auto EFI_NOT_READY = 6 | high_bit;
constexpr auto EFI_DEVICE_ERROR = 7 | high_bit;
constexpr auto EFI_WRITE_PROTECTED = 8 | high_bit;
constexpr auto EFI_OUT_OF_RESOURCES = 9 | high_bit;
constexpr auto EFI_VOLUME_CORRUPTED = 10 | high_bit;
constexpr auto EFI_VOLUME_FULL = 11 | high_bit;
constexpr auto EFI_NO_MEDIA = 12 | high_bit;
constexpr auto EFI_MEDIA_CHANGED = 13 | high_bit;
constexpr auto EFI_NOT_FOUND = 14 | high_bit;
constexpr auto EFI_ACCESS_DENIED = 15 | high_bit;
constexpr auto EFI_NO_RESPONSE = 16 | high_bit;
constexpr auto EFI_NO_MAPPING = 17 | high_bit;
constexpr auto EFI_TIMEOUT = 18 | high_bit;
constexpr auto EFI_NOT_STARTED = 19 | high_bit;
constexpr auto EFI_ALREADY_STARTED = 20 | high_bit;
constexpr auto EFI_ABORTED = 21 | high_bit;
constexpr auto EFI_ICMP_ERROR = 22 | high_bit;
constexpr auto EFI_TFTP_ERROR = 23 | high_bit;
constexpr auto EFI_PROTOCOL_ERROR = 24 | high_bit;
constexpr auto EFI_INCOMPATIBLE_VERSION = 25 | high_bit;
constexpr auto EFI_SECURITY_VIOLATION = 26 | high_bit;
constexpr auto EFI_CRC_ERROR = 27 | high_bit;
constexpr auto EFI_END_OF_MEDIA = 28 | high_bit;
// Codes 29 and 30 are intentionally skipped — reserved by the UEFI spec.
constexpr auto EFI_END_OF_FILE = 31 | high_bit;
constexpr auto EFI_INVALID_LANGUAGE = 32 | high_bit;
constexpr auto EFI_COMPROMISED_DATA = 33 | high_bit;
constexpr auto EFI_IP_ADDRESS_CONFLICT = 34 | high_bit;
constexpr auto EFI_HTTP_ERROR = 35 | high_bit;

struct EFI_SYSTEM_TABLE;

// Globals captured once at loader entry; everything else reads them.
extern EFI_SYSTEM_TABLE * system_table;
extern EFI_HANDLE image_handle;

// Record the firmware-provided system table and image handle for later use.
inline void initialize(EFI_SYSTEM_TABLE * table, EFI_HANDLE handle)
{
    system_table = table;
    image_handle = handle;
}

// Open a protocol by GUID, either on a specific handle or (second overload)
// on any handle providing it. NOTE(review): the const char * appears to be a
// human-readable protocol name for diagnostics — confirm in the definition.
void * open_protocol_by_guid(EFI_HANDLE, const EFI_GUID &, const char *);
void * open_protocol_by_guid(const EFI_GUID &, const char *);

// Page-granular allocation through boot services.
void * allocate_pages(std::size_t, EFI_MEMORY_TYPE type);
void deallocate_pages(void *, std::size_t);

// Location of the ACPI root: table revision plus its physical address.
struct acpi_information
{
    std::size_t revision;
    std::uintptr_t root;
};

acpi_information find_acpi_root();

// Snapshot of the firmware memory map plus the loader's translated entries.
struct memory_map
{
    std::size_t key;                            // map key (needed to exit boot services)
    std::size_t size;                           // number of entries
    std::uint32_t efi_entry_size;               // firmware-reported descriptor stride
    char * efi_entries;                         // raw firmware descriptor buffer
    boot_protocol::memory_map_entry * entries;  // loader-format entries

    EFI_MEMORY_DESCRIPTOR * get_efi_entry(std::size_t index)
    {
        // Descriptors are strided by the firmware-reported efi_entry_size,
        // which may differ from sizeof(EFI_MEMORY_DESCRIPTOR) — hence the
        // manual byte arithmetic instead of array indexing.
        return reinterpret_cast<EFI_MEMORY_DESCRIPTOR *>(efi_entries + efi_entry_size * index);
    }

    void account_for_stack(std::uintptr_t begin, std::uintptr_t end);
};

memory_map get_memory_map();
void exit_boot_services(const memory_map &);
}
// Token-paste helpers: PROTO_GUID(LOADED_IMAGE) -> EFI_LOADED_IMAGE_PROTOCOL_GUID,
// PROTOCOL(LOADED_IMAGE) -> EFI_LOADED_IMAGE_PROTOCOL.
#define PROTO_GUID(PROTO) EFI_##PROTO##_PROTOCOL_GUID
#define PROTOCOL(PROTO) EFI_##PROTO##_PROTOCOL
// Open a protocol on a specific handle; #PROTO also passes the protocol's
// name as a string (used by open_protocol_by_guid's const char * parameter).
#define FIND_PROTOCOL_FROM_HANDLE(HANDLE, PROTO) \
(reinterpret_cast<PROTOCOL(PROTO) *>( \
::efi_loader::open_protocol_by_guid(HANDLE, PROTO_GUID(PROTO), #PROTO)))
// Open a protocol located by GUID alone, on whichever handle provides it.
#define FIND_PROTOCOL(PROTO) \
(reinterpret_cast<PROTOCOL(PROTO) *>(::efi_loader::open_protocol_by_guid(PROTO_GUID(PROTO), #PROTO)))
|
Democrats appear to have enough votes to filibuster the confirmation of Neil Gorsuch to the U.S. Supreme Court under the existing procedures of the Senate. So now the question is whether the Republicans have enough votes to “go nuclear” and change those procedures, essentially wiping out the ability of a minority party to filibuster a Supreme Court pick.
Republican Senate leaders, according to party officials, are laying out a path to put Gorsuch on the court this week, no matter what Democrats do. Republicans had wanted to do it in two votes — the vote on “cloture,” the term for ending debate or a filibuster, on Thursday, and — if they got at least 60 yes votes — moving to an up-or-down vote on Friday.
But Democrats aren’t expected to allow Gorsuch’s confirmation this way. Already, 44 Democratic-leaning senators have said they will not support cloture. So if the initial cloture vote fails on Thursday, Senate Majority Leader Mitch McConnell is expected to call for a vote that would change Senate procedure and require only 51 votes for the nomination of a Supreme Court justice to proceed, instead of 60. These changes to Senate procedure can be done by a simple majority vote.
If the Senate takes this path, there would be four votes: the initial cloture; the vote on the rule change; the cloture vote under this new rule, and then the vote on Gorsuch. There could be more — McConnell could hold more than one vote on the initial cloture in an attempt to get Democrats to back down from the filibuster and to avoid the rule change, which is controversial even among Republicans.
Under this process, the votes on cloture and the various rules around it are expected on Thursday, with a formal vote on Gorsuch’s confirmation on Friday.
But the Gorsuch confirmation could still all fall apart. Since Vice President Mike Pence can cast a tie-breaking vote, Republicans need at least 50 of their 52 members to support changing the rules for Supreme Court nominees. The GOP can afford only two defections. Three would scuttle everything.
So far, no Senate Republican has said that he or she will oppose the rule change. But Maine’s Susan Collins has expressed wariness about the move, and both she and Alaska’s Lisa Murkowski have voted against the GOP on high-profile issues this year, such as the confirmation of Betsy DeVos to be the secretary of education.
But even if Collins and Murkowski oppose the rule change, it is hard to see where Democrats find a third vote.
According to FiveThirtyEight’s Trump Score, the GOP members who vote against the president’s position most often are, in order: Collins, Kentucky’s Rand Paul, Murkowski, Tennessee’s Bob Corker and Arizona’s John McCain.
Corker and McCain, in their public comments, sound as if they will join with McConnell if necessary, as does South Carolina’s Lindsey Graham, a frequent Trump critic. Paul has not indicated that he will break from party lines on the issue.
Only one Republican, Nevada’s Dean Heller, is up for re-election next year in a state that Hillary Clinton won in 2016. He also seems very likely to take his party’s position.
“There is an overwhelming consensus from Nevadans: It’s time to put Judge Neil Gorsuch on the Supreme Court,” Heller wrote in an op-ed published on Monday in the Reno Gazette-Journal. “I agree and fully support him, and I’ll work to see that Judge Gorsuch is confirmed to serve on our nation’s highest court.”
As with the health care process in the House last month, watch the timeline. McConnell has repeatedly said that Gorsuch will be confirmed this Friday. If he pushes back that timeline, it suggests the Republican leader does not have 50 votes (plus Pence) for the rule change. Paul Ryan, McConnell’s counterpart in the House, scheduled a vote on the American Health Care Act on a Thursday, delayed it to a Friday and then pulled the bill altogether. Will McConnell be more successful? |
def _forward_stft(wav: th.Tensor,
                  kernel: th.Tensor,
                  window: th.Tensor,
                  return_polar: bool = False,
                  pre_emphasis: float = 0,
                  frame_hop: int = 256,
                  onesided: bool = False,
                  center: bool = False,
                  eps: float = EPSILON) -> th.Tensor:
    """Forward STFT implemented as a 1D convolution with a DFT kernel.

    Args:
        wav: input signal, N x S or N x C x S (2D/3D enforced below)
        kernel: stacked real/imag DFT basis; the onesided math below implies
            shape (2 * fft_size, 1, frame_len) — TODO confirm against the
            kernel construction code
        window: analysis window, multiplied into the kernel
        return_polar: if True return (magnitude, phase) instead of (real, imag)
        pre_emphasis: per-frame pre-emphasis factor, applied when > 0
        frame_hop: hop size in samples
        onesided: keep only the non-negative frequency bins
        center: reflect-pad the signal by half a frame on each side
        eps: floor added under the magnitude square root for stability

    Returns:
        Tensor of shape (..., num_bins, num_frames, 2) holding either
        (real, imag) or (magnitude, phase) pairs along the last dim.
    """
    wav_dim = wav.dim()
    if wav_dim not in [2, 3]:
        raise RuntimeError(f"STFT expect 2D/3D tensor, but got {wav_dim:d}D")
    N, S = wav.shape[0], wav.shape[-1]
    # Collapse any channel dimension so conv1d sees (N*C, 1, S).
    wav = wav.view(-1, 1, S)
    if center:
        pad = kernel.shape[-1] // 2
        wav = tf.pad(wav, (pad, pad), mode="reflect")
    # Window the DFT basis once instead of windowing every frame.
    kernel = kernel * window
    if pre_emphasis > 0:
        # tf.unfold flattens each frame into a column:
        # frames is (batch, frame_len, num_frames).
        frames = tf.unfold(wav[:, None], (1, kernel.shape[-1]),
                           stride=frame_hop,
                           padding=0)
        # In-place pre-emphasis within each frame: x[t] -= a * x[t-1];
        # the first sample is scaled by (1 - a) instead.
        frames[:, 1:] = frames[:, 1:] - pre_emphasis * frames[:, :-1]
        frames[:, 0] *= (1 - pre_emphasis)
        # Apply the (bins, frame_len) basis to every frame column at once.
        packed = th.matmul(kernel[:, 0][None, ...], frames)
    else:
        packed = tf.conv1d(wav, kernel, stride=frame_hop, padding=0)
    if wav_dim == 3:
        # Restore the channel dimension that was flattened above.
        packed = packed.view(N, -1, packed.shape[-2], packed.shape[-1])
    # The kernel stacks real parts on top of imaginary parts along the bin axis.
    real, imag = th.chunk(packed, 2, dim=-2)
    if onesided:
        # Keeps fft_size // 2 + 1 bins, assuming kernel.shape[0] == 2 * fft_size
        # (real + imag of all two-sided bins) — TODO confirm kernel layout.
        num_bins = kernel.shape[0] // 4 + 1
        real = real[..., :num_bins, :]
        imag = imag[..., :num_bins, :]
    if return_polar:
        mag = (real**2 + imag**2 + eps)**0.5
        pha = th.atan2(imag, real)
        return th.stack([mag, pha], dim=-1)
    else:
        return th.stack([real, imag], dim=-1)
SPATIAL PLANNING TEXT INFORMATION PROCESSING WITH USE OF MACHINE LEARNING METHODS
Spatial development plans provide important information on future land development capabilities. Unfortunately, at the moment access to planning information in Poland is limited. Despite many initiatives taken to standardize planning documents, a standard for recording plans has not yet been developed. Each of the planning areas has a symbol and a category of land use, which is different in each of the plans. For this reason, it is very difficult to carry out an analysis enabling aggregation of all areas with a specific, shared development function. The authors in the article conduct experiments aimed at using machine learning methods to process the text part of plans and classify them. The main aim was to find the best method for grouping texts of zones with the same land use. The experiment consists of an attempt to automatically classify the texts of findings for individual areas into the 10 defined categories of land use. Thanks to this, it is possible to predict the future land use function for a specific zone text regulation and aggregate all zones with a specific land use type. In the proposed solution to the classification problem of heterogeneous planning information, the authors used the k-means algorithm and artificial neural networks. The main challenge for this solution, however, was not the design of the classification tool but rather the preprocessing of the text. In this paper an approach for text preprocessing as well as selected methods of text classification is presented. The results of the work indicate that CNNs are better suited to solving the presented problem. K-means clustering produces clusters in which texts are not grouped according to land use function, which is not useful in the context of zone aggregation.
INTRODUCTION
Spatial planning information is information that, as the name suggests, apply to a planned and regulated state. In the context of spatial planning, it is, therefore, information that concerns desirable future changes that are planned in and as administrative space. The sources of this information are spatial planning documents. Depending on the level of detail they may contain plans, directions or spatial policy implemented in a given area. These documents can also contain more detailed rules and arrangements regarding future land development.
The basic planning document in Poland in communities is the local spatial development plan, which is drawn up at the local (municipal) level. It is the most important document shaping spatial planning in the commune. This is primarily due to the fact that the local plan has the status of an act of local law and its provisions directly determine the rights of property owners. They have direct binding effects for property owners. The plan is also a document based on which other decisions are made, e.g. measures for economic development or decisions on * Corresponding author environmental conditions. Planning information from the plan is of key importance for monitoring planned changes in land use.
Poland has a hierarchical planning system. Plans are prepared at national, regional and local level. In Poland, municipalities are not obliged to create local plans (Böhm A., 2008). However, a lack of a local plan causes spatial development that is often chaotic and uncontrolled, resulting in dysfunctional spatial organization. Work has been undertaken for many years to standardize local plans in Poland. In some European countries, i.e. the Netherlands (IMRO standard) or in Germany there are standards for recording plans, thanks to which it is possible to compare and analyse planned changes in space across the whole country. In Poland each region/city has its own rules for developing plans. Although there are legal conditions determining the minimum scope of the local plan (Regulation, 2003), this scope is extended and invariably modified. The lack of standards for writing and publishing plans makes obtaining comprehensive information on planned changes on a scale larger than a city or region very difficult. Performing even a simple analysis, for example, finding all areas for multi-family housing in a region, is currently very difficult. It can be an easy task on a city scale that uses a certain standard. At present, at the scale of the entire region, it is impossible. Analysis of this kind of information is important from the point of view of monitoring planned investment processes. Lack of monitoring can cause serious effects in space, such as urban sprawl and escalating spatial conflicts (Kazak J., 2013).
The authors in the article undertake a study involving the processing of the text parts of the local spatial development plan and its classification into specific categories of land types for analysis and comparisons. For this purpose, they use data that describes detailed regulations for individual zones in a plan, available in the form of HTML files.
To solve the problem of classifying heterogeneous planning information, we propose the use of well-known supervised and unsupervised learning methods. Both of them have advantages and disadvantages. The main advantage of unsupervised learning methods is that the data do not need to be labelled, which minimizes the human effort required to prepare data manually.
However, supervised learning models thanks to direct feedback and evaluation based on it are able to produce more accurate results. In general we can use supervised learning to predict exact known output and unsupervised learning for tasks where we don`t know what exact output should be and we want to find hidden patterns in our data. Indeed unsupervised learning methods are more adaptive as they infer patterns straight from the data, not from their labels. The target variables may change over time and lead to Concept Drift or Concept Evolution. This characteristic could be useful in tasks where all sets of labels are not completely known or they are evolving. One of disadvantages of unsupervised methods is that they are harder to evaluate. If we know the labels and want to compare accuracy of classification task with the clusters produced by selected unsupervised algorithm we need to take care of things like labels permutations among clusters, homogeneity and completeness of labels in particular clusters. We could also combine supervised and unsupervised learning methods and use semi-supervised learning where some of the training data are missing their labels.
From the unsupervised learning methods we chose one of the simplest ones -the k-means method (MacQueen, 1967). It is a well-known technique for data cluster analysis and together with its simplicity it constitutes a great tool which quickly provides training results. From the supervised learning methods we decided to focus on artificial neural networks, which are well known from their advantages like: generalization ability, prediction of output data based on input data without the need to explicitly define the relationship between them or that they can learn any dynamic, non-linear input-output relationship arbitrarily well (Świetlicka et al., 2019). It was also shown that artificial neural networks can solve many of the natural language processing (NLP), like classification, named entity recognition (NER), image captioning, language translation and many others (Brownlee, 2019). Supervised machine learning (e.g. artificial neural networks) models require a lot of labeled data to reach good accuracybut in many cases we know all set of labels but we don`t have resources to label our whole dataset. In such cases we could use techniques called Few-Shot Learning to produce classifiers trained with a very small amount of labelled training data.
Using machine learning methods in classification of texts from zoning plans requires several steps, as presented in Fig. 1. The paper is organised as follows. In section 2 we are providing a short description of the context of our problem. In section 3 we describe methods of preprocessing and supervised and unsupervised methods of classification, while in section 4 we provide results of the proposed methods. Finally in section 5 we provide a short conclusion with prospective future works.
BACKGROUND AND CONTEXT OF A PROBLEM
The local spatial development plan (zoning plan) is an act of law in which the purpose of a given area is determined, the location of public purpose investments is determined and the development methods and development conditions are determined. It consists of text -plan text and graphic -plan drawing ( Figure 2). Both parts are integrally connected and legally binding. The zoning plan in Poland, like in many European countries, sets out the socalled zones where specific types of arrangements apply (regulations, restrictions, rules). The plan defines the general regulations that apply to the entire area covered by the plan and specific regulations that apply to specific zones in the plan. The representation of zoning plans varies greatly. The differences appear in both the textual and graphic parts. In the textual part they concern both the substantive content and various ways of editing the plan's text. In the graphic part, the differences are mainly associated with the use of various symbols and markings defining the future land use of the area. Sharing local plans also varies. Most municipalities provide plans in the form of a georeferenced raster image. A small percentage of plans are available in vector form. Usually, larger cities that have a standard, choose this form of sharing plans (Kaczmarek et al., 2014).
The service of National Integration of Local Spatial Development Plans has been available in Poland, following the INSPIRE Directive, since 2017 (National Integration of Local Spatial Development Plans, 2020) It provides current integrated local spatial development plans from 1296 local government units from 2477 municipalities in the country using the WMS (Web Map Service) standard. Currently 303 units are vector plans, while the remaining 993 are in the form of a raster with geo-reference (as of 30/03/2020). Detailed plans for specific areas have been made available for the plans in vector form using the GetFeatureInfo method (Fig.3). This form of the integrated document is particularly desirable because it provides the user not only with information about what is on the plan drawing, but also detailed land development regulations recorded in the text part of the plan. The interpretation of the plan based on the graphic part (plan drawing) allows obtaining information only about the intended purpose of the area. Obtaining information about other arrangements, i.e. land development rules, building lines in force, etc. is only possible after consulting the plan text. Thanks to this form of integration, it is possible to obtain information directly from the geoportal not only about the textual regulations related to a given zone, but also it is possible to download the entire content of the plan (usually in the form of PDF files) and the legend of the plan drawing.
Due to the lack of a uniform classification of future land development, it is not possible to integrate all areas for which single-family housing is intended. According to the current ordinance and currently prevailing planning practice, the symbol reserved for this type of area is MN. However, due to the occurrence of often mixed functions, such areas can be represented using many different symbols, e.g. 1-10MN_RM, MNu, MN/U, 30dMN_U, MNU_B1, MNMT3, and many others. It is not possible to uniquely identify the purpose of the area by its symbol and the designation.
The authors carry out research aimed at assigning individual texts of the plans' findings together with their symbols to 10 defined categories of land use, i.e.: 1. communication areas 2. agriculture 3. single-family housing 4. multi-family residential development 5. residential areas 6. areas of technical and production buildings 7. service development areas 8. green areas and water 9. areas of technical infrastructure 10. other The above classification is based on the regulation regarding the required scope of the local plan (Regulation, 2003). The regulation specifies basic land use categories as well as graphic symbols used while preparing spatial development plans in Poland (Jaroszewicz, 2016).
The research question is if machine learning methods can help to solve problems in spatial planning related to the integration of zoning plans. Are there methods that allow for automatic classification of the areas with the same land use and then allow their aggregation in the wider scale (e.g. country)?
Preprocessing of data
The main challenge in the task of classification of spatial planning documents is the problem of preprocessing of the text — which is obviously in Polish — with complex declensions of nouns, adjectives, adverbs, and numerals.
According to many available on-line articles, Polish is in the top 10 of the most difficult languages in the world (e.g. Macedo, 2015). It is indeed very complex, not only from the pronunciation point of view but mostly because of its grammar. Seven cases of nouns together with the conjugation of verbs creates a wide variety of a single word's forms. As a simple example in Table 1 we are showing a comparison of the word "run" with its conjugation in English and in Polish. An additional problem is related to the complexity of a single sentence, which in Polish can be built with many dependent clauses. The preprocessing of text includes preparation of data for further text mining. The main steps of preprocessing text are: tokenizing, stemming and lemmatization. Tokenization is the process of dividing text into meaningful pieces, which are called tokens. The text is divided into words, which then are grouped into sequences. The best grouping is with respect to sentences. Stemming allows to extract the base form of words (eg. playing -play). Lemmatization is similar to stemming but takes into consideration the morphological analysis of the words.
Text representation can be done in different ways. One is the bag-of-words model, in which words appear independently and their order is not taken into account (Huang, 2008). The bag-of-words model allows us to represent text as numerical feature vectors, in which only the occurrences of each word in the text are counted. To reduce the weight of unimportant words in a text, the tf-idf method (term frequency – inverse document frequency) is often used (Aizawa, 2003; Robertson, 2004). It describes the relevance of words in the text. The relevance is high if the word is often found in a specific text and rarely in others. Tf-idf can be calculated by multiplying the value of term frequency with inverse document frequency. There are many formulas to calculate term frequency (tf); the most common is the raw count of the term in the document, $\mathrm{tf}(t,d) = f_{t,d}$, where $t$ denotes a single term, $d$ is a single document, $f_{t,d}$ is the number of occurrences of $t$ in $d$, $\mathrm{df}(t)$ is the document frequency of $t$, and $N$ is the number of considered documents. The Inverse Document Frequency (idf) value can then be calculated from the following formula: $\mathrm{idf}(t) = \log\frac{N}{\mathrm{df}(t)}$. Different methods require different preparation of data. While for the unsupervised methods we chose the tf-idf method, for the supervised algorithms we decided to use standard tokenization methods.
To perform tokenization of text we decided to focus only on the basic forms of words. For this purpose we reached for the so called Morfeusz -inflectional analyzer and generator for Polish language morphology (Woliński, 2014). This public tool enables analysis of the text in order to extract the basic meaning of the word together with its features (lemma; tag; entity type: name, surname, geographical name, etc.). The basic forms of words were used to create the Tokenizer, which allows to vectorize a text corpus, by turning each text into a sequence of integers (Fig. 4).
K-means (unsupervised learning)
As a part of the first stage of research, the classification was performed by the unsupervised method using the k-means algorithm. This algorithm is one of the simplest and best known algorithms for clustering (MacQueen, 1967). The purpose of the k-means algorithm is to divide a given data set into k clusters, in which the number of clusters is defined by the user. The main task is to find centroids in each of the clusters. These centroids are created on the basis of mutual similarity, measured by a specific measure of distance (usually Euclidean distance). The algorithm classifies objects in such a way that the variance inside a single cluster is the smallest and the highest between all the clusters at the same time.
Artificial neural networks (supervised learning)
For the classification problem we have considered a few different artificial neural network structures and finally chose one for further analysis. Structure of this neural network is based on convolutional and GRU (Gated Recurrent Unit) layers arranged into a multi-channel structure (see Fig. 5). Each thread uses the embedding layer, which enables representing words as ndimensional vectors (word2vec). By using the embedding layer we are able to convey the relative meaning of words for the neural network. Embedding layer is a matrix of coefficients, where each row represents a single word. Polish word2vec is available in many versions thanks to the Polish Academy of Science. This great collection of representations of Polish words was created with Python Gensim package (Řehůřek, et. al., 2010) based on two text corpuses: National Corpus of Polish (pol. Narodowy Korpus Języka Polskiego -NKJP) and Polish Wikipedia (see Fig. 6).
Computational simplicity of convolutional layers enables making more sophisticated, extensive structures. In the presented solution we used three parallel threads as shown in Figure 5. Each thread was implemented with a different kernel size of the convolution, which can be understood as a grouping factor of words in the input sequence. The feed-back connections in the recurrent layer result in the context units always maintaining a copy of the previous values of the hidden units (since they propagate over the connections before the learning rule is applied). Thus, the network can maintain the previous state, allowing it to perform such tasks as sequence-prediction that are beyond the power of a standard multilayer perceptron. The last fully-connected layer (Dense) is performing the classification. As an activation function in this layer we used softmax function, while in the remaining layers we used relu (rectified linear unit) function. The model was compiled with categorical crossentropy loss function and Adam optimizer. To prevent neural network from overfitting we added dropout layers, which were set to drop 20% of neurons in the preceding convolutional and fullyconnected layers.
K-means clustering
Ten classes have been defined in the input parameters to the model. One of the main issue in k-means clustering is determining the number of clusters which has an effect on the clustering results. A number of methods for estimating the optimal number of clusters have been proposed e.g. elbow method (Coates et al.,2012), cross-validation (Kaufman et al.,1990), gap statistic method (Tibshirani et al., 2001). In this case the number of clusters was determined by number of land use categories. The aim was to group texts into 10 defined categories, therefore none of the statistical methods was used.
Entry to the model were fragments of plan regulations (text) without assigned categories. The Frequency-Inverse Document Frequency (tf-idf) algorithm was used to convert the text to numerical form. The scikit learn package was used to convert the texts to numeric form. The tf-idf algorithm is implemented in TfidfVectorizer module.
The results of clustering are presented in Fig. 7. Figure 7. Results of k-means clustering. Figure 7 shows the separate clusters grouping according to the kmeans clusters. In order to check the usefulness of clustering for the needs of spatial planning, the analysis and verification of texts in individual clusters were carried out. The analysis of the most commonly used words in the separated clusters indicates the low utility of the k-means algorithm for the solution to the problem presented in the article, i.e. the grouping of texts according to the category of future land use. In order to check whether there is a correlation between the created clusters and the categories that were assigned to each of the zones, a combination of both information was presented in Fig.8 and Tab.2 Evaluation of k-means clustering.
Artificial neural networks
Artificial neural networks were implemented in Python with use of Tensorflow 2 and Keras packages (Gulli et al., 2019). To carry out the experiment we had only about labeled 4 100 texts (fragments of plan regulations). Training data includes symbol of a zone, textual regulations of a zone and label: category of land use (Fig.9). In order to train and check the effectiveness of prediction, the entire set was divided into a train (75%) and validation (25%) datasets. The training and validation samples were generated randomly.
In Figures 10 and 11, the model loss and accuracy are presented. From these performance plots we can observe, that despite a discrepancy between waveforms for train and validation sets, that occur between 5th and 20th epoch, after around 25 epochs they start to converge. This observation leads us to a conclusion, that the neural network after around 30 epochs is already trained, as the value of loss does not go any lower, and at the same time the value of accuracy does not go any higher. Further training was not necessary, however it did not make the neural network overtrain. This allows us to conclude that the architecture was properly chosen. After 60 epochs, loss of train and validation data reached 9.7960e-04 and 0.1033, while accuracy reached 1 and 0.9823, respectively. Figure 10. The plot of model loss on the training and validation datasets over training epochs.
ISPRS Annals of the Photogrammetry, Remote Sensing and Spatial Information Sciences, Volume VI-4/W2-2020, 2020 5th International Conference on Smart Data and Smart Cities, 30 September -2 October 2020, Nice, France Figure 11. The plot of model accuracy on the training and validation datasets.
In any classification problem (binary or multiclass) accuracy seems to be insufficient, as classes can be unbalanced. That is why we reached for precision, recall and 1 scores together with a plot of the ROC curve (Receiver Operating Characteristic). As we considered a multiclass problem the scores were calculated as a weighted average of the scores, where the support was chosen as the number of true instances for each label. The precision, recall and 1 scores reached 0.9817, 0.9823 and 0.9816, respectively, where we used a weighted average. Additionally in Figure 12 the ROC curves are presented, each ROC curve was calculated for a single label versus all the remaining ones.
Analysing this waveform we can observe that even with a small threshold we can obtain a high TPR (True Positive Ratio) for a small FPR (False Positive Ratio), which leads us to a conclusion, that our model of neural network classifies correctly most of the texts.
CONCLUSIONS AND FUTURE WORKS
While this analysis shows the potential of classification approach using select machine learning methods, it already demonstrates that it provides helpful analytical ways to assess and synthesize Polish planning activities in regions and possibly even larger areas. The results of the experiments show that machine learning methods can be used with success in helping solving problems in the spatial planning domain. The classification of textual regulations of zones can be information enriching the spatial data (e.g. published in GML). Thanks to this, it is possible to get an answer to the question about the total area of zones with a specific type of land use in the area where vector data is available.
Each of the methods which was used in the aim of text classification has its own drawbacks. Using the CNN requires a lot of labeled data, while k-means results did not provide expected results in the context of grouping areas with the same land use. Working on NLP problems it is almost impossible not to consider Transformers by Hugging Face, which at this moment offers over 30 pre-trained models, including BERT (Vaswani, et. al. 2017, Devlin, et al. 2018, RoBERTa (Liu, et al. 2019) and XLNet (Yang, et al. 2019). The biggest struggle is again with the morphology of the Polish language, while these models are trained on full sentences, which in Polish, as we mentioned, can be very long, with many dependent clauses. Adapting these models or preparing text to make it possible to use them is one of our next milestones.
Future work involves at first developing the CNN and connecting the spatial features in text to the graphic planning documents to be able to better assess the accuracy of the classifications, also given the noted grammatical issues in the Polish language.
In future work, experiments are also planned using other unsupervised methods, i.e. DBSCAN and Affinity propagation. It is also planned to extend and modify the classification of zones in order to extract more detailed information on the future land use of the zone. Future work also includes experiments involving the extraction from the text of the spatial plan document of other relevant information, i.e. indicators related to land development (e.g. floor area ratio, maximum building height). |
TUMOR NECROSIS FACTOR AND PULMONARY HOST DEFENSE. • 1098
Tumor Necrosis Factor (TNF) is a multifunctional cytokine that has been implicated in a variety of pathologic processes. To evaluate the role of TNF during pulmonary inflammation and infection, we have generated an animal model in which the human surfactant apoprotein C promoter directs the expression of a soluble TNF antagonist. This antagonist consists of a fusion protein of the p75 soluble TNF receptor and the Fc portion of human IgG1. The transgenic mice were analyzed by Southern blot, northern blot and an ELISA analysis of serum and bronchoalveolar lavage fluid. Of the three lines of mice analyzed to date, one line expresses the transgene mRNA uniquely in the lung and secretes the fusion protein into the BAL fluid and serum. Bronchoalveolar lavage levels of the protein, corrected for dilution by the lavage fluid, are 10-40 times those in the serum (1795.8 +/- 282 ng/ml vs. 58.1 +/- 14.3 ng/ml). The mice were then evaluated by intranasal instillation of Micropolyspora faeni antigen, which is known to induce pulmonary inflammation and TNF elaboration by alveolar macrophages. Compared to littermate wildtype controls, the transgenic mice have diminished PMN recruitment into the BAL fluid (9.2% +/- 3.5 vs. 24.8% +/- 8.3), indicating attenuation of the inflammatory response. Further studies are planned to investigate the utility of this model to study pulmonary inflammation and infection including systemic and aerosol challenge with LPS and gram negative bacteria. |
def isbn_has_valid_check_digit(self, isbn):
    """Validate the check digit of a 13-digit ISBN.

    Digits at even 0-based positions weigh 1 and digits at odd positions
    weigh 3; the ISBN is valid when the weighted sum is divisible by 10.

    Raises:
        ValueError: if ``isbn`` does not match ``self.ISBN_RE``.
    """
    if not self.ISBN_RE.match(isbn):
        raise ValueError(str(isbn) + " is no valid 13-digit ISBN!")
    weighted_digits = (int(digit) * (3 if position % 2 else 1)
                       for position, digit in enumerate(isbn))
    return sum(weighted_digits) % 10 == 0
def wrap_to_octave(self, cents, octave_length=1200):
    """Fold a pitch offset in cents into a single octave.

    Returns ``cents`` reduced modulo ``octave_length`` (default 1200 cents,
    i.e. one octave); for a positive ``octave_length`` the result lies in
    ``[0, octave_length)``, including for negative inputs.
    """
    wrapped = cents % octave_length
    return wrapped
n,k = map(int,raw_input().strip().split(' '))
if(n==1):
print 0
elif(k>=n/2):
print (n*(n-1)/2)
else:
print ((2*n*k) - (k*(2*k+1)))
|
// PutOrReplace enqueues a job with the given jid and class to the named
// queue, forwarding any caller-supplied put options and appending the
// "replace", 0 arguments. NOTE(review): the exact replace semantics are
// defined by the server-side "put" command — confirm against its API.
// It returns the command's int64 reply, -1 with an error on failure, or
// -1 with a nil error when the reply is not an int64.
func (q *queue) PutOrReplace(class string, jid string, data interface{}, opt ...putOptionFn) (int64, error) {
pd := newPutData()
// Apply the optional put parameters (delay, extra args, ...).
if err := pd.setOptions(opt); err != nil {
return -1, err
}
// Build the positional command: "put", timestamp, "", queue name, jid,
// class, serialized payload, delay, then option args. The order is part
// of the wire protocol — do not reorder.
args := []interface{}{"put", timestamp(), "", q.name, jid, class, marshal(data), pd.delay}
args = append(args, pd.args...)
args = append(args, "replace", 0)
r, err := q.c.Do(args...)
if err != nil {
return -1, err
}
// Only an int64 reply is meaningful to callers.
if r, ok := r.(int64); ok {
return r, nil
}
return -1, nil
}
string = str(raw_input())
def palindromo(string):
string_inv = string[::-1]
if string == string_inv:
string = string[0:len(string)-1]
return 1
else:
return 0
while(palindromo(string) == 1 and len(string) > 0):
string = string[0:len(string)-1]
print len(string)
|
#include<cstdio>
#include<cstring>
#include<algorithm>
#include<map>
#include<vector>
using namespace std;
// n = number of vertices, m = number of input edges.
int n,m;
// Directed edge in a forward-star adjacency list: s -> t, nxt = index of
// the next edge leaving s (-1 terminates the list).
struct edge{
int s,t,nxt;
}e[1000005];
// e_cnt/last: implication-graph storage; tot: number of literal pairs
// (variables) currently allocated; e_cnt_bak/last_bak: snapshots used to
// rebuild the graph during the binary search in main().
int e_cnt,last[300005],tot,e_cnt_bak,last_bak[300005];
// Append the directed edge a -> b to the graph.
inline void ins(int a,int b){
e[e_cnt]=(edge){a,b,last[a]};
last[a]=e_cnt++;
}
// Tarjan bookkeeping: bl[x] = representative node of x's strongly
// connected component; dfn/low = discovery and low-link indices;
// inst = "currently on stack" flag; st/top = explicit DFS stack.
// NOTE(review): num appears unused in the visible code.
int bl[300005],num;
int dfn[300005],low[300005],TOT;
bool inst[300005];
int st[300005],top;
// Tarjan's strongly-connected-components DFS rooted at x.
// When x turns out to be an SCC root (low == dfn), pops the stack and
// records x as the representative bl[] of every popped member.
// NOTE(review): the on-stack low-link update uses low[] rather than the
// textbook dfn[]; this variant still produces correct SCCs.
void tarjan(int x){
dfn[x]=low[x]=++TOT;
inst[x]=1;
st[++top]=x;
for(int i=last[x];i!=-1;i=e[i].nxt)if(!dfn[e[i].t]){
tarjan(e[i].t);
low[x]=min(low[x],low[e[i].t]);
}else if(inst[e[i].t])low[x]=min(low[x],low[e[i].t]);
if(low[x]==dfn[x]){
// Pop the SCC; the loop stops once x itself has been popped
// (inst[x] becomes 0).
while(inst[x]){
inst[st[top]]=0;
bl[st[top]]=x;
top--;
}
}
}
// Condensation scratch space: T = inter-SCC edges stored reversed
// (target-component -> source-component), mb = member nodes of each SCC,
// d = in-degrees in that reversed DAG, q = BFS/topological queue,
// val = truth value chosen for each literal node.
vector<int> T[300005],mb[300005];
int d[300005];
int q[300005];
bool val[300005];
// Satisfiability check for the current implication graph.
// Recomputes SCCs, then sweeps components in topological order of the
// reversed condensation: a component's members are all set to 1 unless
// some member's complement (node^1) is already 1. The instance is
// accepted iff every literal pair ends with opposite values
// (val[i*2] != val[i*2+1]), as in a 2-SAT assignment.
bool check(){
memset(dfn,0,sizeof(dfn));
TOT=0;
// Literal nodes live in [2, tot*2+1].
for(int i=2;i<=tot*2+1;i++)if(!dfn[i])tarjan(i);
//	for(int i=2;i<=tot*2+1;i++)printf("%d ",bl[i]); puts("");
for(int i=2;i<=tot*2+1;i++)mb[i].clear(),T[i].clear();
for(int i=2;i<=tot*2+1;i++)mb[bl[i]].push_back(i);
// Build the reversed condensation and its in-degrees.
for(int i=0;i<e_cnt;i++)if(bl[e[i].s]!=bl[e[i].t])T[bl[e[i].t]].push_back(bl[e[i].s]),d[bl[e[i].s]]++;//,printf("%d %d %d %d\n",e[i].s,e[i].t,bl[e[i].s],bl[e[i].t]);
int h,t;
h=t=0;
// Seed the queue with SCC representatives of in-degree zero.
for(int i=2;i<=tot*2+1;i++)if(bl[i]==i&&!d[i])q[++t]=i;
memset(val,0,sizeof(val));
while(h<t){
h++;
int x=q[h];
// Set the whole component true only if no member's complement is
// already true.
bool ok=1;
for(int i=0;i<mb[x].size();i++)if(val[mb[x][i]^1])ok=0;
if(ok)for(int i=0;i<mb[x].size();i++)val[mb[x][i]]=1;
for(int i=0;i<T[x].size();i++){
d[T[x][i]]--;
if(!d[T[x][i]])q[++t]=T[x][i];
}
}
//	for(int i=1;i<=t;i++)printf("%d ",q[i]); puts("");
// Every variable must have exactly one true literal.
for(int i=1;i<=tot;i++)if(!(val[i*2]^val[i*2+1]))return false;
return true;
}
// Input edge: endpoints u, v, colour c, time t; org remembers the
// original 1-based input index for output after sorting.
struct E{
int u,v,c,t,org;
// Sort ascending by time so a prefix of ee[] is "all edges up to a bound".
bool operator <(const E b)const{
return t<b.t;
}
}ee[50005];
// us[i]: edge i takes part in a same-colour conflict at some vertex.
bool us[50005];
// vis: colour -> first edge of that colour seen at the current vertex.
map<int,int> vis;
// EE[v]: indices (into the sorted ee[]) of edges incident to vertex v.
vector<int> EE[50005];
// Reads a graph of m coloured, timestamped edges, encodes the selection
// constraints as an implication graph over literals (edge i uses nodes
// i*2 and i*2+1), and binary-searches the smallest time bound for which
// check() succeeds.
int main(){
scanf("%d%d",&n,&m);
for(int i=1;i<=m;i++)scanf("%d%d%d%d",&ee[i].u,&ee[i].v,&ee[i].c,&ee[i].t),ee[i].org=i;
// Sort edges by time; indices below refer to this sorted order.
sort(ee+1,ee+m+1);
for(int i=1;i<=m;i++){
EE[ee[i].u].push_back(i);
EE[ee[i].v].push_back(i);
}
memset(last,-1,sizeof(last));
// Literal pairs 1..m are the edges; auxiliary variables are allocated
// past tot = m below.
tot=m;
for(int i=1;i<=n;i++){
int cnt=0;
vis.clear();
// Pair up edges of equal colour at this vertex and link their
// literals with mutual implications; mark both as conflicted.
for(int j=0;j<EE[i].size();j++){
int id=EE[i][j];
if(!vis.count(ee[id].c)){vis[ee[id].c]=id; continue;}
cnt++;
int p=vis[ee[id].c];
us[p]=us[id]=1;
ins(id*2+1,p*2);
ins(p*2+1,id*2);
}
// More than one same-colour clash at a single vertex is infeasible.
if(cnt>1){puts("No"); return 0;}
}
for(int i=1;i<=n;i++){
bool fst=1;
int cnt=0;
for(int j=0;j<EE[i].size();j++)if(us[EE[i][j]])cnt++;
if(cnt<2)continue;
// Chain auxiliary variables over the conflicted edges at this vertex.
// NOTE(review): this looks like a sequential at-most-one encoding —
// verify against the intended constraints.
for(int j=0;j<EE[i].size();j++)if(us[EE[i][j]]){
int id=EE[i][j];
tot++;
ins(id*2,tot*2);
ins(tot*2+1,id*2+1);
if(!fst){
ins(id*2,(tot-1)*2+1);
ins((tot-1)*2,id*2+1);
ins((tot-1)*2,tot*2);
ins(tot*2+1,(tot-1)*2+1);
}
fst=0;
}
}
//	printf("%d %d\n",tot,e_cnt);
//	for(int i=0;i<e_cnt;i++)printf("%d %d\n",e[i].s,e[i].t);
// The base constraints alone must already be satisfiable.
if(!check()){puts("No"); return 0;}
// Snapshot the graph; the search below re-adds per-prefix clauses.
e_cnt_bak=e_cnt;
memcpy(last_bak,last,sizeof(last));
int l=0,r=m;
// Binary search the smallest prefix length mid such that forcing every
// later edge off (i*2 -> i*2+1) keeps the instance satisfiable.
while(l<r){
int mid=l+((r-l)>>1);
e_cnt=e_cnt_bak;
memcpy(last,last_bak,sizeof(last));
for(int i=mid+1;i<=m;i++)ins(i*2,i*2+1);
if(check())r=mid;
else l=mid+1;
}
puts("Yes");
// Re-run check() at the optimal bound to populate val[] for output.
e_cnt=e_cnt_bak;
memcpy(last,last_bak,sizeof(last));
for(int i=l+1;i<=m;i++)ins(i*2,i*2+1);
check();
int cnt=0;
for(int i=1;i<=m;i++)if(us[i]&&val[i*2])cnt++;
// Output: the time of the l-th sorted edge, the count of selected
// conflicted edges, then their original input indices.
printf("%d %d\n",ee[l].t,cnt);
for(int i=1;i<=m;i++)if(us[i]&&val[i*2])printf("%d ",ee[i].org); puts("");
}
/**
* The coder is very lazy for this initUI method
* void
*/
private void initUI()
{
setLayout(new GridLayout(4, 4, 3, 3));
setBackground(new Color(192, 192, 192, 192));
GridButton gridButton;
for(int i = 0; i < 16; i++)
{
gridButton = new GridButton(i % 4, i / 4);
add(gridButton);
gridButtonMap.put(i, gridButton);
}
next();
} |
THE days of craning your neck for a glimpse of the view outside a plane’s tiny porthole window could soon be over.
Plans are underway for a revolutionary fleet of business jets outfitted with huge windows that would offer unparalleled views of the earth below — as well as providing more natural light than previously imagined on a passenger plane.
Brazilian aerospace company Embraer has come up with the revolutionary design for their Lineage 1000 business jets.
The design concept, which they have dubbed the Kyoto Airship, includes massive viewing platforms with large rectangular windows and skylights, which can be enjoyed by flyers wherever they are sitting, Business Insider reports.
The Japanese-inspired design also accommodates for passengers who would prefer to sit on the floor rather than standard seats.
And while skylights have been on planes before, these would be the first to feature on large jets with a pressurised cabin.
Of course, it all comes at a great cost: the Embraer Lineage 1000 has a $74 million price tag.
“We’ve proven to ourselves that we can make this, and when a Lineage customer is ready to order this aeroplane, we will make it,” Embraer’s vice president of interior design Jay Beever told Business Insider.
The Embraer Lineage 1000 business jet is already an enviable way to travel.
The lavishly decorated jet comfortably seats 19 passengers and boasts a central living area, bedrooms, walk-in shower, dining room, fully-equipped kitchen — complete with espresso machines, ovens and a dishwasher — and iPod docking stations, televisions, Blu-ray players and Wi-Fi connectivity. |
import java.util.*;
import java.io.*;
// Reads n items, each a cost plus two 0/1 flags, and greedily picks k of
// them minimising total cost. NOTE(review): the flags presumably mean
// "wanted by reader A" / "wanted by reader B"; every pick must satisfy
// both readers, either via one item both want or a pair of single-reader
// items.
public class Main {
public static void main(String[] args)throws IOException {
BufferedReader br;
br = new BufferedReader(new InputStreamReader(System.in));
//**********************************************************************
// br = new BufferedReader(new FileReader("input.txt"));
// PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter("output.txt")));
//**********************************************************************
String inp[] = br.readLine().trim().split(" ");
int n = Integer.parseInt(inp[0]);
int k = Integer.parseInt(inp[1]);
// Min-heaps of costs, split by flag combination: first flag only,
// second flag only, or both flags set. Items with neither flag are
// discarded.
PriorityQueue<Integer> alice = new PriorityQueue<>();
PriorityQueue<Integer> bob = new PriorityQueue<>();
PriorityQueue<Integer> both = new PriorityQueue<>();
long ans = 0l;
for (int i = 0; i < n; i++) {
inp = br.readLine().trim().split(" ");
int a = Integer.parseInt(inp[1]);
int b = Integer.parseInt(inp[2]);
if (a == 1 && b == 1)
both.add(Integer.parseInt(inp[0]));
else if (a == 1)
alice.add(Integer.parseInt(inp[0]));
else if (b == 1)
bob.add(Integer.parseInt(inp[0]));
}
// Feasibility: each side must be able to reach k items (own + shared).
if ((alice.size() + both.size() ) < k || (bob.size() + both.size()) < k) {
System.out.println(-1);
System.exit(0);
}
int counter = 0;
// Greedy: while all three heaps are non-empty, take the cheaper of
// (cheapest alice + cheapest bob) vs the cheapest shared item.
while (!alice.isEmpty() && !bob.isEmpty() && !both.isEmpty() && counter < k) {
int ra = alice.peek();
int rb = bob.peek();
int b = both.peek();
if (ra + rb <= b ) {
ans += ra + rb;
alice.poll();
bob.poll();
counter++;
} else {
ans += b;
both.poll();
counter++;
}
}
if (counter == k) {
System.out.println(ans);
System.exit(0);
}
// Some heap ran dry: finish with the only remaining legal option. The
// feasibility check above guarantees enough elements remain.
if (both.isEmpty()) {
while (counter < k) {
ans += bob.poll() + alice.poll();
counter++;
}
} else {
while (counter < k) {
ans += both.poll();
counter++;
}
}
System.out.println(ans);
}
}
|
#include <stdio.h>
/* Read an unsigned amount from stdin and print the minimum number of
 * bills needed to pay it, using denominations 100, 20, 10, 5 and 1.
 * Greedy largest-first is optimal for this denomination set. */
int main()
{
    /* Denominations, largest first; the trailing 1 guarantees termination. */
    static const unsigned int denominations[] = {100u, 20u, 10u, 5u, 1u};

    unsigned int amount;
    scanf("%u", &amount);

    unsigned int bills = 0;
    int i;
    for (i = 0; i < 5 && amount != 0; i++)
    {
        /* Take as many of this denomination as fit, keep the remainder. */
        bills += amount / denominations[i];
        amount = amount % denominations[i];
    }

    printf("%u\n", bills);
    return 0;
}
/*
* Copyright 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package jsinterop.base;
import static com.google.common.truth.Truth.assertThat;
import com.google.gwt.junit.client.GWTTestCase;
/** GWT client-side tests covering the basic {@code JsPropertyMap} operations. */
public class JsPropertyMapTest extends GWTTestCase {
  @Override
  public String getModuleName() {
    return "jsinterop.base.TestModule";
  }

  // of(...) varargs overloads must store values without boxing primitives.
  public void testOf() {
    // Check primitives not boxed.
    assertThat(JsPropertyMap.of("a", 42).getAsAny("a").asInt()).isEqualTo(42);
    assertThat(JsPropertyMap.of("", "", "a", 42).getAsAny("a").asInt()).isEqualTo(42);
    assertThat(JsPropertyMap.of("", "", "", "", "a", 42).getAsAny("a").asInt()).isEqualTo(42);
  }

  // set/get round-trip for plain object values; absent keys read as null.
  public void testSetGet() {
    JsPropertyMap<Object> o = JsPropertyMap.of();
    assertThat(o.get("foo")).isNull();
    o.set("foo", "str");
    assertThat(o.get("foo")).isEqualTo("str");
  }

  // nestedGet walks a dot-separated path; a missing path yields null.
  public void testNestedGet() {
    JsPropertyMap<Object> o = JsPropertyMap.of();
    o.set("a", JsPropertyMap.of("b", JsPropertyMap.of("c", o)));
    assertThat(o.nestedGet("a.b.c")).isSameInstanceAs(o);
    assertThat(o.nestedGet("k.l.m")).isNull();
  }

  // has() reflects key presence, not value nullness; delete removes the key.
  public void testDeleteHas() {
    JsPropertyMap<Object> o = JsPropertyMap.of();
    assertThat(o.has("foo")).isFalse();
    o.set("foo", null);
    assertThat(o.has("foo")).isTrue();
    o.delete("foo");
    assertThat(o.has("foo")).isFalse();
  }

  // Every numeric/boolean primitive round-trips through set/getAsAny.
  public void testSetgetAsAny() {
    JsPropertyMap<Object> o = JsPropertyMap.of();
    o.set("p0", 15.5d);
    o.set("p1", 15.5f);
    o.set("p2", 15L);
    o.set("p3", 15);
    o.set("p4", (short) 15);
    o.set("p5", (char) 15);
    o.set("p6", (byte) 15);
    o.set("p7", true);
    assertThat(o.getAsAny("p0").asDouble()).isEqualTo(15.5);
    assertThat(o.getAsAny("p1").asDouble()).isEqualTo(15.5);
    assertThat(o.getAsAny("p2").asLong()).isEqualTo(15L);
    assertThat(o.getAsAny("p3").asInt()).isEqualTo(15);
    assertThat(o.getAsAny("p4").asShort()).isEqualTo(15);
    assertThat(o.getAsAny("p5").asChar()).isEqualTo(15);
    assertThat(o.getAsAny("p6").asByte()).isEqualTo(15);
    assertThat(o.getAsAny("p7").asBoolean()).isTrue();
  }

  // forEach iterates property names (insertion order in practice).
  public void testForEach() {
    StringBuilder result = new StringBuilder();
    JsPropertyMap.of("foo", "", "bar", "").forEach(t -> result.append(t));
    assertThat(result.toString()).isEqualTo("foobar");
  }
}
|
/**
 * This queue takes care of loading nodes in the background.
 * A single daemon thread pops (node, tree) work items, forces child
 * counting off the EDT, then refreshes the tree model on the EDT.
 * The thread shuts itself down after ~5 seconds of inactivity.
 */
private static final class Queue extends Thread {
    // Pending work items; LIFO so the most recently requested node loads first.
    private volatile Stack nodes = new Stack();
    private Object lock = new Object();
    private volatile boolean running = true;

    public Queue() {
        super("DirectoryChooser-BackgroundLoader");
        // Daemon: never keeps the JVM alive on its own.
        setDaemon(true);
    }

    /**
     * Enqueues a node whose children should be counted in the background.
     * Throws if the worker thread already timed out and died.
     */
    public void add(WindowsDirectoryChooserUI.FileTreeNode node, JTree tree) {
        if (!isAlive()) {
            throw new IllegalArgumentException("Queue is no longer alive");
        }
        synchronized (lock) {
            if (running) {
                nodes.addElement(new QueueItem(node, tree));
                lock.notifyAll();
            }
        }
    }

    public void run() {
        while (running) {
            // NOTE(review): 'nodes' is polled here without holding 'lock'.
            // Stack's own synchronization makes each call safe, but the
            // size()/pop() pair is not atomic — confirm only this thread pops.
            while (nodes.size() > 0) {
                final QueueItem item = (QueueItem)nodes.pop();
                final WindowsDirectoryChooserUI.FileTreeNode node = item.node;
                final JTree tree = item.tree;
                // ask how many items we got
                node.getChildCount();
                Runnable runnable = new Runnable() {
                    public void run() {
                        ((DefaultTreeModel)tree.getModel()).nodeChanged(node);
                        tree.repaint();
                    }
                };
                try {
                    // Block until the EDT has applied the model change.
                    SwingUtilities.invokeAndWait(runnable);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                } catch (InvocationTargetException e) {
                    e.printStackTrace();
                }
            }
            // wait for 5 seconds for someone to use the queue, else just
            // ends this
            // queue
            try {
                synchronized (lock) {
                    lock.wait(5000);
                }
                if (nodes.size() == 0) {
                    running = false;
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
/**
 * Encrypt a string using the provided key.
 * <p>
 * The key is expected to be a UUID string: dashes are stripped and the
 * result is truncated to its first 16 characters to form a 128-bit AES key.
 * <p>
 * NOTE(review): "AES" with no explicit transformation defaults to
 * AES/ECB/PKCS5Padding on most providers — ECB leaks plaintext patterns;
 * confirm whether a mode/IV upgrade is possible without breaking existing
 * ciphertext.
 * NOTE(review): key.getBytes()/text.getBytes() use the platform default
 * charset, so results can differ across JVMs — verify callers expect this.
 * NOTE(review): the shared static 'cipher' is lazily initialized and reused
 * without synchronization, so this method is not thread-safe.
 *
 * @param key UUID key; after dash removal, only the first 16 characters are
 *            used (a shorter key fails AES key-length validation and the
 *            error is logged)
 * @param text Data to encrypt
 * @return byte[] of encrypted data; an empty array if encryption failed
 */
public static byte[] encrypt(String key, String text)
{
    byte[] encrypted = {};
    try
    {
        // Normalize the UUID to raw characters, then truncate to 16 bytes.
        key = key.replaceAll("-", "");
        if (key.length() > 16)
        {
            key = key.substring(0, 16);
        }
        Key aesKey = new SecretKeySpec(key.getBytes(), "AES");
        if (cipher == null)
        {
            cipher = Cipher.getInstance("AES");
        }
        cipher.init(Cipher.ENCRYPT_MODE, aesKey);
        encrypted = cipher.doFinal(text.getBytes());
    }
    catch (Exception e)
    {
        // Failures are logged and an empty array returned (best-effort).
        logger.error(e.getMessage());
    }
    return encrypted;
}
/**
* Created by Aaron on 6/12/15.
*/
public class DiskFragment extends Fragment {
private ObservableScrollView mScrollView;
private View rootView;
private Timer mTimer;
private static String VALUE_START = "";
private static String VALUE_END = "";
private boolean instanceLoaded = false;
private Context mContext;
//expandablelistview with animation
private AnimatedExpandableListView listView;
private ExpandableListAdapter adapter;
private List<ExpandableListView_Helper.GroupItem> items = new ArrayList<>();
private ExpandableListView_Helper.GroupItem groupItem = new ExpandableListView_Helper.GroupItem();
public static DiskFragment newInstance() {
return new DiskFragment();
}
public Handler mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case 0:
updateUI();
}
}
};
//horbar
private String[] data_keys = {"xvdar", "xvdaw", "xvdbr", "xvdbw"};
private final TimeInterpolator enterInterpolator = new DecelerateInterpolator(1.5f);
private final TimeInterpolator exitInterpolator = new AccelerateInterpolator();
/**
* Order
*/
private static float mCurrOverlapFactor;
private static int[] mCurrOverlapOrder;
private static float mOldOverlapFactor;
private static int[] mOldOverlapOrder;
/**
* Ease
*/
private static BaseEasingMethod mCurrEasing;
private static BaseEasingMethod mOldEasing;
/**
* Enter
*/
private static float mCurrStartX;
private static float mCurrStartY;
private static float mOldStartX;
private static float mOldStartY;
/**
* Alpha
*/
private static int mCurrAlpha;
private static int mOldAlpha;
/**
* HorizontalBar
*/
private static int HOR_BAR_MAX = 100;
private final static int HOR_BAR_MIN = 0;
private final static String[] horBarLabels = {"DiskA r", "DiskA w", "DiskB r", "DiskB w"};
private final static float [][] horBarValues = { {6f, 7f, 2f, 4f},
{7f, 4f, 3f, 1f} };
private static HorizontalBarChartView mHorBarChart;
private Paint mHorBarGridPaint;
private TextView mHorBarTooltip;
private final OnEntryClickListener horBarEntryListener = new OnEntryClickListener(){
@Override
public void onClick(int setIndex, int entryIndex, Rect rect) {
if(mHorBarTooltip == null)
showHorBarTooltip(setIndex, entryIndex, rect);
else
dismissHorBarTooltip(setIndex, entryIndex, rect);
}
};
private final OnClickListener horBarClickListener = new OnClickListener(){
@Override
public void onClick(View v) {
if(mHorBarTooltip != null)
dismissHorBarTooltip(-1, -1, null);
}
};
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mContext = getActivity();
LayoutInflater inflater = getActivity().getLayoutInflater();
rootView = inflater.inflate(R.layout.diskio_layout,
(ViewGroup) getActivity().findViewById(R.id.materialViewPager), false);
init();
}
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
ViewGroup viewGroup = (ViewGroup) rootView.getParent();
if (viewGroup != null) {
viewGroup.removeAllViewsInLayout();
}
return rootView;
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mScrollView = (ObservableScrollView) view.findViewById(R.id.diskio_ScrollView);
MaterialViewPagerHelper.registerScrollView(getActivity(), mScrollView, null);
}
@Override
public void setUserVisibleHint(boolean isVisibleToUser) {
super.setUserVisibleHint(isVisibleToUser);
if (isVisibleToUser && instanceLoaded) {
long period = new SharedPres(mContext).getInt(Constants.REFRESHGAP, 10) * 1000;
mTimer = new Timer();
mTimer.schedule(new TimerTask() {
@Override
public void run() {
mHandler.sendEmptyMessage(0);
}
}, 1000, period);
}
if (!isVisibleToUser && instanceLoaded) {
mTimer.cancel();
}
}
public void init() {
mCurrOverlapFactor = 1;
mCurrEasing = new QuintEaseOut();
mCurrStartX = -1;
mCurrStartY = 0;
mCurrAlpha = -1;
mOldOverlapFactor = 1;
mOldEasing = new QuintEaseOut();
mOldStartX = -1;
mOldStartY = 0;
mOldAlpha = -1;
mHorBarChart = (HorizontalBarChartView) rootView.findViewById(R.id.horbarchart_disk);
// mHorBarChart.setOnEntryClickListener(horBarEntryListener);
// mHorBarChart.setOnClickListener(horBarClickListener);
mHorBarGridPaint = new Paint();
mHorBarGridPaint.setColor(this.getResources().getColor(R.color.bar_grid));
mHorBarGridPaint.setStyle(Paint.Style.STROKE);
mHorBarGridPaint.setAntiAlias(true);
mHorBarGridPaint.setStrokeWidth(Tools.fromDpToPx(.75f));
updateHorBarChart();
mTimer = new Timer();
initListView();
instanceLoaded = true;
}
private void initListView() {
groupItem.title = "JSON META";
items.add(groupItem);
adapter = new ExpandableListAdapter(mContext);
adapter.setData(items);
listView = (AnimatedExpandableListView) rootView.findViewById(R.id.disk_meta_listview);
listView.setAdapter(adapter);
listView.setOnGroupClickListener(new ExpandableListView.OnGroupClickListener() {
@Override
public boolean onGroupClick(ExpandableListView parent, View v, int groupPosition, long id) {
if (listView.isGroupExpanded(groupPosition)) {
listView.collapseGroupWithAnimation(groupPosition);
DynamicListviewUtil.newInstance().resetHeight(listView, groupPosition);
} else {
DynamicListviewUtil.newInstance().setExpandableListViewHeight(listView, groupPosition);
listView.expandGroupWithAnimation(groupPosition);
}
return true;
}
});
}
public void updateUI() {
Map<String, String> params = new HashMap<>();
try {
params.put(Constants.KEY_API, Constants.VALUE_API);
VALUE_START = SystemTime.newInstance().getStart();
VALUE_END = SystemTime.newInstance().getEnd();
params.put(Constants.KEY_START, VALUE_START);
params.put(Constants.KEY_END, VALUE_END);
params.put(Constants.KEY_ACTION, Constants.VALUE_ACTION);
params.put(Constants.KEY_KEYS, Constants.DISK_VALUE_KEYS);
} catch (Exception e) {
e.printStackTrace();
}
String jsonMETA = new JsonHttpUtil().getJsonMETA(Constants.POST_URL, params);
updateListView(VALUE_START, jsonMETA);
Map<String, Float> data = new HashMap<>();
data = DiskjsonParser.newInstance().parseJSON(jsonMETA);
if (data.get("xvdar") == null || data == null) {
return;
}
fillBarValues(data);
updateHorBarChart();
}
private void updateListView(String hint, String title) {
ExpandableListView_Helper.ChildItem childItem = new ExpandableListView_Helper.ChildItem();
childItem.title = title;
childItem.hint = hint;
groupItem.items.add(childItem);
adapter.notifyDataSetChanged();
}
public void fillBarValues(Map<String, Float> data) {
for (int i = 0; i < data_keys.length; ++i) {
horBarValues[0][i] = data.get(data_keys[i]);
}
}
private void updateHorBarChart(){
mHorBarChart.reset();
BarSet barSet = new BarSet();
Bar bar;
for(int i = 0; i < horBarLabels.length; i++){
bar = new Bar(horBarLabels[i], horBarValues[0][i]);
bar.setColor(this.getResources().getColor(R.color.horbar_fill));
barSet.addBar(bar);
}
mHorBarChart.addData(barSet);
mHorBarChart.setBarSpacing(Tools.fromDpToPx(3));
mHorBarChart.setBorderSpacing(0)
.setAxisBorderValues(HOR_BAR_MIN, HOR_BAR_MAX, 2)
.setGrid(HorizontalBarChartView.GridType.VERTICAL, mHorBarGridPaint)
.setXAxis(false)
.setYAxis(false)
.setXLabels(XController.LabelPosition.NONE)
.show(getAnimation(true))
;
}
@SuppressLint("NewApi")
private void showHorBarTooltip(int setIndex, int entryIndex, Rect rect){
mHorBarTooltip = (TextView) getActivity().getLayoutInflater().inflate(R.layout.horbar_tooltip, null);
mHorBarTooltip.setText(Integer.toString((int) horBarValues[setIndex][entryIndex]) + "KB");
mHorBarTooltip.setIncludeFontPadding(false);
LayoutParams layoutParams = new LayoutParams((int) Tools.fromDpToPx(15), (int) Tools.fromDpToPx(15));
layoutParams.leftMargin = rect.right;
layoutParams.topMargin = rect.top - (int) (Tools.fromDpToPx(15)/2 - (rect.bottom - rect.top)/2);
mHorBarTooltip.setLayoutParams(layoutParams);
if(android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1){
mHorBarTooltip.setAlpha(0);
mHorBarTooltip.animate()
.setDuration(200)
.alpha(1)
.translationX(10)
.setInterpolator(enterInterpolator);
}
mHorBarChart.showTooltip(mHorBarTooltip);
}
@SuppressLint("NewApi")
private void dismissHorBarTooltip(final int setIndex, final int entryIndex, final Rect rect){
if(android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN){
mHorBarTooltip.animate()
.setDuration(100)
.alpha(0)
.translationX(-10)
.setInterpolator(exitInterpolator).withEndAction(new Runnable(){
@Override
public void run() {
mHorBarChart.removeView(mHorBarTooltip);
mHorBarTooltip = null;
if(entryIndex != -1)
showHorBarTooltip(setIndex, entryIndex, rect);
}
});
}else{
mHorBarChart.dismissTooltip(mHorBarTooltip);
mHorBarTooltip = null;
if(entryIndex != -1)
showHorBarTooltip(setIndex, entryIndex, rect);
}
}
private void updateValues(HorizontalBarChartView chartView){
chartView.updateValues(0, horBarValues[1]);
chartView.notifyDataUpdate();
}
/*------------------------------------*
* GETTERS *
*------------------------------------*/
private Animation getAnimation(boolean newAnim){
if(newAnim)
return new Animation()
.setAlpha(mCurrAlpha)
.setEasing(mCurrEasing)
.setOverlap(mCurrOverlapFactor, mCurrOverlapOrder)
.setStartPoint(mCurrStartX, mCurrStartY);
else
return new Animation()
.setAlpha(mOldAlpha)
.setEasing(mOldEasing)
.setOverlap(mOldOverlapFactor, mOldOverlapOrder)
.setStartPoint(mOldStartX, mOldStartY);
}
} |
<gh_stars>10-100
import { Trans } from '@lingui/macro'
import { Col, Row } from 'antd'
import { ThemeContext } from 'contexts/themeContext'
import { PropsWithChildren, useContext } from 'react'
import ProjectPreview from './ProjectPreview'
const FULL_WIDTH_PX = 24
export default function ProjectConfigurationFieldsContainer({
showPreview,
previewContent,
children,
}: PropsWithChildren<{ showPreview?: boolean; previewContent?: JSX.Element }>) {
const {
theme: { colors },
} = useContext(ThemeContext)
if (!showPreview) return <>{children}</>
return (
<Row gutter={100} style={{ position: 'relative' }}>
<Col md={12} xs={FULL_WIDTH_PX}>
{children}
</Col>
<Col
md={12}
xs={FULL_WIDTH_PX}
style={{
position: 'sticky',
top: 0,
borderLeft: `1px solid ${colors.stroke.tertiary}`,
}}
>
{previewContent ?? (
<div>
<h3
style={{
marginTop: 5,
color: colors.text.secondary,
}}
>
<Trans>Preview:</Trans>
</h3>
<ProjectPreview singleColumnLayout />
</div>
)}
</Col>
</Row>
)
}
|
The Haustorium, a Specialized Invasive Organ in Parasitic Plants.
Parasitic plants thrive by infecting other plants. Flowering plants evolved parasitism independently at least 12 times, in all cases developing a unique multicellular organ called the haustorium that forms upon detection of haustorium-inducing factors derived from the host plant. This organ penetrates into the host stem or root and connects to its vasculature, allowing exchange of materials such as water, nutrients, proteins, nucleotides, pathogens, and retrotransposons between the host and the parasite. In this review, we focus on the formation and function of the haustorium in parasitic plants, with a specific emphasis on recent advances in molecular studies of root parasites in the Orobanchaceae and stem parasites in the Convolvulaceae. |
// Close closes the etcd session.
// NOTE(review): session.Close() returns an error that is discarded here —
// confirm whether callers need to observe close failures.
func (e *EtcdLeaderElection) Close() {
	log.Info("Closing etcd session")
	e.session.Close()
	log.Info("Closed etcd session")
}
import frappe
from latte.utils.caching import cache_in_mem
from frappe.model.document import Document
from frappe.modules import get_module_app
# Core doctypes bypass the hook-based class-extension lookup below.
core_doctypes_list = {'DocType', 'DocField', 'DocPerm', 'User', 'Role', 'Has Role',
	'Page', 'Module Def', 'Print Format', 'Report', 'Customize Form',
	'Customize Form Field', 'Property Setter', 'Custom Field', 'Custom Script'}

@cache_in_mem(key=lambda dt,m=None:dt, lock_cache=True)
def get_controller(doctype, module=None):
	"""Return the controller class for *doctype*, cached in memory per doctype.

	Custom doctypes fall back to the generic ``Document`` class. For non-core
	doctypes, the ``doctype_class_extensions`` hook may substitute a
	different controller path.
	"""
	# The DB lookup may return nothing; default to the given module (or
	# "Core") and non-custom in that case.
	module, custom = frappe.db.get_value(
		"DocType",
		doctype,
		("module", "custom")
	) or [module or "Core", False]
	if custom:
		return Document
	app = get_module_app(module)
	scrubbed_dt = frappe.scrub(doctype)
	classname = doctype.replace(" ", "").replace("-", "")
	# Conventional dotted path: <app>.<module>.doctype.<dt>.<dt>.<ClassName>
	attr_name = f'{frappe.scrub(app)}.{frappe.scrub(module)}.doctype.{scrubbed_dt}.{scrubbed_dt}.{classname}'
	if doctype not in core_doctypes_list:
		extended_classes = frappe.get_hooks('doctype_class_extensions')
		extended_class = extended_classes and extended_classes.get(attr_name)
		if extended_class:
			# Hook values are lists; the first registered extension wins.
			attr_name = extended_class[0]
	return frappe.get_attr(attr_name)
Story highlights Judge rules that statute of limitations has expired
Prosecutors had asked for five years in prison
Ex-PM survived political, corruption, sex scandals before quitting in November
A judge has dismissed the corruption case against former Italian Prime Minister Silvio Berlusconi, saying that the statute of limitations has expired.
Berlusconi was charged with bribing a British lawyer, David Mills, to secure favorable testimony in legal cases.
Although Saturday's decision will spare Berlusconi prison time, the ruling is not the same as an acquittal. A written report on the case will be published within 90 days.
The ruling is a victory for the former premier's lawyers, who had argued that the statute of limitations in the case has expired. Mills' conviction in the case was overturned in 2010.
Berlusconi, 75, dominated Italian politics for a decade and a half before resigning amid a financial crisis in November. He has survived a series of political, corruption and sex scandals over the years, involving allegations of embezzlement, tax fraud and bribery.
In addition to the Mills case, he also faces trial on charges that he hired an underage prostitute and later tried to pull strings to get her out of jail when she was arrested for theft. |
/**
 * This method is called by the view whenever the
 * user tried to trigger an exit operation.
 * Disposes the view, drops the model reference, and terminates the JVM.
 */
void exitTriggered() {
    iView.dispose();
    iModel = null;
    // Hard exit: nothing after this runs except registered shutdown hooks.
    System.exit(0);
}
Literature Review on Enterprise Tacit Knowledge Management
Tacit knowledge management provides potential impetus for the sustainable development of the knowledge-based economy. Despite the fact that many Chinese enterprises have realized the strategic value of tacit knowledge, relevant research on tacit knowledge management is still in its infancy. The author of this paper reviewed current research, both in China and abroad, on the measurement and acquisition of tacit knowledge, as well as its transfer models and the factors that obstruct the transfer process. The author also analyzed the deficiencies of the current research and presented his own view on future developments in this field.
/**
* Commands Module
*
* This module contains all server commands that can be sent/received by the
* server.
*/
pub mod hello;
pub mod bye;
pub mod putobj;
use crate::utils;
pub use hello::*;
pub use bye::*;
pub use putobj::*; |
import { Client, Provider, Receipt, Result } from "@blockstack/clarity";
/** Typed wrapper around the `rocket-market` Clarity contract. */
export class RocketMarketClient extends Client {
  constructor(provider: Provider) {
    super("rocket-market", "rocket-tutorial/rocket-market", provider);
  }

  /** Returns the number of rockets owned by `owner`. */
  async balanceOf(owner: string): Promise<number> {
    const query = this.createQuery({ method: { name: "balance-of", args: [`'${owner}`] } });
    const res = await this.submitQuery(query);
    // Explicit radix: don't rely on parseInt's input-dependent base detection.
    return parseInt(Result.unwrap(res), 10);
  }

  /** Returns the principal owning `tokenId`, with Clarity quoting stripped. */
  async ownerOf(tokenId: number): Promise<string> {
    const query = this.createQuery({ method: { name: "owner-of", args: [`${tokenId}`] } });
    const res = await this.submitQuery(query);
    return Result.unwrap(res).replace(/'/g, "");
  }

  /**
   * Transfers `tokenId` to `to`, signed by `params.sender`.
   * @returns the transaction receipt
   */
  async transfer(to: string, tokenId: number, params: { sender: string }): Promise<Receipt> {
    const tx = this.createTransaction({
      method: { name: "transfer", args: [`'${to}`, `${tokenId}`] }
    });
    await tx.sign(params.sender);
    const res = await this.submitTransaction(tx);
    return res;
  }
}
|
def add_case_detection(list_of_flows, available_compartments):
    """Append the case-detection flows to ``list_of_flows`` and return it.

    The flow destination is ``ON_TREATMENT`` when that compartment is in use,
    otherwise ``RECOVERED``.

    Works on a deep copy of the module-level ``CASE_DETECTION_FLOWS`` template:
    the original implementation updated the template's first dict in place
    (and appended the mutated global), so the shared constant leaked state
    between calls.
    """
    import copy

    case_detection_flows = copy.deepcopy(CASE_DETECTION_FLOWS)
    case_detection_flows[0].update(
        {
            "to": Compartment.ON_TREATMENT
            if Compartment.ON_TREATMENT in available_compartments
            else Compartment.RECOVERED
        }
    )
    # Append the updated copy, not the module-level template.
    list_of_flows += case_detection_flows
    return list_of_flows
def win(s, e):
    """Winner flag (0 or 1) for the quartering game state (s, e)."""
    while True:
        if e % 2:
            # Odd range: outcome is the inverted parity of s.
            return 1 - s % 2
        half, quarter = e // 2, e // 4
        if s > half:
            return s % 2
        if s > quarter:
            return 1
        # Tail recursion in the original, rewritten as iteration.
        e = quarter
def lose(s, e):
    """Winner flag for the variant where the mover plays on the half range."""
    half = e // 2
    return 1 if s > half else win(s, half)
def game(n):
    # res tracks the (win-game, lose-game) labels; each round permutes the
    # pair according to the outcome of both sub-games for the read (s, e).
    res = [0, 1]
    for i in range(n):
        s, e = map(int, input().split())
        res[0], res[1] = res[win(s, e)], res[lose(s, e)]
    return res

# Read the number of rounds from stdin and print the final label pair.
n = int(input())
print(*game(n))
<reponame>Xuyuanp/hodor
/*
* Copyright 2017 <NAME>
* Author: <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hodor
import (
"context"
"errors"
"fmt"
"net/http"
"strings"
)
// paramsKeyType is a distinct string type so the context key set here
// cannot collide with keys from other packages.
type paramsKeyType string

const paramsKey paramsKeyType = "HodorParam"

// ParamsOfReq returns the named route parameter stored on the request context.
func ParamsOfReq(r *http.Request, name string) (string, bool) {
	return ParamsOfCtx(r.Context(), name)
}

// ParamsOfCtx returns the named route parameter from ctx, if one was captured.
func ParamsOfCtx(ctx context.Context, name string) (string, bool) {
	if p, ok := ctx.Value(paramsKey).(Params); ok {
		return p.Get(name)
	}
	return "", false
}

// Params looks up named route parameters captured during route matching.
type Params interface {
	Get(string) (string, bool)
}

// emptyParams is the root of every parameter chain; it matches nothing.
type emptyParams struct{}

func (p *emptyParams) Get(_ string) (string, bool) {
	return "", false
}

var background = new(emptyParams)

// valueParam is a linked-list node holding one name/value pair plus its parent.
type valueParam struct {
	parent Params
	name   string
	value  string
}

// Get returns this node's value on a name match, else defers to the parent chain.
func (p *valueParam) Get(name string) (string, bool) {
	if p.name == name {
		return p.value, true
	}
	return p.parent.Get(name)
}

// withValue prepends a name/value pair to the parameter chain.
func withValue(p Params, name, value string) Params {
	return &valueParam{
		parent: p,
		name:   name,
		value:  value,
	}
}
// node is one vertex of the routing trie. A node is either a literal
// pattern segment or a named (":param") segment. Handlers are keyed by
// HTTP method; children are keyed by their first byte, with ':' marking
// a named child.
type node struct {
	parent   *node
	named    bool
	empty    bool
	pattern  string
	name     string
	handlers map[Method]http.Handler
	children map[byte]*node
}

// newNode returns an empty literal node attached to parent.
func newNode(parent *node, pattern string) *node {
	return &node{
		parent:   parent,
		pattern:  pattern,
		named:    false,
		empty:    true,
		handlers: map[Method]http.Handler{},
		children: map[byte]*node{},
	}
}
// addRoute inserts pattern into the trie rooted at n, splitting literal
// nodes at the longest common prefix and delegating ":name" segments to
// addNamedRoute.
func (n *node) addRoute(method Method, pattern string, handler http.Handler) {
	if n.empty {
		n.init(method, pattern, handler)
		return
	}
	if n.named {
		n.addNamedRoute(method, pattern, handler)
		return
	}
	i := longestPrefix(n.pattern, pattern)
	if i < len(n.pattern) {
		// New pattern diverges inside this node: split so the common
		// prefix stays here and the remainder moves to a child.
		n.splitAt(i)
	}
	if i == len(pattern) {
		n.handle(method, handler)
		return
	}
	n.getChildMust(pattern[i]).addRoute(method, pattern[i:], handler)
}

// init populates a freshly created (empty) node with its first route.
func (n *node) init(method Method, pattern string, handler http.Handler) {
	n.empty = false
	// Comparing pattern against itself stops at the first ':', so i is the
	// index of the first named segment (or len(pattern) if there is none).
	i := longestPrefix(pattern, pattern)
	if i == 0 {
		n.addNamedRoute(method, pattern, handler)
		return
	}
	if i == len(pattern) {
		n.pattern = pattern
		n.handle(method, handler)
		return
	}
	n.pattern = pattern[:i]
	n.getChildMust(pattern[i]).addRoute(method, pattern[i:], handler)
}

// addNamedRoute installs a ":name" segment on n; anything after the next
// '/' continues as a child route.
func (n *node) addNamedRoute(method Method, pattern string, handler http.Handler) {
	index := strings.Index(pattern, "/")
	var name string
	if index == -1 {
		name = pattern[1:]
		n.name = name
		n.handle(method, handler)
	} else {
		name = pattern[1:index]
		n.name = name
		n.getChildMust(pattern[index]).addRoute(method, pattern[index:], handler)
	}
}
// longestPrefix returns the length of the common prefix of p1 and p2,
// additionally stopping before any ':' in p2 (start of a named segment).
func longestPrefix(p1, p2 string) int {
	limit := len(p1)
	if len(p2) < limit {
		limit = len(p2)
	}
	n := 0
	for ; n < limit; n++ {
		if p1[n] != p2[n] || p2[n] == ':' {
			break
		}
	}
	return n
}
// handle registers handler for method on this node; duplicate
// registrations for the same method are a programming error.
func (n *node) handle(method Method, handler http.Handler) {
	if _, ok := n.handlers[method]; ok {
		panic("duplicated handlers for same method")
	}
	n.handlers[method] = handler
}

// splitAt moves everything after index into a new child, leaving n with
// only the common prefix. Handlers and children migrate to the child.
func (n *node) splitAt(index int) {
	child := newNode(n, n.pattern[index:])
	child.handlers = n.handlers
	child.children = n.children
	child.named = false
	child.empty = false
	n.handlers = map[Method]http.Handler{}
	n.children = map[byte]*node{n.pattern[index]: child}
	n.pattern = n.pattern[:index]
}

// getChildMust returns the child keyed by c, creating it on demand;
// a ':' key produces a named-parameter child.
func (n *node) getChildMust(c byte) *node {
	if child, ok := n.children[c]; ok {
		return child
	}
	child := newNode(n, "")
	n.children[c] = child
	if c == ':' {
		child.named = true
	}
	return child
}
var (
	errNotFound         = errors.New("Not Found")
	errMethodNotAllowed = errors.New("Method Not Allowed")
)

// match resolves pattern against the trie, accumulating named-parameter
// values into p. Literal children are preferred over named (':') children;
// a literal match that fails with "not found" falls back to the named child.
func (n *node) match(p Params, method Method, pattern string) (Params, http.Handler, error) {
	if n.named {
		return n.matchNamed(p, method, pattern)
	}
	i := longestPrefix(n.pattern, pattern)
	if i < len(n.pattern) {
		// Request path diverges inside this node's literal segment.
		return p, nil, errNotFound
	}
	if i < len(pattern) {
		if child, ok := n.children[pattern[i]]; ok {
			// Method-not-allowed is terminal: the path matched, only the verb didn't.
			if p, h, err := child.match(p, method, pattern[i:]); err == nil || err == errMethodNotAllowed {
				return p, h, err
			}
			if child, ok := n.children[':']; ok {
				return child.match(p, method, pattern[i:])
			}
			return p, nil, errNotFound
		}
		if child, ok := n.children[':']; ok {
			return child.match(p, method, pattern[i:])
		}
		return p, nil, errNotFound
	}
	return n.handleMethod(p, method)
}
// matchNamed consumes one path segment as this node's parameter value.
// Without a further '/', the whole remainder is the value; otherwise the
// match continues at the '/' child.
func (n *node) matchNamed(p Params, method Method, pattern string) (Params, http.Handler, error) {
	index := strings.Index(pattern, "/")
	if index == -1 {
		value := pattern
		return n.handleMethod(withValue(p, n.name, value), method)
	}
	value := pattern[:index]
	if child, ok := n.children['/']; ok {
		return child.match(withValue(p, n.name, value), method, pattern[index:])
	}
	return p, nil, errNotFound
}

// handleMethod returns the handler registered for method on this node,
// distinguishing "no handlers at all" from "wrong method".
func (n *node) handleMethod(p Params, method Method) (Params, http.Handler, error) {
	if len(n.handlers) == 0 {
		return p, nil, errNotFound
	}
	if h, ok := n.handlers[method]; ok {
		return p, h, nil
	}
	return p, nil, errMethodNotAllowed
}

// printTree dumps every registered (method, full pattern) pair for debugging.
func (n *node) printTree(prefix string) {
	pattern := n.pattern
	if n.named {
		pattern = ":" + n.name
	}
	for method := range n.handlers {
		fmt.Println(method, prefix+pattern)
	}
	for _, child := range n.children {
		child.printTree(prefix + pattern)
	}
}
|
// Functions
// ----------------------------------------------------------------------------
// We cannot really test much more than this for transmit without totally
// whiteboxing the test, which isn't good as we may change the implementation
// soon to buffer the requests then package them as a sync write command
TEST_F(DaisyChainShould, CallSetGpioFunctionUponTransmissionRequest){
    DaisyChain chain(p);
    char msg[] = "hey!";
    // A transmission request must at minimum touch the GPIO pin once.
    EXPECT_CALL(gpio, writePin);
    auto status = chain.requestTransmission((uint8_t*)msg, sizeof(msg));
    EXPECT_TRUE(status);
}
# n customers each bought an integer-plus-optional-half number of apples;
# lines are 'half' or 'halfplus' (the latter means a half apple was added).
# Processing the log newest-to-oldest reconstructs the apple count before
# each sale; p is the price of a whole apple (paid per half at p/2).
n, p = (int(_) for _ in input().split())
a = [input() for i in range(n)][::-1]
ans = 0
apple = 0
for s in a:
    # Before this sale the customer had double the remaining apples ...
    apple *= 2
    if s == 'halfplus':
        # ... plus the odd half apple that was topped up.
        apple += 1
    # Each sale moves apple/2 apples, charged at price p each.
    ans += p * apple // 2
print(ans)
|
// Test basic operations in a safe manner.
// Round-trips every basic Go type through an Encoder/Decoder pair and
// verifies the decoded value is deeply equal to the original.
func TestBasicEncoderDecoder(t *testing.T) {
	var values = []interface{}{
		true,
		int(123),
		int8(123),
		int16(-12345),
		int32(123456),
		int64(-1234567),
		uint(123),
		uint8(123),
		uint16(12345),
		uint32(123456),
		uint64(1234567),
		uintptr(12345678),
		float32(1.2345),
		float64(1.2345678),
		complex64(1.2345 + 2.3456i),
		complex128(1.2345678 + 2.3456789i),
		[]byte("hello"),
		string("hello"),
	}
	for _, value := range values {
		b := new(bytes.Buffer)
		enc := NewEncoder(b)
		err := enc.Encode(value)
		if err != nil {
			t.Error("encoder fail:", err)
		}
		dec := NewDecoder(b)
		// Decode into a freshly allocated value of the same dynamic type.
		result := reflect.New(reflect.TypeOf(value))
		err = dec.Decode(result.Interface())
		if err != nil {
			t.Fatalf("error decoding %T: %v:", reflect.TypeOf(value), err)
		}
		if !reflect.DeepEqual(value, result.Elem().Interface()) {
			t.Fatalf("%T: expected %v got %v", value, value, result.Elem().Interface())
		}
	}
}
/**
 * Reads a Windows .ico container: file header, per-image headers, then the
 * image payloads, exposing the decoded images keyed by image id.
 *
 * @author Oleg Cherednik
 * @since 03.07.2013
 */
@Slf4j
public final class IcoFile extends AbstractIconFile {
    public IcoFile(ImageInputStream in) throws IOException, IconManagerException {
        super(createImageById(new FileHeader(in), in));
    }

    // ========== static ==========

    /** Reads {@code total} consecutive image headers from the stream. */
    private static List<ImageHeader> readImageHeaders(int total, ImageInputStream in) throws IOException {
        assert total > 0;
        assert in != null;

        List<ImageHeader> headers = new ArrayList<>(total);

        for (int pos = 0; pos < total; pos++)
            headers.add(ImageHeader.read(pos, in));

        return Collections.unmodifiableList(headers);
    }

    /**
     * Decodes every image declared in the file header. Image data must be
     * laid out contiguously after the headers; a header whose offset does
     * not match the running position is treated as a corrupt file.
     * Images that fail to decode are logged and skipped.
     */
    private static Map<String, Image> createImageById(FileHeader fileHeader, ImageInputStream in) throws IOException, IconManagerException {
        List<ImageHeader> imageHeaders = readImageHeaders(fileHeader.getImageCount(), in);
        Map<ImageHeader, Image> imageByHeader = new TreeMap<>();
        // First payload starts right after the fixed header plus all image headers.
        int offs = FileHeader.SIZE + imageHeaders.size() * ImageHeader.SIZE;

        for (ImageHeader imageHeader : imageHeaders) {
            checkOffs(offs, imageHeader);

            BufferedImage image = IconIO.readImage(in, imageHeader.getSize());

            if (image == null)
                log.error("Image '{}' cannot be read", imageHeader);
            else
                imageByHeader.put(imageHeader, image);

            offs += imageHeader.getSize();
        }

        if (imageByHeader.isEmpty())
            return Collections.emptyMap();

        Map<String, Image> imageById = new LinkedHashMap<>();
        imageByHeader.entrySet().forEach(entry -> imageById.put(entry.getKey().getId(), entry.getValue()));
        return imageById;
    }

    /** Fails fast when an image header's recorded offset disagrees with the layout. */
    private static void checkOffs(int expected, ImageHeader imageHeader) throws IconManagerException {
        if (expected != imageHeader.getOffs())
            throw new IconManagerException("rva image no. " + imageHeader.getPos() + " incorrect. actual=" +
                    imageHeader.getOffs() + ", expected=" + expected);
    }
}
{
type CoffeeCup = {
shots: number;
hasMilk: boolean;
};
class CoffeeMaker {
static BEANS_GRAM_PER_SHOT: number = 7; // class Level
coffeeBeans: number = 0; // instance (object) Level
constructor(coffeeBeans: number) {
this.coffeeBeans = coffeeBeans;
}
static makeMachine(coffeeBeans: number): CoffeeMaker {
return new CoffeeMaker(coffeeBeans);
}
makeCoffee(shots: number): CoffeeCup {
if (this.coffeeBeans < shots * CoffeeMaker.BEANS_GRAM_PER_SHOT
)
throw new Error(`Not enough coffee beans!`);
this.coffeeBeans -= shots * CoffeeMaker.BEANS_GRAM_PER_SHOT
;
return {
hasMilk: false, shots
}
};
}
const maker = new CoffeeMaker(32);
const maker2 = CoffeeMaker.makeMachine(3);
} |
<filename>tests/e2e/HeatMap/pages/default.page.ts
import { DefaultPage } from "../../DefaultPage/home.page";
// Page object for the HeatMap e2e suite (WebdriverIO-style).
class HeatMap extends DefaultPage {
    // The rendered Plotly SVG inside the heatMap1 widget.
    get heatMap() {
        return browser.element(".mx-name-heatMap1 .js-plotly-plot svg");
    }

    // Navigates the browser to the heatmap test page.
    open(): void {
        browser.url("/p/heatmap");
    }
}

// Shared singleton instance used by the specs.
const heatMap = new HeatMap();
export default heatMap;
|
No, I am not going to defend Bill O’Reilly. I am clueless regarding his guilt or innocence. But I will say that the Left’s glaring hypocrisy regarding sexual harassment is off the chain. All of the women with strong evidence of sexual harassment, including criminal behavior, by Bill Clinton were gang assaulted by the Left (Democrats, Hollywood and fake news media).
I remember how incensed I was when the Left threw all men in powerful positions under the bus to cover for Bill Clinton. Their narrative was every man in Clinton’s position would succumb to oral favors from an intern. My dad was pastor of a large church. My brother headed his county’s youth football association. Neither would ever sink to Clinton’s behavior.
My purpose for writing is to thank Bill O’Reilly for compassionate positions he took on two extremely important issues.
As a black American who has been trying for decades to tear down the wall of my fellow blacks’ blind loyalty to Democrats, O’Reilly’s compassionate talking points memo, “The disintegration of the African-American family” had me standing up and cheering. The Left went nuts, typically branding O’Reilly a racist. Everything O’Reilly said was 100% true.
Because the Left views blacks as useful idiots in their quest to transform America into a Socialist nation, anyone (black or white) who dares offer real solutions to issues devastating black families, the Left vilifies into silence. Thanks Bill for caring and courageously going public with your common sense solutions to help black Americans.
Then, there was Bill O’Reilly proposing “Kate’s Law” in response to her shocking death at the hands of an illegal-alien, multiple criminal offender. Kate Steinle was a beautiful young woman with her whole life ahead of her. Enjoying a leisurely Sunday stroll on a San Francisco pier with her dad, Kate was shot and killed by an illegal-alien who had been deported 5 times. After Kate collapsed, Jim Steinle held his daughter in his arms. Kate’s last words were, “Help me, dad.”
O’Reilly’s “Kate’s Law” suggested a mandatory 5 year sentence in federal prison for deportees caught coming back.
Remarkably, the arrogant, federal-law-breaking sanctuary city of San Francisco, Democrats and fake news media gave Kate Steinle’s family and the American people the finger. They rejected Kate’s Law. Thanks Bill, for trying to protect American lives by proposing Kate’s Law.
So long Bill. You kept it real, lookin’ out for us folks. |
/**
 * Not-in-place intrinsic Gaussian blur filter using {@link ScriptIntrinsicBlur} and {@link
 * RenderScript}. This requires Android version >= 4.2.
 *
 * @param dest The {@link Bitmap} where the blurred image is written to.
 * @param src The {@link Bitmap} containing the original image.
 * @param context The {@link Context} necessary to use {@link RenderScript}
 * @param radius The radius of the blur with a supported range 0 < radius <= {@link
 *     #BLUR_MAX_RADIUS}
 */
@RequiresApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
public static void blurBitmap(
    final Bitmap dest, final Bitmap src, final Context context, final int radius) {
  Preconditions.checkNotNull(dest);
  Preconditions.checkNotNull(src);
  Preconditions.checkNotNull(context);
  Preconditions.checkArgument(radius > 0 && radius <= BLUR_MAX_RADIUS);
  RenderScript rs = null;
  try {
    rs = Preconditions.checkNotNull(RenderScript.create(context));
    // The U8_4 element matches a 4-channel (ARGB) bitmap pixel.
    ScriptIntrinsicBlur blurScript = ScriptIntrinsicBlur.create(rs, Element.U8_4(rs));
    Allocation allIn = Preconditions.checkNotNull(Allocation.createFromBitmap(rs, src));
    Allocation allOut = Preconditions.checkNotNull(Allocation.createFromBitmap(rs, dest));
    blurScript.setRadius(radius);
    blurScript.setInput(allIn);
    blurScript.forEach(allOut);
    // The blur result lives in the output allocation; copy it into dest.
    allOut.copyTo(dest);
    // NOTE(review): these destroy() calls are skipped if forEach/copyTo throws;
    // presumably rs.destroy() in the finally block reclaims them — TODO confirm.
    blurScript.destroy();
    allIn.destroy();
    allOut.destroy();
  } finally {
    // Always tear down the RenderScript context, even on failure.
    if (rs != null) {
      rs.destroy();
    }
  }
}
🔥 Ignite Your Mobile Development
An unfair head start for your React Native apps.
Gant Laborde Blocked Unblock Follow Following Jun 6, 2016
When our mobile dev team jumped into React Native 8 months ago, we couldn’t find concise information. The boilerplates we did find were either lacking or just … well sucked. Keep in mind, this is before such awesome releases as Barton Hammond’s snowflake. Back eight months ago, we did what we could, taking turns exploring and making mistakes.
React Native GO GO GO!
Fortunately, our pain has been everyone’s gain. We blogged often and fondly; plus we contributed to Open Source projects when we could. Internally, we’ve diligently worked together over the past half-year+ to figure out a groove that embodies all the lessons learned.
Nowadays, it seems a new boilerplate is available every week and though they help the community learn a few things, we still feel they don’t impact us much. We’ve really ironed out a fantastic system for React Native. As a consultancy that needs to educate one another as well as ship product, our values are strong but simple:
A unified place to demonstrate and explain best practices. Baked-in tools to help understand and optimize during development. Generators that fit 80–90% of common code to promote speed.
Our greatest contribution for all of this is version 1.0 of Ignite
What is Ignite?
For us, it’s a multi-purpose tool.
It’s our tool to accomplish the values listed above. It’s our tool for documenting updates, and it’s our tool for contributing to Open Source.
Help us define it further. Use it, open issues, PRs, and suggestions on our repo. Do you need a battle-tested head start that you know works? Here it is: |
/// Decompose `n` into its little-endian bytes, keeping only as many
/// bytes as fit in the size of the type parameter `U`.
fn cut_int_as_u64<U>(n: u64) -> Vec<u8> {
    (0..size_of::<U>())
        .map(|byte_index| (n >> (8 * byte_index)) as u8)
        .collect()
}
/**
 * Test the behaviour when writing the result to the outputstream causes an IOException.
 * The tag is expected to translate the IOException into a JspException.
 * @throws IOException
 * @throws JspException
 */
@Test(expected = JspException.class)
public void doStartTag_exception_writing_cached_content() throws IOException, JspException {
    // Given: a cached body exists for the configured cache/key pair.
    testSubject.setKey("mykey");
    testSubject.setCache("ehcachetag");
    Mockito.doReturn("foo").when(testSubject).getCachedBodyContent("ehcachetag", "mykey");
    // And: any attempt to write the cached content to the JSP writer fails.
    Mockito.doThrow(new IOException()).when(jspWriter).write(Mockito.anyString());
    ServletRequest servletRequest = Mockito.mock(ServletRequest.class);
    Mockito.when(pageContext.getRequest()).thenReturn(servletRequest);
    // When: the tag starts. Then: a JspException propagates (see @Test above).
    testSubject.doStartTag();
    // Cleanup must still happen even though writing the body failed.
    verifyCleanup();
}
/**
 * Abstraction for Magnets
 *
 * @author Joey Spillers
 */
public class Magnets { ///[0] Forward, [1] Right, [2] Back, [3] Left
    // Lowest reading seen per direction during calibration; seeded with a
    // large value so any real reading replaces it.
    static int[] low = {10000,10000,10000,10000};
    // Highest reading seen per direction during calibration; seeded with 0.
    static int[] high = {0,0,0,0};
    static MagneticSensor compass = new MagneticSensor(SensorPort.S2);

    /**
     * Calibrates the magnetic sensor by sampling 10 readings in each of the
     * four directions (forward, right, back, left) and recording the
     * per-direction minima and maxima.<br>
     *
     * @param pilot
     *        DifferentialPilot used to rotate the robot between directions
     *
     * @author Joey Spillers
     */
    public void calibrate(DifferentialPilot pilot){
        /*I2CSensor magnet = new I2CSensor(SensorPort.S1);
        byte[] buf = null;
        int len=10;
        magnet.getData(0x41, buf, len);
        magnet.*/
        GUI gui = new GUI();
        int[][] data = new int[4][10];
        for(int i = 0; i< 4; i++){
            for(int k = 0; k<10; k++){
                // Sample every 500 ms so the sensor reading can settle.
                Delay.msDelay(500);
                data[i][k] = compass.readValue();
                gui.valuePrint(data[i][k]);
            }
            // Rotate to the next direction; 95.5 rather than 90 degrees,
            // presumably to compensate for drivetrain slip — TODO confirm.
            pilot.rotate(95.5);
        }
        highAndLow(data);
    }

    /**
     * Searches Numbs and sets the lowest and highest value in each second-level array<br>
     *
     * @param numbs
     *        To be used with the Calibration method only; Contains a ton of values
     *
     * @author Joey Spillers
     */
    public static void highAndLow(int[][] numbs){
        int count = 0;
        for(int i=0; i< 4; i++){
            count = 0;
            while(count < numbs[i].length)
            {
                if(numbs[i][count]< low[i]) {
                    low[i] = numbs[i][count];
                }
                if(numbs[i][count] > high[i]) {
                    high[i] = numbs[i][count];
                }
                count++;
            }
        }
    }

    /**
     * Returns the per-direction calibration maxima recorded by
     * {@link #highAndLow(int[][])}.<br>
     *
     * @author Joey Spillers
     */
    public int[] getHigh(){
        return high;
    }

    /**
     * Returns the per-direction calibration minima recorded by
     * {@link #highAndLow(int[][])}.<br>
     *
     * @author Joey Spillers
     */
    public int[] getLow(){
        return low;
    }

    /**
     * Returns the current raw reading of the magnetic sensor.<br>
     *
     * @author Joey Spillers
     */
    public int getValue(){
        return compass.readValue();
    }
}
def unpack_mconfig_any(mconfig_any: Any, mconfig_struct: TAny) -> TAny:
    """Unpack a protobuf ``Any`` value into ``mconfig_struct`` and return it.

    Raises:
        LoadConfigError: if ``mconfig_any`` cannot be unpacked into the
            given message type.
    """
    if not mconfig_any.Unpack(mconfig_struct):
        raise LoadConfigError(
            'Cannot unpack Any type into message: %s' % mconfig_struct,
        )
    return mconfig_struct
def delete(self, *, reason=None):
    """Delete this channel through the HTTP API.

    Written as a generator-based coroutine (``yield from``); ``reason`` is
    forwarded to the API call — presumably an audit-log reason, TODO confirm.
    """
    yield from self._state.http.delete_channel(self.id, reason=reason)
def filterSubreads(self, subreads_pbi):
    """Return the index of ``subreads_pbi``, randomly down-sampled to
    ``self.coverage_depth`` rows when that limit is set and exceeded."""
    depth = self.coverage_depth
    if depth is not None and subreads_pbi.shape[0] > depth:
        subreads_pbi = subreads_pbi.sample(depth)
    return subreads_pbi.index
class CommanderCacher:
    """A class to manage per-commander caches."""

    def __init__(self):
        # Open the shared SQLite-backed global cache; fall back to a plain
        # in-memory dict if the store cannot be created.
        try:
            path = join(g.app.homeLeoDir, 'db', 'global_data')
            self.db = SqlitePickleShare(path)
        except Exception:
            self.db = {} # type:ignore
    #@+others
    #@+node:ekr.20100209160132.5759: *3* cacher.clear
    def clear(self):
        """Clear the cache for all commanders."""
        # Careful: self.db may be a Python dict.
        try:
            self.db.clear()
        except Exception:
            g.trace('unexpected exception')
            g.es_exception()
            self.db = {} # type:ignore
    #@+node:ekr.20180627062431.1: *3* cacher.close
    def close(self):
        """Commit and close the underlying database connection, if any."""
        # Careful: self.db may be a dict.
        if hasattr(self.db, 'conn'):
            # pylint: disable=no-member
            self.db.conn.commit()
            self.db.conn.close()
    #@+node:ekr.20180627042809.1: *3* cacher.commit
    def commit(self):
        """Commit pending writes to the underlying database, if any."""
        # Careful: self.db may be a dict.
        if hasattr(self.db, 'conn'):
            # pylint: disable=no-member
            self.db.conn.commit()
    #@+node:ekr.20180611054447.1: *3* cacher.dump
    def dump(self):
        """Dump the indicated cache if --trace-cache is in effect."""
        dump_cache(g.app.commander_db, tag='Commander Cache')
    #@+node:ekr.20180627053508.1: *3* cacher.get_wrapper
    def get_wrapper(self, c, fn=None):
        """Return a new wrapper for c."""
        return CommanderWrapper(c, fn=fn)
    #@+node:ekr.20100208065621.5890: *3* cacher.test
    def test(self):
        """Smoke-test the cache: write, re-read, and clear a few entries."""
        # pylint: disable=no-member
        if g.app.gui.guiName() == 'nullGui':
            # Null gui's don't normally set the g.app.gui.db.
            g.app.setGlobalDb()
        # Fixes bug 670108.
        assert g.app.db is not None
        # a PickleShareDB instance.
        # Make sure g.guessExternalEditor works.
        g.app.db.get("LEO_EDITOR")
        # self.initFileDB('~/testpickleshare')
        db = self.db
        db.clear()
        assert not list(db.items())
        db['hello'] = 15
        db['aku ankka'] = [1, 2, 313]
        db['paths/nest/ok/keyname'] = [1, (5, 46)]
        db.uncache() # frees memory, causes re-reads later
        if 0: print(db.keys())
        db.clear()
        return True
    #@+node:ekr.20100210163813.5747: *3* cacher.save
    def save(self, c, fn):
        """
        Save the per-commander cache.

        Change the cache prefix if changeName is True.

        save and save-as set changeName to True, save-to does not.
        """
        self.commit()
        if fn:
            # 1484: Change only the key!
            if isinstance(c.db, CommanderWrapper):
                c.db.key = fn
                self.commit()
            else:
                g.trace('can not happen', c.db.__class__.__name__)
    #@-others
/**
* Creates the python code within the response handler to parse
* the response payload assuming no encapsulating xml tag
*/
public class BaseParsePayload implements ParsePayload {
private final String typeModel;
private final Integer code;
public BaseParsePayload(final String typeModel, final int code) {
this.typeModel = typeModel;
this.code = code;
}
@Override
public String toPythonCode() {
final StringBuilder builder = new StringBuilder();
builder.append("if self.response.status == ").append(code).append(":\n")
.append(pythonIndent(3))
.append("self.result = parseModel(xmldom.fromstring(response.read()), ")
.append(typeModel);
if (!typeModel.equals("None")) {
builder.append("()");
}
return builder.append(")").toString();
}
} |
/**
 * This is a wrapper class for ProxyHandler as it is implemented as final. This class implements
 * the HttpHandler which can be injected into the handler.yml configuration file as another option
 * for the handlers injection. The other option is to use RouterHandlerProvider in service.yml file.
 *
 * @author Steve Hu
 */
public class RouterHandler implements HttpHandler {
    private RouterConfig config;
    protected ProxyHandler proxyHandler;

    public RouterHandler() {
        config = RouterConfig.load();
        ModuleRegistry.registerModule(RouterHandler.class.getName(), Config.getInstance().getJsonMapConfigNoCache(RouterConfig.CONFIG_NAME), null);
        // As we are building a client side router for the light platform, the assumption is the server will
        // be on HTTP 2.0 TSL always. No need to handle HTTP 1.1 case here.
        LoadBalancingRouterProxyClient client = new LoadBalancingRouterProxyClient();
        if(config.httpsEnabled) client.setSsl(Http2Client.getInstance().getDefaultXnioSsl());
        if(config.http2Enabled) {
            client.setOptionMap(OptionMap.create(UndertowOptions.ENABLE_HTTP2, true));
        } else {
            client.setOptionMap(OptionMap.EMPTY);
        }
        // Unmatched routes fall through to a 404 response.
        proxyHandler = ProxyHandler.builder()
                .setProxyClient(client)
                .setMaxConnectionRetries(config.maxConnectionRetries)
                .setMaxRequestTime(config.maxRequestTime)
                .setReuseXForwarded(config.reuseXForwarded)
                .setRewriteHostHeader(config.rewriteHostHeader)
                .setUrlRewriteRules(config.urlRewriteRules)
                .setMethodRewriteRules(config.methodRewriteRules)
                .setQueryParamRewriteRules(config.queryParamRewriteRules)
                .setHeaderRewriteRules(config.headerRewriteRules)
                .setNext(ResponseCodeHandler.HANDLE_404)
                .build();
    }

    /** Delegates every request to the wrapped {@link ProxyHandler}. */
    @Override
    public void handleRequest(HttpServerExchange httpServerExchange) throws Exception {
        proxyHandler.handleRequest(httpServerExchange);
    }

    // NOTE(review): this refreshes the config field only; the proxyHandler built
    // in the constructor keeps the old rewrite rules — TODO confirm intended.
    public void reload() {
        config = RouterConfig.load();
    }
}
// InsertTimezone inserts the given timezone into the DB and returns the id
// of the newly created row.
func InsertTimezone(timezone Timezone) (int, error) {
	var rowid int
	err := db.QueryRow("INSERT INTO timezones(name, timeoffset, identifier) VALUES($1, $2, $3) RETURNING id", timezone.Name, timezone.Timeoffset, timezone.Identifier).Scan(&rowid)
	if err != nil {
		// Log for diagnostics and propagate the error to the caller.
		log.Print(err)
		return 0, err
	}
	return rowid, nil
}
Dual-Channel Convolution Network With Image-Based Global Learning Framework for Hyperspectral Image Classification
Recently, convolutional neural networks (CNNs) have been widely applied to hyperspectral image (HSI) classification due to their detailed representation of features. Nevertheless, the current CNN-based HSI classification methods mainly follow a patch-based learning framework. These methods are nonglobal learning methods, which not only limit the use of global information but also require a high computational cost. In this letter, an image-based global learning framework is introduced to HSI classification. Based on this framework, we propose a dual-channel convolutional network (DCCN) for HSI classification to maximize the exploitation of the global and multiscale information of HSI. The experimental results conducted on two real hyperspectral datasets indicate that our method is superior to other related methods in terms of both efficiency and accuracy for HSI classification. |
<gh_stars>1000+
package main
import (
"fmt"
"github.com/mattn/go-gtk/glib"
"github.com/mattn/go-gtk/gtk"
"strconv"
)
// Demo of gtk.SpinButton: builds a top-level window holding two spin buttons
// and logs value changes of the first one to stdout.
func main() {
	gtk.Init(nil)
	window := gtk.NewWindow(gtk.WINDOW_TOPLEVEL)
	window.SetPosition(gtk.WIN_POS_CENTER)
	window.SetTitle("GTK Go!")
	window.Connect("destroy", func(ctx *glib.CallbackContext) {
		fmt.Println("got destroy!", ctx.Data().(string))
		gtk.MainQuit()
	}, "foo")
	//--------------------------------------------------------
	// GtkHBox
	//--------------------------------------------------------
	fixed := gtk.NewFixed()
	//--------------------------------------------------------
	// GtkSpinButton
	//--------------------------------------------------------
	// Range 1..10, step 1, displayed with 3 digits; spin forward by 7 steps.
	spinbutton1 := gtk.NewSpinButtonWithRange(1.0, 10.0, 1.0)
	spinbutton1.SetDigits(3)
	spinbutton1.Spin(gtk.SPIN_STEP_FORWARD, 7.0)
	fixed.Put(spinbutton1, 40, 50)
	spinbutton1.OnValueChanged(func() {
		val := spinbutton1.GetValueAsInt()
		fval := spinbutton1.GetValue()
		fmt.Println("SpinButton changed, new value: " + strconv.Itoa(val) + " | " + strconv.FormatFloat(fval, 'f', 2, 64))
		min, max := spinbutton1.GetRange()
		fmt.Println("Range: " + strconv.FormatFloat(min, 'f', 2, 64) + " " + strconv.FormatFloat(max, 'f', 2, 64))
		fmt.Println("Digits: " + strconv.Itoa(int(spinbutton1.GetDigits())))
	})
	// Second spin button driven by an explicit adjustment; the later Set*
	// calls override the adjustment's initial range and value.
	adjustment := gtk.NewAdjustment(2.0, 1.0, 8.0, 2.0, 0.0, 0.0)
	spinbutton2 := gtk.NewSpinButton(adjustment, 1.0, 1)
	spinbutton2.SetRange(0.0, 20.0)
	spinbutton2.SetValue(18.0)
	spinbutton2.SetIncrements(2.0, 4.0)
	fixed.Put(spinbutton2, 150, 50)
	//--------------------------------------------------------
	// Event
	//--------------------------------------------------------
	window.Add(fixed)
	window.SetSizeRequest(600, 600)
	window.ShowAll()
	gtk.Main()
}
|
<reponame>tkertesz/hrrqt
#include "network.h"
// Constructs the network helper; the UDP socket is parented to this object
// so Qt's ownership tree keeps it alive alongside us.
Network::Network(QObject *parent) :QObject(parent)
{
    my_socket = new QUdpSocket(this);
    IsStarted = false;
}
// Records our own address and the peer's address. Always returns true.
bool Network::setIp(QHostAddress MyIP, QHostAddress OtherIP)
{
    myip = MyIP;
    otherip = OtherIP;
    return true;
}
// Binds the UDP socket to our own address on port 45000 and wires incoming
// datagrams to processPendingDatagram(). Returns false if the bind fails,
// true (and marks the network as started) otherwise.
bool Network::startBinding()
{
    if (my_socket->bind(myip, 45000))
    {
        connect(my_socket, SIGNAL(readyRead()), this, SLOT(processPendingDatagram()));
        QCoreApplication::sendPostedEvents(); // flush queued events; not strictly necessary
    }
    else
    {
        qDebug("Bind problem");
        return false;
    }
    // Assign-and-return: the network is now started.
    return IsStarted = true;
}
// Serializes the image as JPEG and sends it to the peer as a single UDP
// datagram on port 45000. Does nothing (but logs) when the network has not
// been started via startBinding().
void Network::sendData(QImage sendimage)
{
    if (!IsStarted)
    {
        qDebug("Network is not started!");
        return;
    }
    QByteArray payload;
    QBuffer buffer(&payload);
    buffer.open(QIODevice::WriteOnly);
    sendimage.save(&buffer, "JPG");
    my_socket->writeDatagram(payload.data(), payload.size(), otherip, 45000);
}
// Slot invoked on readyRead(): drains all pending datagrams, decodes each
// JPEG payload from the expected peer into a QImage, and emits
// receivedImage() for every successfully decoded frame.
void Network::processPendingDatagram()
{
    while (my_socket->hasPendingDatagrams()) {
        QByteArray datagram;
        QHostAddress sender;
        quint16 senderPort = 45000; // overwritten by readDatagram below
        datagram.resize(my_socket->pendingDatagramSize());
        my_socket->readDatagram(datagram.data(), datagram.size(), &sender, &senderPort);
        //check to only allow packets from the other party, not from anyone
        if (sender.toString() == otherip.toString())
        {
            QImage recv_image;
            if (datagram.isNull())
                qDebug("Empty incoming datagram");
            recv_image.loadFromData(datagram, "JPG");
            if (recv_image.isNull()) // Check if the image was indeed received
                qDebug("The image is null. Something failed.");
            emit receivedImage(recv_image);
        }
    }
}
// Closes and deletes the socket explicitly (Qt parenting would also delete
// it, but closing first releases the port deterministically).
Network::~Network()
{
    my_socket->close();
    delete my_socket;
}
|
package com.cqs.legou_client;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.AppBarLayout;
import android.support.design.widget.TextInputLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.view.View;
import android.widget.EditText;
import com.cqs.entity.ResponseObject;
import com.cqs.entity.User;
import com.cqs.util.Constant;
import com.cqs.util.MyUtils;
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;
import com.loopj.android.http.AsyncHttpResponseHandler;
import com.loopj.android.http.RequestParams;
import com.marshalchen.common.commonUtils.basicUtils.BasicUtils;
import com.marshalchen.common.commonUtils.urlUtils.HttpUtilsAsync;
import com.marshalchen.common.ui.ToastUtil;
import com.tencent.connect.UserInfo;
import com.tencent.connect.auth.QQToken;
import com.tencent.tauth.IUiListener;
import com.tencent.tauth.Tencent;
import com.tencent.tauth.UiError;
import org.apache.http.Header;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import butterknife.ButterKnife;
import butterknife.InjectView;
import butterknife.OnClick;
import cn.smssdk.EventHandler;
import cn.smssdk.SMSSDK;
import cn.smssdk.gui.RegisterPage;
/**
 * Login screen supporting three flows: username/password login, SMS-verified
 * registration (via SMSSDK), and QQ social login (via the Tencent SDK).
 * On success the logged-in {@link User} is persisted to shared preferences
 * and handed back to {@link MainActivity} through the activity result.
 */
public class LoginActivity extends AppCompatActivity {
    @InjectView(R.id.toolbar)
    Toolbar mToolbar;
    @InjectView(R.id.appbar)
    AppBarLayout mAppbar;
    @InjectView(R.id.et_username)
    EditText mEtUsername;
    @InjectView(R.id.et_password)
    EditText mEtPassword;
    @InjectView(R.id.ti_username)
    TextInputLayout mTiUsername;
    private Intent mIntent;
    private User mUser;
    private int mState;
    private String mMsg;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        ButterKnife.inject(this);
        mToolbar.setTitle("用户登录");
        setSupportActionBar(mToolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setHomeButtonEnabled(true);
        // The toolbar's navigation arrow behaves like the back button.
        mToolbar.setNavigationOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                onBackPressed();
            }
        });
        mUser = new User();
    }

    /** Convenience wrapper for showing a short toast. */
    private void toast(String s) {
        ToastUtil.show(LoginActivity.this, s);
    }

    /** Entry point for the QQ social-login button. */
    @OnClick(R.id.btn_qq)
    public void loginqq(View v) {
        LoginQQ();
    }

    /** Handles both the login and register buttons. */
    @OnClick({R.id.btn_login, R.id.btn_register})
    public void onclick(View v) {
        switch (v.getId()) {
            case R.id.btn_login:
                mTiUsername.setError("");
                final String username = mEtUsername.getText().toString().trim();
                String password = <PASSWORD>.getText().toString().trim();
                // Reject empty credentials before hitting the network.
                if (TextUtils.isEmpty(username)) {
                    toast("用户名不能为空");
                    mEtPassword.setText("");
                    return;
                }
                if (TextUtils.isEmpty(password)) {
                    toast("密码不能为空");
                    return;
                }
                RequestParams params = new RequestParams();
                params.add("username", username);
                params.add("password", password);
                mUser.setName(username);
                HttpUtilsAsync.post(Constant.USER_LOGIN, params, new AsyncHttpResponseHandler() {
                    @Override
                    public void onSuccess(int i, Header[] headers, byte[] bytes) {
                        String data = new String(bytes);
                        Gson gson = new Gson();
                        ResponseObject<User> result = gson.fromJson(data, new TypeToken<ResponseObject<User>>() {
                        }.getType());
                        mMsg = result.getMsg();
                        // state == 1 means the server accepted the login.
                        if (1 == result.getState()) {
                            mIntent = new Intent(LoginActivity.this, MainActivity.class);
                            mIntent.putExtra("user", mUser);
                            setResult(MyUtils.SOCIAL_RESULTCODE, mIntent);
                            User datas = result.getDatas();
                            mUser.setId(datas.getId());
                            putSharePreferences(mUser);
                            toast(mMsg);
                            finish();
                        } else {
                            // Login rejected: clear the form and refocus.
                            mEtPassword.setText("");
                            mEtUsername.setText("");
                            mEtUsername.requestFocus();
                            toast(mMsg);
                        }
                    }

                    @Override
                    public void onFailure(int i, Header[] headers, byte[] bytes, Throwable throwable) {
                        MyUtils.showFalseToast(getApplicationContext());
                    }
                });
                break;
            case R.id.btn_register:
                SMSSDK.initSDK(this, "abeb20b52f98", "fecab15491e63c30540f852effb013a6");
                RegisterPage registerPage = new RegisterPage();
                registerPage.setRegisterCallback(new EventHandler() {
                    public void afterEvent(int event, int result, Object data) {
                        // Parse the registration result
                        if (result == SMSSDK.RESULT_COMPLETE) {
                            @SuppressWarnings("unchecked")
                            HashMap<String, Object> phoneMap = (HashMap<String, Object>) data;
                            String country = (String) phoneMap.get("country");
                            final String phone = (String) phoneMap.get("phone");
                            // // Submit the user info
                            // registerUser(country, phone);
                            // Register with the phone number as both username
                            // and initial password.
                            RequestParams params = new RequestParams();
                            params.add("username", phone);
                            params.add("password", phone);
                            HttpUtilsAsync.post(Constant.USER_REGISTER, params, new AsyncHttpResponseHandler() {
                                @Override
                                public void onSuccess(int i, Header[] headers, byte[] bytes) {
                                    String data = new String(bytes);
                                    Gson gson = new Gson();
                                    ResponseObject<User> result = gson.fromJson(data, new TypeToken<ResponseObject<User>>() {
                                    }.getType());
                                    mMsg = result.getMsg();
                                    mUser.setName(phone);
                                    if (1 == result.getState()) {
                                        toast(mMsg);
                                        User datas = result.getDatas();
                                        mUser.setId(datas.getId());
                                    } else {
                                        toast(mMsg);
                                    }
                                    // NOTE(review): the activity finishes with a
                                    // result even when registration failed —
                                    // presumably intentional, TODO confirm.
                                    mUser.setName(phone);
                                    mUser.setTel(phone);
                                    mIntent = new Intent(LoginActivity.this, MainActivity.class);
                                    mIntent.putExtra("user", mUser);
                                    setResult(MyUtils.REGISTER_RESULTCODE, mIntent);
                                    putSharePreferences(mUser);
                                    finish();
                                }

                                @Override
                                public void onFailure(int i, Header[] headers, byte[] bytes, Throwable throwable) {
                                    MyUtils.showFalseToast(getApplicationContext());
                                }
                            });
                        }
                    }
                });
                registerPage.show(this);
                break;
            default:
        }
    }

    public static Tencent mTencent;

    // Key code for invoking QQ login.
    public void LoginQQ() {
        if (mTencent == null) {
            mTencent = Tencent.createInstance(Constant.QQ_APP_ID, getApplicationContext());
        }
        // Force a fresh session before requesting authorization.
        mTencent.logout(this);
        mTencent.login(this, "all", new IUiListener() {
            @Override
            public void onComplete(Object arg0) {
                // TODO Auto-generated method stub
                if (arg0 != null) {
                    final JSONObject jsonObject = (JSONObject) arg0;
                    try {
                        final String token = jsonObject.getString(com.tencent.connect.common.Constants.PARAM_ACCESS_TOKEN);
                        String expires = jsonObject.getString(com.tencent.connect.common.Constants.PARAM_EXPIRES_IN);
                        final String openId = jsonObject.getString(com.tencent.connect.common.Constants.PARAM_OPEN_ID);
                        /** At this point we already have the OpenID and the other
                         fields we wanted. QQ login succeeded, but we still want
                         some basic QQ profile info such as nickname and avatar.
                         The SDK provides the UserInfo class, which wraps that
                         profile data — we obtain it as follows. */
                        QQToken qqToken = mTencent.getQQToken();
                        UserInfo info = new UserInfo(getApplicationContext(), qqToken);
                        // Now that we have the UserInfo object, the rest works like
                        // above: parse the returned JSON.
                        info.getUserInfo(new IUiListener() {
                            @Override
                            public void onComplete(Object o) {
                                JSONObject jsonObject1 = (JSONObject) o;
                                try {
                                    final String nickname = (String) jsonObject1.get("nickname");
                                    String avatar = (String) jsonObject1.get("figureurl_2");
                                    RequestParams params = new RequestParams();
                                    params.add("username", nickname);
                                    params.add("password", <PASSWORD>);
                                    params.add("avatar", avatar);
                                    mUser.setAvatar(avatar);
                                    mUser.setName(nickname);
                                    HttpUtilsAsync.post(Constant.USER_SOCIAL, params, new AsyncHttpResponseHandler() {
                                        @Override
                                        public void onSuccess(int i, Header[] headers, byte[] bytes) {
                                            String data = new String(bytes);
                                            Gson gson = new Gson();
                                            ResponseObject<User> result = gson.fromJson(data, new TypeToken<ResponseObject<User>>() {
                                            }.getType());
                                            mMsg = result.getMsg();
                                            if (1 == result.getState()) {
                                                User datas = result.getDatas();
                                                mUser.setId(datas.getId());
                                                mIntent = new Intent(LoginActivity.this, MainActivity.class);
                                                mIntent.putExtra("user", mUser);
                                                setResult(MyUtils.SOCIAL_RESULTCODE, mIntent);
                                                putSharePreferences(mUser);
                                                toast(mMsg);
                                            } else {
                                                toast(mMsg);
                                            }
                                            finish();
                                        }

                                        @Override
                                        public void onFailure(int i, Header[] headers, byte[] bytes, Throwable throwable) {
                                            MyUtils.showFalseToast(getApplicationContext());
                                        }
                                    });
                                } catch (JSONException e) {
                                    e.printStackTrace();
                                }
                            }

                            @Override
                            public void onError(UiError uiError) {
                            }

                            @Override
                            public void onCancel() {
                            }
                        });
                    } catch (JSONException e) {
                    }
                }
            }

            @Override
            public void onError(UiError arg0) {
                // TODO Auto-generated method stub
                toast("QQ授权出错:" + arg0.errorCode + "--" + arg0.errorDetail);
            }

            @Override
            public void onCancel() {
                // TODO Auto-generated method stub
                toast("取消qq授权");
            }
        });
    }

    /** Persists the user's name, id, and (if present) avatar to shared preferences. */
    private void putSharePreferences(User user) {
        HashMap<String, String> iuser = new HashMap<String, String>();
        iuser.put("username", user.getName());
        iuser.put("userid", user.getId());
        if (null != user.getAvatar()) {
            iuser.put("avatar", user.getAvatar());
        }
        BasicUtils.putSharedPreferences(getApplicationContext(), Constant.FILE_NAME, iuser);
    }

    @Override
    protected void onDestroy() {
        mUser = null;
        super.onDestroy();
    }
}
|
Rocker Ted Nugent launched a volley of invective at local protesters and the New Haven Register during a syndicated radio interview Saturday.
Anyone anticipating wild remarks from the Motor City Madman was not disappointed by his interview for the nationally syndicated radio program “The Weekend” that was aired on WGAN in Maine and dozens of other stations across the country.
Nugent’s planned Tuesday concert at Toad’s Place in New Haven, Conn., has drawn protests from area residents who took issue with Nugent’s recent comments on race in America. There has been a petition drive calling for the cancellation of the show, and protests are expected outside Toad’s during the show.
In the radio station interview, Nugent alluded to New Haven Register editorial writers and their supporters as “subhuman numb-nuts.” A recent Register editorial denounced Nugent’s statements on race and called for Toad’s to cancel the show.
When asked about the “idiots” in Connecticut who think he “shouldn’t play rock and roll,” Nugent told the conservative host: “You and I stand on the line of reason” and must not be silenced.
“People who hate Ted Nugent hate freedom,” he said, and promised to continue to speak what he sees as the truth.
Nugent also had some choice things to say about liberals, racial activists and Trayvon Martin during the interview.
Defending his previous comments about blacks and crime, Nugent insisted the statistics bear him out.
Of the Trayvon Martin case, Nugent said, “Trayvon got justice.” He went on to say that Martin was a “gangsta wannabe” who had a “bloodthirst,” as evidenced by the fact that he was supposedly eager to “get into fights with people.” He said Martin showed racism in calling George Zimmerman a “cracka.”
Barbara Fair, a West Haven resident heavily involved in the petition drive, said she first became aware of the show after her daughter told her about comments Nugent had made regarding Martin.
While Fair said Nugent has the right to free speech, “he has to agree that we have a right to protest.”
Nugent said Zimmerman got only a “sliver of justice.” Although Zimmerman was found not guilty, powerful people from President Barack Obama to Attorney General Eric Holder have targeted him, as have “Holder’s best friends,” the New Black Panthers. Nugent implied the president and Holder are trying to subvert the legitimate verdict in the case.
“It pains me deeply” to have to acknowledge that the president and some others in the administration are “just bad people,” Nugent said. He said he had hoped to be able to speak better of the country’s top officials.
James E. Rawlings, president of the Greater New Haven branch of the NAACP, said Nugent’s comments Saturday did not merit a response.
“The Ted Nugents of the world are the most extreme,” Rawlings said, noting that, “We will be there Tuesday.”
Rawlings said, “We will deal with this as a community and come together. The sooner Mr. Nugent is out of our community, the better off we’ll be.”
Fair said Nugent’s recent statements show “he’s being more ridiculous than he’s already been.”
She said the protest is in response to Nugent’s outsized voice in the political sphere. Fair said the protest isn’t about Nugent’s music or even his personal political views, but what he says on the national stage.
Last year, two Bangor, Maine, city councilors spoke out against Nugent’s scheduled concert on the city’s waterfront because of the rocker’s comments during a National Rifle Association convention in St. Louis. The concert went on as scheduled.
Nugent said his “humor meter” keeps him in good spirits, despite his detractors.
Asked about his family, Nugent said his is a “really good American family” because members are productive and “don’t take” but “give” to society.
“Liberals” and “racists” have targeted Nugent’s children, the rocker claimed, because political opponents are “mad” at him for “exposing the truth” about the Martin case and other issues.
Nugent said his children should be off-limits, and said his own whereabouts could always be tracked at Tednugent.com. He rattled off a list of people who might want to protest his appearances, including the Revs. Jesse Jackson and Al “Not-too-Sharpton.”
As a parting shot, the controversial musician told those he considers “haters” to “kiss my ass.”
Distributed by MCT Information Services |
/**
* A controller for a screw drive.
*
* @author Daniel Ruess
*/
public class ScrewDrive implements Updatable{
private final Jaguar motor = new Jaguar(6); // screw-drive motor controller on PWM channel 6
private final StringEncoder string = new StringEncoder(1); // position feedback on analog channel 1
NetworkTable server = NetworkTable.getTable("SmartDashboard"); // dashboard status reporting
/**
 * The default speed to run at.
 */
private static final double SPEED = 1D; //TODO: calculate this.
private double dest = 1; // current target position
private int speedScale = 0;
//TODO move all these handlers and listeners to the operator controller
private ScrewDriveHighButtonEventHandler highButtonHandler;
private ScrewDrivePassButtonEventHandler passButtonHandler;
private ScrewDriveResetButtonEventHandler resetButtonHandler;
private ScrewDriveTrussButtonEventHandler trussButtonHandler;
private ScrewDriveIncreaseOffsetButtonEventHandler increaseOffsetButtonHandler;
private ScrewDriveDecreaseOffsetButtonEventHandler decreaseOffsetButtonHandler;
// One listener per operator button; handlers are attached in init().
private ButtonListener highButtonListener = new ButtonListener();
private ButtonListener passButtonListener = new ButtonListener();
private ButtonListener resetButtonListener = new ButtonListener();
private ButtonListener trussButtonListener = new ButtonListener();
private ButtonListener increaseOffsetButtonListener = new ButtonListener();
private ButtonListener decreaseOffsetButtonListener = new ButtonListener();
private OperatorGameController operatorController;
private ScrewProperties props; // named screw positions (reset/pass/truss/high)
public ScrewDrive(OperatorGameController operatorController) {
//TODO talk to the team about having this be still when we enable the robot.
// set(props.pass());
this.operatorController = operatorController;
}
public void setProperties(ScrewProperties p){
props = p;
}
public void init() {
LiveWindow.addSensor("Boom", "String Encoder", string);
highButtonHandler = new ScrewDriveHighButtonEventHandler(operatorController, this);
passButtonHandler = new ScrewDrivePassButtonEventHandler(operatorController, this);
resetButtonHandler = new ScrewDriveResetButtonEventHandler(operatorController, this);
trussButtonHandler = new ScrewDriveTrussButtonEventHandler(operatorController, this);
increaseOffsetButtonHandler = new ScrewDriveIncreaseOffsetButtonEventHandler(operatorController, this);
decreaseOffsetButtonHandler = new ScrewDriveDecreaseOffsetButtonEventHandler(operatorController, this);
highButtonListener.addHandler(highButtonHandler);
passButtonListener.addHandler(passButtonHandler);
resetButtonListener.addHandler(resetButtonHandler);
trussButtonListener.addHandler(trussButtonHandler);
increaseOffsetButtonListener.addHandler(increaseOffsetButtonHandler);
decreaseOffsetButtonListener.addHandler(decreaseOffsetButtonHandler);
}
public void reset() {
set(props.reset());
server.putString("ScrewDrive.position", "resetting...");
}
public void trussShot() {
set(props.trussShot());
server.putString("ScrewDrive.position", "truss");
}
public void pass() {
set(props.pass());
server.putString("ScrewDrive.position", "pass");
}
public void high() {
set(props.highGoal());
server.putString("ScrewDrive.position", "high");
}
public void autonomous() {
set(props.highGoal());
server.putString("ScrewDrive.position", "auto shot");
}
/**
*
* @param destination
*/
private void set(Location destination) {
// if (resetting) {
// nextDestination = destination;
//} else {
this.dest = destination.loc;
//}
}
public void increaseOffset() {
dest += .1;
}
public void decreaseOffset() {
dest -= .1;
}
public int getPosition() {
return string.getDistance();
}
public void update() {
seekSetPoint();
server.putNumber("ScrewDrive.location", string.getAverageVoltage());
long currentTime = System.currentTimeMillis();
highButtonListener.updateState(operatorController.isScrewDriveHighPressed(), currentTime);
passButtonListener.updateState(operatorController.isScrewDrivePassPressed(), currentTime);
resetButtonListener.updateState(operatorController.isScrewDriveResetPressed(), currentTime);
trussButtonListener.updateState(operatorController.isScrewDriveTrussPressed(), currentTime);
server.putNumber("ScrewDrive.destination", dest);
}
void seekSetPoint() {
double loc = string.getAverageVoltage();
double dif = Math.abs(dest - loc);
double error = .01;
if (dif > error) {
if (dest < loc) {
if (dif < .1) {
motor.set(.4);
} else {
motor.set(SPEED);
}
} else {
if (dif < .07) {
motor.set(0);
} else {
motor.set(-1 * SPEED);
}
}
} else {
motor.set(0);
if (dest == props.reset().loc) {
set(props.highGoal());
server.putString("ScrewDrive.position", "high");
}
}
}
/**
* Represents a location to move the screw drive to.
*/
public static class Location {
private final double loc;
public Location(double loc) {
this.loc = loc;
}
}
} |
import React from 'react';
import './best-deals.scss';
const BestDealsPage: React.FC = () => (
<div className="BestDealsPage">
BestDealsPage
</div>
);
export default BestDealsPage;
|
Towards a wireless microsystem for liquid analysis
The paper describes both semi-wireless and wireless (433 MHz) liquid sensing systems based upon dual shear horizontal surface acoustic wave (SH-SAW) devices and introduces a novel design concept and a novel principle of detection. Each system comprises an uncoated passive SH-SAW device, a liquid microcell, a device antenna and an associated signal processing unit. The LiTaO/sub 3/-based SH-SAW sensors have dual delay line configurations with different path lengths, the principle of detection being the attenuation and time delay between the signals. The sensors have no biochemical selective layer, making them non-specific but creating a robust/durable and low-cost system. This sensing system may be applied in the food/beverage industry and in medical applications; a functionalized coating can be added for more specific biological applications.
TRENTON, N.J. – Mitch Walding, a former fifth-round pick who went to the same Northern California high school as former Blue Jays third baseman Ed Sprague and current Phillies utilityman Ty Kelly, has always been confident in his ability to hit.
But if you would have told him two years ago that he’d be leading the Eastern League in home runs and tapped as a top seed for the league’s Home Run Derby at the All-Star Game, he would have probably thought you were giving him a hard time. The left-handed hitting third baseman had four home runs in 461 plate appearances over 120 games in 2015 at Class A Clearwater, one level below the Eastern League.
Walding had 13 home runs, total, in 412 games between Williamsport, Lakewood, and Clearwater in his first four seasons in professional ball.
So what happened in his fifth and sixth minor league season? Walding hit 13 homers to go alongside a .269/.366/.429 slash line in 123 games between Clearwater and Reading last year.
This year? He leads the Eastern League with 20 home runs at the break. Walding hit as many home runs in June (13) as he had in all of 2016.
“Early on in my career I was always kind of an opposite field hitter, it’s the way the swing was when I was in high school,” Walding explained Thursday afternoon in Trenton, where Reading was set to wrap up a series. “I never really knew how to truly pull a baseball, like a hard fastball, I didn’t know how to pull the ball or attack it correctly. It wasn’t until the end of 2015 when I started working with (hitting coaches) Andy Tracy and Rob Ducey on how to pull a fastball the right way.
“That’s when I started to hit for more power, hitting more home runs, things started to turn around for me a little bit better. So I just had to fine-tune that swing. Basically getting the bat head out in front. I think that was a changing point in my career.”
For professional hitters, success fuels confidence, which, in turn, can sustain success. It’s the mindset often referred to in sports circles as being in the zone.
“He got a couple (of home runs) early in June, a couple of games apart,” Reading hitting coach John Mizerock said. “Hit a couple of them in Akron, sort of out of nowhere, maybe two days later a couple more. And he started feeling good about himself… and soon three becomes four and four became five, a couple more hits and then another one, then Player of the Week honors and you’re feeling pretty good and the next week starts out pretty good and you stay hot for a month. … In this day and age with everything being somehow quantifiable or numbers oriented, it’s a tough one to figure out. You watch his video from April, May, and June, throw it in a hat, you’re not going to know which one June was. It looks (the same), very minimal mechanical adjustments, not enough to make 13 home runs.
“It’s how he’s feeling about himself, how he’s walking up to the plate. You can’t put your finger on it. I’m sure someone will … there’s some gadget they’ll put on you to (grade) the positive aura coming off. But whatever it is, I’d like to do a test on him now. A couple weeks from now he goes into a slump I’ll ask him to remember June. I don’t know, what you had for lunch, how many cars you passed on the way in (to the ballpark). Try it again, dude. It worked then, no reason it can’t work again.”
Walding, who advanced to the second round of the Eastern League’s Home Run Derby on Tuesday night in Manchester, N.H., is one of five Reading players who will represent the organization in the league’s All-Star Game tonight, along with outfielders Carlos Tocci and Jiandido Tromp and pitchers Drew Anderson and Yacksel Rios.
The 24-year-old Walding was named the Eastern League Player of the Month in June, when he hit .337 with four doubles, two triples, 13 home runs, 27 RBI, a .398 on-base percentage, and a .848 slugging percentage in 25 games.
“Nobody works harder than Mitch,” Reading manager Greg Legg said. “He’s in the cage first every day, unless (Mizerock) makes him come in later. His work ethic is off the charts, so I couldn’t be happier, couldn’t be happening to a better young man.
“I’ve been fortunate enough to watch his development over the years and right now he’s just in a really good place. He’s confident, he knows he can do damage. And as time goes on, I look for him to hit another five or six this month and possibly another six to nine in August. And when you put those numbers together we should be somewhere in the 30s, and it’d be a helluva year for him.”
The Reading Fightin’ Phils lost their two most prolific hitters to promotions within the last month, with Andrew Pullin and Scott Kingery now a part of the prospect-laden lineup at Triple-A Lehigh Valley. But thanks to Walding’s power surge and the consistent productivity of Tocci, Reading has remained the second-best team in the Eastern League (going 12-8 in 20 games since Pullin’s promotion three weeks ago).
Walding’s monster June, along with his maturity as a hitter in the last two seasons, has obviously played a part.
Walding slashed .233/.318/.315 with four home runs in 120 games at Class A Clearwater in 2015, his fourth full season of pro ball. Since re-working his swing with Tracy and Ducey after that season, Walding has slashed .258/.355/.472 with 33 home runs in his last 197 games.
But there was something else that triggered his bat and tapped into his talent: a visit to Citizens Bank Park before the start of the 2016 season. Walding was one of 19 Phillies prospects who got into the Phillies Futures Game two days before the big league club’s Opening Day last year.
“That was a big moment for me, everything started to set in, realizing how close you really are,” Walding said. “The atmosphere you want to be in, as big leaguer. I’ve always wanted that obviously, but once you step foot in that clubhouse and that field, that’s when it really sets in, what your dream actually is. We got on a flight, all of the guys, and it was to the nines as an experience. It was unbelievable.
“And I think that's when it really set in with me, how close (you are), and how bad you really want it. Obviously, I look forward to going up each level and one day getting to the big leagues. I believe in myself that I’ll definitely be there at some point, whenever that may be. But as far as right now, I’m just focused on getting better every single day and helping my team win, becoming the best player I can be to one day get there.”
Follow Ryan on Twitter: @ryanlawrence21
Like the new PhillyVoice Sports page on Facebook. |
""" Common functions for package biobb_analysis.ambertools """
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import csv
import re
from pathlib import Path, PurePath
from biobb_common.tools import file_utils as fu
from warnings import simplefilter
# ignore all future warnings
simplefilter(action='ignore', category=FutureWarning)
simplefilter(action='ignore', category=RuntimeWarning)
sns.set()
# CHECK PARAMETERS
def check_input_path(path, argument, out_log, classname):
    """Validate that an input file exists and its extension is compatible.

    Logs and exits via SystemExit on failure; returns the path unchanged on success.
    """
    if not Path(path).exists():
        fu.log(f'{classname}: Unexisting {argument} file, exiting', out_log)
        raise SystemExit(f'{classname}: Unexisting {argument} file')
    extension = PurePath(path).suffix[1:]
    if not is_valid_file(extension, argument):
        fu.log(f'{classname}: Format {extension} in {argument} file is not compatible', out_log)
        raise SystemExit(f'{classname}: Format {extension} in {argument} file is not compatible')
    return path
def check_output_path(path, argument, optional, out_log, classname):
    """Validate an output path: parent folder must exist, extension must be compatible.

    Optional outputs with an empty path are accepted and yield None.
    """
    if optional and not path:
        return None
    parent = PurePath(path).parent
    if parent and not Path(parent).exists():
        fu.log(f'{classname}: Unexisting {argument} folder, exiting', out_log)
        raise SystemExit(f'{classname}: Unexisting {argument} folder')
    extension = PurePath(path).suffix[1:]
    if not is_valid_file(extension, argument):
        fu.log(f'{classname}: Format {extension} in {argument} file is not compatible', out_log)
        raise SystemExit(f'{classname}: Format {extension} in {argument} file is not compatible')
    return path
def is_valid_file(ext, argument):
    """Check whether extension *ext* is accepted for the given *argument* name.

    Returns False (instead of raising KeyError, as the previous version did)
    when *argument* is not a known file role, so callers report a clean
    "format not compatible" error.
    """
    formats = {
        'input_dataset_path': ['csv'],
        'output_model_path': ['pkl'],
        'output_dataset_path': ['csv'],
        'output_results_path': ['csv'],
        'input_model_path': ['pkl'],
        'output_test_table_path': ['csv'],
        'output_plot_path': ['png']
    }
    return ext in formats.get(argument, [])
def check_mandatory_property(property, name, out_log, classname):
    """Ensure a mandatory property has a truthy value; log and exit otherwise."""
    if property:
        return property
    fu.log(f'{classname}: Unexisting {name} property, exiting', out_log)
    raise SystemExit(f'{classname}: Unexisting {name} property')
# UTILITIES
def adjusted_r2(x, y, r2):
    """Adjusted R^2 given the design matrix *x* (n samples x p predictors).

    *y* is unused; kept for interface compatibility with callers.
    """
    n_samples, n_predictors = x.shape[0], x.shape[1]
    return 1 - (1 - r2) * (n_samples - 1) / (n_samples - n_predictors - 1)
def get_list_of_predictors(predictions):
    """Turn a list of prediction dicts into a list of their value lists."""
    return [list(prediction.values()) for prediction in predictions]
def get_keys_of_predictors(predictions):
    """Return the keys of the first prediction dict, in insertion order."""
    return list(predictions[0])
def predictionPlot(tit, data1, data2, xlabel, ylabel):
    """Scatter *data2* against *data1* on the current axes with a y=x reference line.

    Mutates the current matplotlib figure; call inside a prepared subplot.
    """
    plt.title(tit, size=15)
    plt.scatter(data1, data2, alpha=0.2)
    plt.xlabel(xlabel,size=14)
    plt.ylabel(ylabel,size=14)
    axes = plt.gca()
    # Force a square value range so the identity line is the diagonal.
    lims = axes.get_xlim()
    plt.xlim(lims)
    plt.ylim(lims)
    plt.plot(lims, lims)
def histogramPlot(tit, data1, data2, xlabel, ylabel):
    """Histogram of the elementwise error (data2 - data1) on the current axes."""
    plt.title(tit, size=15)
    error = data2 - data1
    plt.hist(error, bins = 25)
    plt.xlabel(xlabel,size=14)
    plt.ylabel(ylabel,size=14)
def plotResults(y_train, y_hat_train, y_test, y_hat_test):
    """Build a 2x2 figure: prediction scatter and error histogram for train and test.

    Returns the matplotlib.pyplot module so the caller can save or show the figure.
    """
    #FIGURE
    plt.figure(figsize=[8,8])
    plt.subplot(221)
    predictionPlot('Train predictions', y_train, y_hat_train, 'true values', 'predictions')
    plt.subplot(222)
    histogramPlot('Train histogram', y_train, y_hat_train, 'prediction error', 'count')
    plt.subplot(223)
    predictionPlot('Test predictions', y_test, y_hat_test, 'true values', 'predictions')
    plt.subplot(224)
    histogramPlot('Test histogram', y_test, y_hat_test, 'prediction error', 'count')
    plt.tight_layout()
    return plt
def getIndependentVars(independent_vars, data, out_log, classname):
    """Select predictor columns from *data* by 'indexes', 'range' or 'columns'."""
    if 'indexes' in independent_vars:
        return data.iloc[:, independent_vars['indexes']]
    if 'range' in independent_vars:
        selected = []
        for rng in independent_vars['range']:
            selected.extend(range(rng[0], rng[1] + 1))
        return data.iloc[:, selected]
    if 'columns' in independent_vars:
        return data.loc[:, independent_vars['columns']]
    fu.log(f'{classname}: Incorrect independent_vars format', out_log)
    raise SystemExit(f'{classname}: Incorrect independent_vars format')
def getIndependentVarsList(independent_vars):
    """Comma-separated, human-readable description of the selected predictors."""
    if 'indexes' in independent_vars:
        return ', '.join(map(str, independent_vars['indexes']))
    if 'range' in independent_vars:
        expanded = [str(v) for r in independent_vars['range'] for v in range(r[0], r[1] + 1)]
        return ', '.join(expanded)
    if 'columns' in independent_vars:
        return ', '.join(independent_vars['columns'])
def getTarget(target, data, out_log, classname):
    """Select the target column from *data* by positional 'index' or 'column' name."""
    if 'index' in target:
        return data.iloc[:, target['index']]
    if 'column' in target:
        return data[target['column']]
    fu.log(f'{classname}: Incorrect target format', out_log)
    raise SystemExit(f'{classname}: Incorrect target format')
def getTargetValue(target):
    """Printable identifier of the configured target (index as str, or column name)."""
    if 'index' in target:
        return str(target['index'])
    if 'column' in target:
        return target['column']
def getWeight(weight, data, out_log, classname):
    """Select the sample-weight column from *data* by positional 'index' or 'column' name."""
    if 'index' in weight:
        return data.iloc[:, weight['index']]
    if 'column' in weight:
        return data[weight['column']]
    fu.log(f'{classname}: Incorrect weight format', out_log)
    raise SystemExit(f'{classname}: Incorrect weight format')
def getHeader(file):
    """Return the header (first row) of a CSV-like file.

    If the first row parses as a single field, it is re-split on common
    delimiters (whitespace, ';', ':', ',').
    """
    with open(file, newline='') as f:
        header = next(csv.reader(f))
    if len(header) == 1:
        # Raw string: '\s' in a non-raw literal is an invalid escape sequence and
        # warns on modern Python. '\t' is kept for clarity though '\s+' covers it.
        return list(re.sub(r'\s+|;|:|,|\t', ',', header[0]).split(","))
    return header
|
# import pytest
from punch import helpers
def test_optstr2dict_single_option():
    """A single key=value pair is parsed into a one-entry dict."""
    assert helpers.optstr2dict("part=major") == {'part': 'major'}
def test_optstr2dict_multiple_options():
    """Comma-separated pairs are parsed into multiple dict entries."""
    expected = {'part': 'major', 'reset': 'true'}
    assert helpers.optstr2dict("part=major,reset=true") == expected
def test_optstr2dict_convert_boolean():
    """convert_boolean=True turns the string 'true' into Python True."""
    assert helpers.optstr2dict("reset=true", convert_boolean=True) == {'reset': True}
def test_optstr2dict_convert_boolean_false():
    """convert_boolean=True turns the string 'false' into Python False."""
    assert helpers.optstr2dict("reset=false", convert_boolean=True) == {'reset': False}
def test_optstr2dict_convert_boolean_mixed_case():
    """Boolean conversion is case-insensitive."""
    assert helpers.optstr2dict("reset=TrUe", convert_boolean=True) == {'reset': True}
|
// 2012-07-22 shift to confine horizontally or vertically, ctrl-shift to resize, ctrl to pick
/**
 * ImageJ plug-in tool implementing the paintbrush and pencil tools.
 * One instance serves both tools; the {@code arg} passed to run() selects which.
 * Drawing happens directly on the current ImageProcessor; a red overlay circle
 * previews the brush size while resizing with ctrl-shift-drag.
 */
public class BrushTool extends PlugInTool implements Runnable {
// Interaction modes set in mousePressed/mouseDragged.
private final static int UNCONSTRAINED=0, HORIZONTAL=1, VERTICAL=2, RESIZING=3, RESIZED=4, IDLE=5; //mode flags
private static String BRUSH_WIDTH_KEY = "brush.width";
private static String PENCIL_WIDTH_KEY = "pencil.width";
private static String CIRCLE_NAME = "brush-tool-overlay";
private static final String LOC_KEY = "brush.loc";
private String widthKey;
private int width;
private ImageProcessor ip;
private int mode; //resizing brush or motion constrained horizontally or vertically
private int xStart, yStart;
private int oldWidth;
private boolean isPencil;
private Overlay overlay;
private Options options;
private GenericDialog gd;
// Entry point: arg selects pencil vs brush; restores the saved width and registers the tool.
public void run(String arg) {
isPencil = "pencil".equals(arg);
widthKey = isPencil ? PENCIL_WIDTH_KEY : BRUSH_WIDTH_KEY;
width = (int)Prefs.get(widthKey, isPencil ? 1 : 5);
Toolbar.addPlugInTool(this);
}
// Starts a stroke, a resize gesture (ctrl-shift) or a color pick (ctrl).
public void mousePressed(ImagePlus imp, MouseEvent e) {
ImageCanvas ic = imp.getCanvas();
int x = ic.offScreenX(e.getX());
int y = ic.offScreenY(e.getY());
xStart = x;
yStart = y;
ip = imp.getProcessor();
// On macOS the Command key plays the role of Ctrl.
int ctrlMask = IJ.isMacintosh() ? InputEvent.META_MASK : InputEvent.CTRL_MASK;
int resizeMask = InputEvent.SHIFT_MASK | ctrlMask;
if ((e.getModifiers() & resizeMask) == resizeMask) {
mode = RESIZING;
oldWidth = width;
return;
} else if ((e.getModifiers() & ctrlMask) != 0) {
boolean altKeyDown = (e.getModifiers() & InputEvent.ALT_MASK) != 0;
ic.setDrawingColor(x, y, altKeyDown); //pick color from image (ignore overlay)
if (!altKeyDown && gd != null)
options.setColor(Toolbar.getForegroundColor());
mode = IDLE;
return;
}
mode = UNCONSTRAINED;
// Snapshot for Undo before the stroke modifies pixels.
ip.snapshot();
Undo.setup(Undo.FILTER, imp);
ip.setLineWidth(width);
if (e.isAltDown())
ip.setColor(Toolbar.getBackgroundColor());
else
ip.setColor(Toolbar.getForegroundColor());
ip.moveTo(x, y);
// With shift held the first press only anchors the stroke (no dot is drawn).
if (!e.isShiftDown()) {
ip.lineTo(x, y);
imp.updateAndDraw();
}
}
// Continues a stroke or updates the resize preview; shift locks the stroke to one axis.
public void mouseDragged(ImagePlus imp, MouseEvent e) {
if (mode == IDLE) return;
ImageCanvas ic = imp.getCanvas();
int x = ic.offScreenX(e.getX());
int y = ic.offScreenY(e.getY());
if (mode == RESIZING) {
showToolSize(x-xStart, imp);
return;
}
if ((e.getModifiers() & InputEvent.SHIFT_MASK) != 0) { //shift constrains
if (mode == UNCONSTRAINED) { //first movement with shift down determines direction
if (Math.abs(x-xStart) > Math.abs(y-yStart))
mode = HORIZONTAL;
else if (Math.abs(x-xStart) < Math.abs(y-yStart))
mode = VERTICAL;
else return; //constraint direction still unclear
}
if (mode == HORIZONTAL)
y = yStart;
else if (mode == VERTICAL)
x = xStart;
} else {
xStart = x;
yStart = y;
mode = UNCONSTRAINED;
}
ip.lineTo(x, y);
imp.updateAndDraw();
}
// Ends a resize gesture: removes the preview circle and persists the new width.
public void mouseReleased(ImagePlus imp, MouseEvent e) {
if (mode==RESIZING) {
if (overlay!=null && overlay.size()>0 && CIRCLE_NAME.equals(overlay.get(overlay.size()-1).getName())) {
overlay.remove(overlay.size()-1);
imp.setOverlay(overlay);
}
overlay = null;
// NOTE(review): the new width is saved to Prefs only while shift is still held
// at release time — confirm this gating is intentional.
if (e.isShiftDown()) {
if (gd!=null)
options.setWidth(width);
Prefs.set(widthKey, width);
}
}
}
// Updates the brush width from a resize drag and previews it as a red overlay circle.
private void showToolSize(int deltaWidth, ImagePlus imp) {
if (deltaWidth !=0) {
width = oldWidth + deltaWidth;
if (width<1) width=1;
Roi circle = new OvalRoi(xStart-width/2, yStart-width/2, width, width);
circle.setName(CIRCLE_NAME);
circle.setStrokeColor(Color.red);
overlay = imp.getOverlay();
if (overlay==null)
overlay = new Overlay();
else if (overlay.size()>0 && CIRCLE_NAME.equals(overlay.get(overlay.size()-1).getName()))
overlay.remove(overlay.size()-1);
overlay.add(circle);
imp.setOverlay(overlay);
}
IJ.showStatus((isPencil?"Pencil":"Brush")+" width: "+ width);
}
// Opens the options dialog on its own thread so it does not block the event dispatch thread.
public void showOptionsDialog() {
Thread thread = new Thread(this, "Brush Options");
thread.setPriority(Thread.NORM_PRIORITY);
thread.start();
}
public String getToolName() {
if (isPencil)
return "Pencil Tool";
else
return "Paintbrush Tool";
}
// Encoded toolbar icons (ImageJ icon string format).
public String getToolIcon() {
if (isPencil)
return "C037L4990L90b0Lc1c3L82a4Lb58bL7c4fDb4L494fC123L5a5dL6b6cD7b";
else
return "C037La077Ld098L6859L4a2fL2f4fL5e9bL9b98L6888L5e8dL888cC123L8a3fL8b6d";
}
// Runnable body for showOptionsDialog().
public void run() {
new Options();
}
// Non-blocking options dialog; also kept in sync when the user resizes/picks with the mouse.
class Options implements DialogListener {
Options() {
if (gd != null) {
gd.toFront();
return;
}
options = this;
showDialog();
}
// Pushes a width chosen with ctrl-shift-drag into the dialog's field and slider.
void setWidth(int width) {
Vector numericFields = gd.getNumericFields();
TextField widthField = (TextField)numericFields.elementAt(0);
widthField.setText(""+width);
Vector sliders = gd.getSliders();
Scrollbar sb = (Scrollbar)sliders.elementAt(0);
sb.setValue(width);
}
// Pushes a color picked from the image into the dialog's choice, if it has a name.
void setColor(Color c) {
String name = Colors.getColorName(c, "");
if (name.length() > 0) {
Vector choices = gd.getChoices();
Choice ch = (Choice)choices.elementAt(0);
ch.select(name);
}
}
public void showDialog() {
Color color = Toolbar.getForegroundColor();
String colorName = Colors.colorToString2(color);
String name = isPencil?"Pencil":"Brush";
gd = new NonBlockingGenericDialog(name+" Options");
gd.addSlider(name+" width:", 1, 50, width);
gd.addChoice("Color:", Colors.getColors(colorName), colorName);
gd.setInsets(10, 10, 0);
String ctrlString = IJ.isMacintosh()? "CMD":"CTRL";
gd.addMessage("SHIFT for horizontal or vertical lines\n"+
"ALT to draw in background color\n"+
ctrlString+"-SHIFT-drag to change "+(isPencil ? "pencil" : "brush")+" width\n"+
ctrlString+"-(ALT) click to change foreground\n"+
"(background) color\n"+
"Or use this dialog or Color Picker (shift-k).", null, Color.darkGray);
gd.hideCancelButton();
// The "Help" button is repurposed as an Undo button (see dialogItemChanged).
gd.addHelp("");
gd.setHelpLabel("Undo");
gd.setOKLabel("Close");
gd.addDialogListener(this);
Point loc = Prefs.getLocation(LOC_KEY);
if (loc!=null) {
gd.centerDialog(false);
gd.setLocation (loc);
}
gd.showDialog();
// Persist the dialog position for next time; clearing gd marks the dialog closed.
Prefs.saveLocation(LOC_KEY, gd.getLocation());
gd = null;
}
public boolean dialogItemChanged(GenericDialog gd, AWTEvent e) {
if (e!=null && e.toString().contains("Undo")) {
ImagePlus imp = WindowManager.getCurrentImage();
if (imp!=null) IJ.run("Undo");
return true;
}
width = (int)gd.getNextNumber();
if (gd.invalidNumber() || width<0)
width = (int)Prefs.get(widthKey, 1);
String colorName = gd.getNextChoice();
Color color = Colors.decode(colorName, null);
Toolbar.setForegroundColor(color);
Prefs.set(widthKey, width);
return true;
}
}
}
/**
* Creates and sends an event to listeners when a user closes java.sql.Connection
* object belonging to this PooledConnection.
*/
protected void connectionClosedNotification() {
synchronized (connectionEventListeners) {
if (connectionEventListeners.size() == 0)
return;
ConnectionEvent closedEvent = new ConnectionEvent(this);
for (final ConnectionEventListener nextListener : connectionEventListeners) {
nextListener.connectionClosed(closedEvent);
}
}
} |
// WithMutexMode will use mutex to receive metrics from the app through the API.
//
// This determines how the client receives metrics from the app (for example when calling the `Gauge()` method).
// The client will either drop the metrics if its buffers are full (WithChannelMode option) or block the caller until the
// metric can be handled (WithMutexMode option). By default the client uses mutexes.
//
// WithMutexMode uses mutexes to receive metrics which is much faster than channels but can cause some lock contention
// when used with a high number of goroutines sending the same metrics. Mutexes are sharded based on the metrics name
// which limits mutex contention when multiple goroutines send different metrics (see WithWorkersCount). This is the
// default behavior which will produce the best throughput.
func WithMutexMode() Option {
	return func(o *Options) error {
		o.receiveMode = mutexMode
		return nil
	}
}
/**NOTE: This main function is only used for testing the function getDist(path)*/
public static void main(String[] args) throws Exception {
System.out.println("----- Distribution Information in dataset -----\n");
String[] path = {"files/data/codec.arff",
"files/data/ormlite.arff", "files/data/jsqlparser.arff", "files/data/collections.arff",
"files/data/io.arff", "files/data/jsoup.arff", "files/data/mango.arff"
,"files/data/lang.arff"
};
for(int i=0; i<path.length; i++){
getDist(path[i]);
}
} |
/** Write the footer on a stream */
private static void writeFooter(DataOutputStream fout,String version)
{
try{
fout.writeBytes("<hr width=\"800\"></center>");
fout.writeBytes("<b>GanttProject ("+version+")</b><br>\n");
fout.writeBytes("<b><a href=\"ganttproject.sourceforge.net\">ganttproject.sourceforge.net</a></b><br>\n");
fout.writeBytes("<b>"+GanttCalendar.getDateAndTime()+"</b><br>\n");
fout.writeBytes("</body>\n</html>");
}catch(Exception e) {
System.out.println(e);
}
} |
The Effect of Familiarization on the Reliability of Isokinetic Assessment in Breast Cancer Survivors
To determine the number of familiarization sessions required by breast cancer survivors to achieve a reliable measurement of muscle function assessed using isokinetic dynamometry. Twenty-six breast cancer survivors performed three isokinetic knee extension tests separated by, at least, 48 h. The isokinetic testing protocol included one warm-up set of 10 submaximal knee extensions at 120°/s, followed by two sets of four maximal knee extensions at 60°/s, with 2-min rest interval between sets. Peak torque (PT), time to peak torque (TPT), angle of peak torque (APT), and average power (AP) of each trial was used for the assessment of testing reliability. Percentage change in the mean, typical error, coefficient of variation and intraclass correlation coefficients (ICC2.1) were calculated to determine test–retest reliability. For PT, change in mean was lower between trials 2 and 3 than between trials 1 and 2 (4.18% and 13.18%, respectively), and ICC was greater between trials 2 and 3 than between trials 1 and 2 (0.962 and 0.818, respectively). For TPT and APT, ICC was clinically acceptable only between trials 2 and 3 (0.757 and 0.803, respectively). For AP, change in mean was clinically acceptable between trials 2 and 3 (9.84%), while ICC met acceptable reliability between both, trials 1 and 2 and, trials 2 and 3 (0.756 and 0.891, respectively). At least one familiarization session is adequate to achieve reliable measurements of muscle function using isokinetic dynamometry, while avoiding the impact of the learning effect on the measurements in breast cancer survivors.
package main

import (
	"fmt"
	"strings"
)

// ping publishes message on the ping channel.
func ping(pingChannel chan<- string, message string) {
	pingChannel <- message
}

// pong consumes a message from the ping channel and replies upper-cased on the pong channel.
func pong(pingChannel <-chan string, pongChannel chan<- string) {
	pongChannel <- strings.ToUpper(<-pingChannel)
}

// main reads a word from stdin, bounces it through ping/pong and prints the reply.
func main() {
	pings := make(chan string, 1)
	pongs := make(chan string, 1)

	var message string
	fmt.Print("Enter the message :: ")
	fmt.Scanf("%s", &message)

	ping(pings, message)
	pong(pings, pongs)
	fmt.Println(<-pongs)
}
|
<gh_stars>100-1000
# Copyright (c) 2021 Graphcore Ltd. All rights reserved.
# Written by <NAME>
import yaml
import os
from config import cfg
def merge_dict(src, dst):
    """Recursively merge *src* into *dst* in place; src values win. Returns dst."""
    for key, value in src.items():
        if isinstance(value, dict):
            merge_dict(value, dst.setdefault(key, {}))
        else:
            dst[key] = value
    return dst
def assgin_attr(yaml_cfg, dst_cfg):
    """Recursively copy values from a nested dict onto a config object tree.

    Note: the name ("assgin") is a historical typo kept for caller compatibility.
    """
    for key, value in yaml_cfg.items():
        if isinstance(value, dict):
            assgin_attr(value, dst_cfg.get(key))
        else:
            setattr(dst_cfg, key, value)
def get_multi_level_folders(path):
    """Recursively split *path* into the list of all its components."""
    head, tail = os.path.split(path)
    if head in ('.', '/', '', '..'):
        return [head, tail]
    return get_multi_level_folders(head) + [tail]
def change_cfg_by_yaml_file(yaml_file):
    """Load a YAML file (resolving 'parent' chains) and apply it onto the global cfg.

    Also derives cfg.task_name from the file name and cfg.output_dir from the
    file's location (mirroring a 'yamls/...' tree under 'outputs/...').
    """
    with open(yaml_file, 'r') as f:
        yaml_cfg = yaml.load(f, Loader=yaml.FullLoader)
    # NOTE(review): the None check below comes *after* load_parent(); an empty YAML
    # file yields None and load_parent(None) would raise — confirm intended order.
    yaml_cfg = load_parent(yaml_cfg)
    if 'parent' in yaml_cfg:
        del yaml_cfg['parent']
    if yaml_cfg is None:
        yaml_cfg = {}
    # assign yaml file name to name of this config
    folderpath, filename = os.path.split(yaml_file)
    # NOTE(review): split('.') assumes exactly one dot in the file name.
    name, _ = filename.split('.')
    assert 'task_name' not in yaml_cfg
    yaml_cfg['task_name'] = name
    # assign output dir, if yaml file in yamls folder, replace folder 'yamls' with 'outputs', then the result path is the output dir
    # else use 'output' + name as the output dir
    list_of_folders_and_file = get_multi_level_folders(folderpath)
    if 'yamls' in list_of_folders_and_file:
        idx = list_of_folders_and_file.index('yamls')
        list_of_folders_and_file[idx] = 'outputs'
        list_of_folders_and_file = list_of_folders_and_file + [name]
        output_dir = os.path.join(*list_of_folders_and_file)
    else:
        output_dir = os.path.join('outputs', name)
    yaml_cfg['output_dir'] = output_dir
    assgin_attr(yaml_cfg, cfg)
def load_parent(yaml_cfg):
    """Recursively resolve 'parent' references, merging child settings over parents."""
    if 'parent' not in yaml_cfg:
        return yaml_cfg
    parent_path = yaml_cfg['parent']
    assert os.path.exists(parent_path)
    with open(parent_path, 'r') as fh:
        parent_cfg = yaml.load(fh, Loader=yaml.FullLoader)
    return merge_dict(yaml_cfg, load_parent(parent_cfg))
def save_yaml(file_path):
    """Dump the current global cfg to *file_path* as YAML."""
    with open(file_path, 'w', encoding='utf-8') as f:
        yaml.dump(cfg, f)
# Smoke test: load the example config, print a few resolved values, round-trip to YAML.
if __name__ == '__main__':
    change_cfg_by_yaml_file('yamls/example.yaml')
    print(cfg.TRAIN.STEPSIZE)
    print(cfg.TRAIN.LEARNING_RATE)
    print(cfg.TRAIN.BBOX_NORMALIZE_STDS)
    print(cfg['TRAIN']['BBOX_NORMALIZE_STDS'])
    save_yaml('test.yaml')
|
/**
 * Returns the factory bean that makes it possible to inject {@link LockService} directly into other beans instead of accessing it via
 * {@link Hekate#locks()} method.
 *
 * @return Service bean.
 */
@Lazy
@Bean
public LockServiceBean lockService() {
// @Lazy: the bean is created on first injection rather than at context startup.
return new LockServiceBean();
}
<gh_stars>0
//
// Dog.h
// A1property
//
// Created by MS on 15-6-2.
// Copyright (c) 2015 qf. All rights reserved.
//
#import <Foundation/Foundation.h>
// Demo class illustrating the four groups of @property attribute modifiers.
@interface Dog : NSObject
@property int age;
// Group 1: atomicity.
// nonatomic (no thread-safety) / atomic (the default; thread-safe accessors)
@property (nonatomic) int height;
// Group 2: access.
// readwrite (the default) / readonly (generates only a getter, no setter)
// NOTE(review): "weith" is presumably a typo for "weight"; renaming would break callers.
@property (readonly) int weith;
// Group 3: renaming the setter and getter.
// This is the only group whose modifiers can coexist with all the other groups.
@property (setter = setNnnn:, getter = nnnn) int name;
// Last group: (copy, retain, assign). The three differ only in the memory
// management performed inside the generated setter.
// Use assign for primitive types, copy for strings, and retain for everything
// else (custom objects, arrays, dictionaries).
// Under ARC these would conceptually be strong/weak, but the compiler is lax here.
@property (copy) NSString *job;
// In practice this is usually written as (nonatomic, assign/copy/retain).
@property (nonatomic, assign) float scroll;
@end
|
def _get_data(self, gas, loc, voltage, speed, trial):
    """Collect SensorColumn objects for the requested measurement set.

    Iterates the cartesian product of ``gas`` x ``loc`` x ``voltage`` x
    ``speed`` (all lists of indices into the class's name tables), finds the
    data files whose names end with the matching set-point suffix, and keeps
    only the 1-based trial numbers listed in ``trial``.  Missing directories
    or files are reported and skipped rather than raised.

    Returns a list of SensorColumn instances.
    """
    cols = []
    for g in gas:
        for l in loc:
            try:
                (sub, files) = self._get_sensor_col_files(g, l)
            except OSError as e:
                # Directory for this gas/location combination is missing.
                print('{}\n Keeping calm and carrying on.'.format(e))
                continue
            for v in voltage:
                for s in speed:
                    # Filename suffix encoding the set points of this run.
                    end = "_board_setPoint_%s_fan_setPoint_%s_mfc_setPoint_%sppm_p%s" % (
                        self.SensorVoltages[v],
                        self.FanSpeeds[s],
                        self.GasNames[g],
                        self.AltLocs[l])
                    filtered = [f.split('/')[-1] for f in files if f.endswith(end)]
                    if not filtered:
                        if self._args['verbose']:
                            print('No valid files found for "%s", skipping!' % sub)
                        continue
                    # Leading "<YYYYMMDDHHMM>_" of each filename is its timestamp.
                    timeStamp = [filt.split('_', 1)[0] for filt in filtered]
                    date = [time.strptime(ts, '%Y%m%d%H%M') for ts in timeStamp]
                    date = [time.strftime('%Y-%m-%d %H:%M', d) for d in date]
                    # NOTE(review): ``date`` is computed but never used below —
                    # confirm whether it can be removed.
                    filtered = [os.path.join(sub, f) for f in filtered]
                    for i, filt in enumerate(filtered):
                        # Trials are numbered from 1, not 0.
                        j = i + 1
                        if j in trial:
                            p = os.path.sep.join([self.dataloc_prefix,
                                                  self.data_location,
                                                  filt])
                            cols.append(SensorColumn(data_location=p,
                                                     gas=self.GasNames[g],
                                                     loc=self.Locs[l],
                                                     voltage=self.SensorVoltages[v],
                                                     speed=self.AltFanSpeeds[s],
                                                     trial=j,
                                                     _args=self._args))
    if self._args['verbose']:
        print('\nSelected %i single trial SensorColumns!' % len(cols))
    return cols
// newTemplateFuncs returns a set of template funcs bound to the supplied args.
func (a *Generator) newTemplateFuncs() template.FuncMap {
return template.FuncMap{
"filterFields": a.filterFields,
"shortName": a.shortName,
"nullcheck": a.nullcheck,
"hasColumn": a.hasColumn,
"columnNames": a.columnNames,
"columnNamesQuery": a.columnNamesQuery,
"columnPrefixNames": a.columnPrefixNames,
"hasField": a.hasField,
"fieldNames": a.fieldNames,
"goParam": a.goParam,
"goEncodedParam": a.goEncodedParam,
"goParams": a.goParams,
"goEncodedParams": a.goEncodedParams,
"escape": a.escape,
"toLower": a.toLower,
"pluralize": a.pluralize,
}
} |
// Retrieve a specific config.
func GetConfig(networkId string, configType string, key string) (interface{}, error) {
client, conn, err := getConfigServiceClient()
if err != nil {
return nil, err
}
defer conn.Close()
req := &protos.GetOrDeleteConfigRequest{NetworkId: networkId, Type: configType, Key: key}
val, err := client.GetConfig(context.Background(), req)
if err != nil {
return nil, err
}
return serde.Deserialize(SerdeDomain, configType, val.GetValue())
} |
#!/usr/bin/env python
import argparse
import logging
import asyncio
# Command-line interface: one positional source path, a required target
# bucket (which may include a key prefix) and an optional S3 region.
parser = argparse.ArgumentParser(
    description='Upload a file or a directory to s3')
parser.add_argument('path', nargs=1,
                    help='location of files to upload')
parser.add_argument('--bucket', '-b', nargs=1, required=True,
                    help='Bucket name to upload files to')
parser.add_argument('--region', '-r', dest='region', default='us-east-1',
                    help='S3 region to upload files')
args = parser.parse_args()
def upload(options, loop=None):
    """Start uploading ``options.path[0]`` to S3 and return the coroutine.

    ``options.bucket[0]`` may contain a key prefix ("bucket/some/prefix");
    everything after the first '/' is used as the key.
    """
    from cloud.aws import AsyncioBotocore
    # Split "bucket/prefix" on the first slash; key is '' when absent.
    bucket, _, key = options.bucket[0].partition('/')
    s3 = AsyncioBotocore('s3', options.region, loop=loop)
    return s3.upload_folder(bucket, options.path[0], key=key)
if __name__ == "__main__":
    logging.basicConfig(format='%(message)s', level=logging.INFO)
    loop = asyncio.get_event_loop()
    # Reuse the module-level ``args`` instead of parsing the command line a
    # second time.
    result = upload(args, loop=loop)
    loop.run_until_complete(result)
def add_sub(hub, payload):
    """Register a sub on ``hub`` from a remote payload.

    ``payload`` must carry ``'args'`` and ``'kwargs'`` entries, which are
    forwarded verbatim to ``hub.pop.sub.add``.
    """
    args = payload['args']
    kwargs = payload['kwargs']
    hub.pop.sub.add(*args, **kwargs)
package se.sics.mspsim.cli;
import java.io.File;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import se.sics.mspsim.core.EmulationException;
import se.sics.mspsim.util.ActiveComponent;
import se.sics.mspsim.util.ComponentRegistry;
import se.sics.mspsim.util.MapTable;
/**
 * Command-line handler for the MSPSim CLI.  Parses command lines (including
 * "a | b" pipelines), resolves each stage to a registered Command or a
 * script file, wires the stages' output streams together and executes them.
 * Pipelines whose first command is asynchronous get a pid and are tracked so
 * they can be listed with "ps" and stopped with "kill".
 */
public class CommandHandler implements ActiveComponent, LineListener {
  // Directory searched for script files when a command name is not registered.
  private String scriptDirectory = "scripts";
  private Hashtable<String, Command> commands = new Hashtable<String, Command>();
  protected final PrintStream out;
  protected final PrintStream err;
  private MapTable mapTable;
  private ComponentRegistry registry;
  // Running asynchronous pipelines; always accessed synchronized on itself.
  private ArrayList<CommandContext[]> currentAsyncCommands = new ArrayList<CommandContext[]>();
  // Source of pids handed out to asynchronous pipelines.
  private int pidCounter = 0;
  public CommandHandler(PrintStream out, PrintStream err) {
    this.out = out;
    this.err = err;
    registerCommands();
  }
  // Lazily fetches the MapTable from the registry (null until available).
  private MapTable getMapTable() {
    if (mapTable == null && registry != null) {
      mapTable = (MapTable) registry.getComponent(MapTable.class);
    }
    return mapTable;
  }
  // Add it to the command table (overwriting anything there)
  public void registerCommand(String cmd, Command command) {
    commands.put(cmd, command);
  }
  /**
   * Parses and runs one command line.  A null context means the handler's
   * own streams terminate the pipeline.  Returns 0 on success, non-zero on
   * parse, lookup or execution failure.
   */
  public int executeCommand(String commandLine, CommandContext context) {
    String[][] parts;
    final PrintStream cOut = context == null ? this.out : context.out;
    final PrintStream cErr = context == null ? this.err : context.err;
    try {
      parts = CommandParser.parseCommandLine(commandLine);
    } catch (Exception e) {
      cErr.println("Error: failed to parse command:");
      e.printStackTrace(cErr);
      return -1;
    }
    if (parts == null || parts.length == 0) {
      // Nothing to execute
      return 0;
    }
    Command[] cmds = createCommands(parts, cErr);
    if(cmds != null && cmds.length > 0) {
      CommandContext[] commands = new CommandContext[parts.length];
      boolean error = false;
      int pid = -1;
      // Only a pipeline whose FIRST command is asynchronous gets a pid.
      for (int i = 0; i < parts.length; i++) {
        String[] args = parts[i];
        Command cmd = cmds[i];
        if (i == 0 && cmd instanceof AsyncCommand) {
          pid = ++pidCounter;
        }
        commands[i] = new CommandContext(this, getMapTable(), commandLine, args, pid, cmd);
        // Pipe each stage's output into the next stage's line listener.
        if (i > 0) {
          PrintStream po = new PrintStream(new LineOutputStream((LineListener) commands[i].getCommand()));
          commands[i - 1].setOutput(po, cErr);
        }
        // Last element also needs output!
        if (i == parts.length - 1) {
          commands[i].setOutput(cOut, cErr);
        }
        // TODO: Check if first command is also LineListener and set it up for input!!
      }
      // Execute when all is set-up in opposite order...
      int index = commands.length - 1;
      try {
        for (; index >= 0; index--) {
          int code = commands[index].getCommand().executeCommand(commands[index]);
          if (code != 0) {
            cErr.println("command '" + commands[index].getCommandName() + "' failed with error code " + code);
            error = true;
            break;
          }
        }
      } catch (Exception e) {
        cErr.println("Error: Command failed: " + e.getMessage());
        e.printStackTrace(cErr);
        error = true;
        // Emulation errors are rethrown immediately, skipping the normal
        // cleanup below.
        if (e instanceof EmulationException) {
          throw (EmulationException) e;
        }
      }
      if (error) {
        // Stop any commands that have been started
        for (index++; index < commands.length; index++) {
          commands[index].stopCommand();
        }
        return 1;
      } else if (pid < 0) {
        // The first command is not asynchronous. Make sure all commands have stopped.
        exitCommands(commands);
      } else {
        // If any stage already exited, tear the whole pipeline down now;
        // otherwise track it so ps/kill can find it.
        boolean exited = false;
        for (int i = 0; i < commands.length && !exited; i++) {
          if (commands[i].hasExited()) {
            exited = true;
          }
        }
        if (exited) {
          exitCommands(commands);
        } else {
          synchronized (currentAsyncCommands) {
            currentAsyncCommands.add(commands);
          }
        }
      }
      return 0;
    }
    return -1;
  }
  // This will return an instance that can be configured -
  // which is basically not OK... TODO - fix this!!!
  private Command getCommand(String cmd) {
    Command command = commands.get(cmd);
    if (command != null) {
      return (Command) command.getInstance();
    }
    // Fall back to a readable script file with the command's name.
    File scriptFile = new File(scriptDirectory, cmd);
    if (scriptFile.isFile() && scriptFile.canRead()) {
      return new ScriptCommand(scriptFile);
    }
    return null;
  }
  // Resolves every stage of a parsed pipeline to a Command and validates it
  // (existence, pipe-input support, minimum argument count).  Returns null
  // and prints a message on any failure.
  private Command[] createCommands(String[][] commandList, PrintStream err) {
    Command[] cmds = new Command[commandList.length];
    for (int i = 0; i < commandList.length; i++) {
      Command command = getCommand(commandList[i][0]);
      if (command == null) {
        err.println("CLI: Command not found: \"" + commandList[i][0] + "\". Try \"help\".");
        return null;
      }
      // Every command after a pipe must be able to consume input lines.
      if (i > 0 && !(command instanceof LineListener)) {
        err.println("CLI: Error, command \"" + commandList[i][0] + "\" does not take input.");
        return null;
      }
      // TODO replace with command name
      String argHelp = command.getArgumentHelp(null);
      if (argHelp != null) {
        // Each '<' in the usage string marks one required argument.
        int requiredCount = 0;
        for (int j = 0, m = argHelp.length(); j < m; j++) {
          if (argHelp.charAt(j) == '<') {
            requiredCount++;
          }
        }
        if (requiredCount > commandList[i].length - 1) {
          // Too few arguments
          err.println("Too few arguments for " + commandList[i][0]);
          err.println("Usage: " + commandList[i][0] + ' ' + argHelp);
          return null;
        }
      }
      cmds[i] = command;
    }
    return cmds;
  }
  public void init(String name, ComponentRegistry registry) {
    this.registry = registry;
  }
  // Lets every registered CommandBundle contribute its commands.
  public void start() {
    Object[] commandBundles = registry.getAllComponents(CommandBundle.class);
    if (commandBundles != null) {
      for (int i = 0, n = commandBundles.length; i < n; i++) {
        ((CommandBundle) commandBundles[i]).setupCommands(registry, this);
      }
    }
  }
  // Registers the built-in commands: help, ps and kill.
  private void registerCommands() {
    registerCommand("help", new BasicCommand("show help for the specified command or command list", "[command]") {
      public int executeCommand(CommandContext context) {
        if (context.getArgumentCount() == 0) {
          // No argument: print a sorted table of all commands.
          context.out.println("Available commands:");
          String[] names = commands.keySet().toArray(new String[commands.size()]);
          Arrays.sort(names);
          for(String name : names) {
            Command command = commands.get(name);
            String helpText = command.getCommandHelp(name);
            if (helpText != null) {
              String argHelp = command.getArgumentHelp(name);
              String prefix = argHelp != null ? (' ' + name + ' ' + argHelp) : (' ' + name);
              int n;
              if ((n = helpText.indexOf('\n')) > 0) {
                // Show only first line as short help if help text consists of several lines
                helpText = helpText.substring(0, n);
              }
              context.out.print(prefix);
              // Pad with tabs so the short help lines up in columns.
              int prefixLen = prefix.length();
              if (prefixLen < 8) {
                context.out.print("\t\t\t\t");
              } else if (prefixLen < 16) {
                context.out.print("\t\t\t");
              } else if (prefixLen < 24) {
                context.out.print("\t\t");
              } else if (prefixLen < 32) {
                context.out.print('\t');
              }
              context.out.print(' ');
              context.out.println(helpText);
            }
          }
          return 0;
        }
        // With an argument: show the full help for that single command.
        String cmd = context.getArgument(0);
        Command command = getCommand(cmd);
        if (command != null) {
          String helpText = command.getCommandHelp(cmd);
          String argHelp = command.getArgumentHelp(cmd);
          context.out.print(cmd);
          if (argHelp != null && argHelp.length() > 0) {
            context.out.print(' ' + argHelp);
          }
          context.out.println();
          if (helpText != null && helpText.length() > 0) {
            context.out.println("  " + helpText);
          }
          return 0;
        }
        context.err.println("Error: unknown command '" + cmd + '\'');
        return 1;
      }
    });
    registerCommand("ps", new BasicCommand("list current executing commands/processes", "") {
      public int executeCommand(CommandContext context) {
        if (currentAsyncCommands.size() > 0) {
          context.out.println(" PID\tCommand");
          for (int i = 0; i < currentAsyncCommands.size(); i++) {
            // The first stage of the pipeline represents the whole pipeline.
            CommandContext cmd = currentAsyncCommands.get(i)[0];
            context.out.println("  " + cmd);
          }
        } else {
          context.out.println("No executing commands.");
        }
        return 0;
      }
    });
    registerCommand("kill", new BasicCommand("kill a currently executing command", "<process>") {
      public int executeCommand(CommandContext context) {
        int pid = context.getArgumentAsInt(0);
        if (removePid(pid)) {
          return 0;
        }
        context.err.println("could not find the command to kill.");
        return 1;
      }
    });
  }
  // Called by a command when it exits; pid < 0 marks a synchronous command.
  public void exit(CommandContext commandContext, int exitCode, int pid) {
    if (pid < 0 || !removePid(pid)) {
      commandContext.stopCommand();
    }
  }
  // Removes and stops the asynchronous pipeline with the given pid.
  // Returns false when no such pipeline is tracked.
  private boolean removePid(int pid) {
    CommandContext[] contexts = null;
    synchronized (currentAsyncCommands) {
      for (int i = 0, n = currentAsyncCommands.size(); i < n; i++) {
        CommandContext[] cntx = currentAsyncCommands.get(i);
        if (pid == cntx[0].getPID()) {
          contexts = cntx;
          currentAsyncCommands.remove(cntx);
          break;
        }
      }
    }
    return exitCommands(contexts);
  }
  // Stops every stage of a pipeline; returns false for null input.
  private boolean exitCommands(CommandContext[] contexts) {
    if (contexts != null) {
      for (int i = 0; i < contexts.length; i++) {
        contexts[i].stopCommand();
      }
      return true;
    }
    return false;
  }
  // LineListener: every line fed to the handler is executed as a command line.
  public void lineRead(String line) {
    executeCommand(line, null);
  }
}
|
<gh_stars>0
#include "Pargon/Application.h"
#include "Pargon/Files.h"
#include "Pargon/Graphics.DirectX11.h"
#include "Pargon/Audio.XAudio2.h"
#include "Pargon/Input.Win32.h"
#define NOMINMAX
#define WIN32_LEAN_AND_MEAN
#include <Windows.h>
using namespace Pargon;
// Sample Pargon application: draws a quad textured with an eagle image that
// the arrow keys move around, and plays an ambience sound, wired up to the
// DirectX11, XAudio2 and Win32 backends.
class Playground : ApplicationInterface
{
public:
	Application Application;
	GraphicsDevice Graphics;
	AudioDevice Audio;
	KeyboardDevice Keyboard;
	Log Log;
private:
	Geometry* _quad;
	Texture* _eagle;
	Material* _material;
	Mixer* _mixer;
	Sound* _sound;
	SoundInstance _instance;
	Vector2 _position = {};
	KeyboardState _state = {};
	// Vertex format consumed by the material: 2D position + texture coordinate.
	struct Vertex
	{
		float X, Y;
		float U, V;
	};
public:
	// Initializes the application and enters its main loop.
	void Run()
	{
		WindowSettings settings = { "Playground"_s, WindowStyle::Resizable, 100, 100, 800, 600 };
		auto& info = Application.Initialize("Playground", *this);
		info.WriteInformation(Log);
		info.WriteCapabilities(Log);
		Application.Run(settings);
	}
protected:
	// Creates all devices and resources: keyboard input, renderer, texture,
	// quad geometry, material, and the ambience sound (started immediately).
	void Setup() override
	{
		auto input = CreateWin32KeyboardInput();
		Keyboard.Setup(Application, std::move(input));
		auto renderer = CreateDirectX11Renderer();
		auto& info = Graphics.Setup(Application, std::move(renderer));
		info.WriteInformation(Log);
		info.WriteCapabilities(Log);
		auto soundFile = ApplicationDirectory().GetFile("Assets/Ambience.wav");
		auto eagleFile = ApplicationDirectory().GetFile("Assets/Eagle.png");
		_eagle = Graphics.CreateTexture(GraphicsStorage::TransferredToGpu);
		_eagle->Reset(eagleFile);
		_eagle->Unlock();
		_quad = Graphics.CreateGeometry(GraphicsStorage::TransferredToGpu);
		_quad->Unlock();
		_material = Graphics.CreateMaterial(GraphicsStorage::TransferredToGpu);
		_material->Reset("Assets/Material.hlsl");
		_material->SampleOptions.Add(BilinearSampling);
		_material->VertexLayout.Add({ ShaderElementType::Vector2, ShaderElementUsage::Position });
		_material->VertexLayout.Add({ ShaderElementType::Vector2, ShaderElementUsage::Coordinate });
		_material->Output.Add({ DisabledBlending, DisabledBlending });
		_material->Unlock();
		auto player = CreateXAudio2Player();
		Audio.Setup(std::move(player));
		_mixer = Audio.CreateMixer(2);
		_sound = Audio.CreateSound(*_mixer);
		auto soundData = soundFile.ReadData();
		_sound->SetData(std::move(soundData.Data));
		_instance = _sound->Play(nullptr);
	}
	// Per-frame update: moves the quad with the arrow keys, rebuilds its
	// vertex data and renders the frame.
	void Process(Time elapsed) override
	{
		Keyboard.Update(_state);
		// Scale movement by elapsed time so speed is frame-rate independent.
		auto speed = 0.5f * (float)elapsed.InSeconds();
		if (_state.IsKeyDown(Key::Left)) _position.X -= speed;
		if (_state.IsKeyDown(Key::Right)) _position.X += speed;
		if (_state.IsKeyDown(Key::Down)) _position.Y -= speed;
		if (_state.IsKeyDown(Key::Up)) _position.Y += speed;
		// Rebuild the quad as a triangle strip centered on _position.
		_quad->Lock();
		_quad->Reset<Vertex>(GeometryTopology::TriangleStrip, {
			Vertex{ _position.X - 0.1f, _position.Y - 0.1f, 0.0f, 1.0f },
			Vertex{ _position.X - 0.1f, _position.Y + 0.1f, 0.0f, 0.0f },
			Vertex{ _position.X + 0.1f, _position.Y - 0.1f, 1.0f, 1.0f },
			Vertex{ _position.X + 0.1f, _position.Y + 0.1f, 1.0f, 0.0f }
		});
		_quad->Unlock();
		Graphics.SetColorTarget({}, 0);
		Graphics.SetDepthStencilTarget({});
		Graphics.ClearColorTarget(0.5f, 0.25f, 0.5f, 1.0f);
		Graphics.SetTexture(_eagle->Id(), 0);
		Graphics.SetVertexBuffer(_quad->Id(), sizeof(Vertex));
		Graphics.SetMaterial(_material->Id());
		Graphics.Draw(0, Graphics.DrawAll);
		Graphics.Render(GraphicsDevice::NoSynchronization);
	}
	// Releases audio and graphics resources in reverse order of creation.
	void Shutdown() override
	{
		_instance.Stop();
		Audio.DestroySound(*_sound);
		Audio.DestroyMixer(*_mixer);
		Graphics.DestroyMaterial(_material->Id());
		Graphics.DestroyGeometry(_quad->Id());
		Graphics.DestroyTexture(_eagle->Id());
	}
};
// Win32 entry point: constructs the playground application and runs it until
// it exits.
auto CALLBACK WinMain(_In_ HINSTANCE hInstance, _In_opt_ HINSTANCE hPrevInstance, _In_ LPSTR lpCmdLine, _In_ int nCmdShow) -> int
{
	Playground playground;
	playground.Run();
	return 0;
}
|
import { Injectable } from '@nestjs/common';
import { CreateUserDto } from 'src/users/users.dto';
import { LoginDto } from './auth.dto';
@Injectable()
export class AuthService {
  /** Placeholder sign-up handler; currently returns the marker string 'signIn'. */
  signIn(user: CreateUserDto): string {
    return 'signIn';
  }

  /** Placeholder login handler; currently returns the marker string 'login'. */
  login(user: LoginDto): string {
    return 'login';
  }

  /** Placeholder logout handler; currently returns the marker string 'logout'. */
  logout(): string {
    return 'logout';
  }
}
|
/**
* Method 'markAsRead()' should complete successfully if response code is
* 205.
* @throws Exception Thrown in case of error.
*/
@Test
public void markAsReadOkIfResponseStatusIs205() throws Exception {
MkContainer container = null;
try {
container = new MkGrizzlyContainer().next(
new MkAnswer.Simple(HttpURLConnection.HTTP_RESET)
).start(this.resource.port());
final Request req = new ApacheRequest(container.home());
final Github github = Mockito.mock(Github.class);
Mockito.when(github.entry()).thenReturn(req);
new RtUser(
github,
req
).markAsRead(new Date());
} finally {
container.close();
}
} |
<gh_stars>0
package org.infinispan.objectfilter.impl.hql;
import java.util.LinkedList;
import java.util.List;
/**
 * Describes an embedded entity reached from an embedding entity through a
 * property path, delegating all property checks to an
 * {@link ObjectPropertyHelper}.
 *
 * @author <EMAIL>
 * @since 7.0
 */
public final class FilterEmbeddedEntityTypeDescriptor implements FilterTypeDescriptor {

   // The entity into which this entity is embedded.
   private final String entityType;
   // Property path from the embedding entity to this embedded entity.
   private final List<String> propertyPath;
   private final ObjectPropertyHelper propertyHelper;

   /**
    * Creates a new {@link FilterEmbeddedEntityTypeDescriptor}.
    *
    * @param entityType the entity into which this entity is embedded
    * @param path the property path from the embedding entity to this entity
    * @param propertyHelper a helper for dealing with properties
    */
   public FilterEmbeddedEntityTypeDescriptor(String entityType, List<String> path, ObjectPropertyHelper propertyHelper) {
      this.entityType = entityType;
      this.propertyPath = path;
      this.propertyHelper = propertyHelper;
   }

   @Override
   public boolean hasProperty(String propertyName) {
      return propertyHelper.hasProperty(entityType, appendToPath(propertyName));
   }

   @Override
   public boolean hasEmbeddedProperty(String propertyName) {
      return propertyHelper.hasEmbeddedProperty(entityType, appendToPath(propertyName));
   }

   // Builds a fresh path made of this entity's path plus one extra step.
   private List<String> appendToPath(String propertyName) {
      List<String> extendedPath = new LinkedList<String>(propertyPath);
      extendedPath.add(propertyName);
      return extendedPath;
   }

   @Override
   public String getEntityType() {
      return entityType;
   }

   @Override
   public String toString() {
      return propertyPath.toString();
   }
}
|
/*
* Open Source Physics software is free software as described near the bottom of this code file.
*
* For additional information and documentation on Open Source Physics please see:
* <http://www.opensourcephysics.org/>
*/
package org.opensourcephysics.media.core;
import java.awt.event.MouseEvent;
import java.awt.geom.Point2D;
import org.opensourcephysics.display.DrawingPanel;
import org.opensourcephysics.display.axes.CartesianCoordinateStringBuilder;
/**
 * A coordinate string builder for a video panel: mouse positions are reported
 * in the panel's world coordinates rather than in pixel coordinates.
 */
public class VidCartesianCoordinateStringBuilder extends CartesianCoordinateStringBuilder {

  protected VidCartesianCoordinateStringBuilder() {
    super();
  }

  protected VidCartesianCoordinateStringBuilder(String xLabel, String yLabel) {
    super(xLabel, yLabel);
  }

  /**
   * Converts the pixel coordinates in a mouse event into world coordinates and
   * returns these coordinates in a string.
   *
   * @param panel the drawing panel
   * @param e the mouse event
   * @return the coordinate string
   */
  public String getCoordinateString(DrawingPanel panel, MouseEvent e) {
    if (panel instanceof VideoPanel) {
      Point2D worldPoint = ((VideoPanel) panel).getWorldMousePoint();
      return getCoordinateString(worldPoint.getX(), worldPoint.getY());
    }
    // Not a video panel: fall back to plain Cartesian formatting.
    return super.getCoordinateString(panel, e);
  }

  /**
   * Returns the specified xy coordinates in a string.
   *
   * @param x the x
   * @param y the y
   * @return the coordinate string
   */
  public String getCoordinateString(double x, double y) {
    double absX = Math.abs(x);
    double absY = Math.abs(y);
    // Use scientific notation whenever either magnitude would print poorly
    // with the fixed decimal format (too large or too small).
    boolean scientific = absX > 100 || absX < 0.01 || absY > 100 || absY < 0.01;
    if (scientific) {
      return xLabel + scientificFormat.format((float) x) + yLabel + scientificFormat.format((float) y);
    }
    return xLabel + decimalFormat.format((float) x) + yLabel + decimalFormat.format((float) y);
  }
}
/*
* Open Source Physics software is free software; you can redistribute
* it and/or modify it under the terms of the GNU General Public License (GPL) as
* published by the Free Software Foundation; either version 2 of the License,
* or(at your option) any later version.
* Code that uses any portion of the code in the org.opensourcephysics package
* or any subpackage (subdirectory) of this package must must also be be released
* under the GNU GPL license.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston MA 02111-1307 USA
* or view the license online at http://www.gnu.org/copyleft/gpl.html
*
* Copyright (c) 2007 The Open Source Physics project
* http://www.opensourcephysics.org
*/
|
Infrared Excess and Molecular Clouds: A Comparison of New Surveys of Far-Infrared and H I 21 Centimeter Emission at High Galactic Latitudes
We have created a map of the large-scale infrared surface brightness in excess of that associated with the atomic interstellar medium, using region-by-region correlations between the far-infrared and 21 cm line surface brightness. Our study updates and extends a previous attempt with the Infrared Astronomical Satellite and Berkeley/Parkes H I surveys; in this study we used far-infrared (60-240 μm) data from the Cosmic Background Explorer Diffuse Infrared Background Experiment and 21 cm data from the combined Leiden-Dwingeloo and Parkes 21 cm line surveys. Using the maps of excess infrared emission at 100, 140, and 240 μm, we created an atlas and identified the coherent structures. These infrared excess clouds can be caused both by dust that is warmer than average and by dust associated with gas other than the atomic interstellar medium. We find very few warm clouds—which are relatively bright at 60 μm—such as the H II region around the high-latitude B-type star α Vir and a new cloud of unknown origin that we name DIR 015+54. Using the ratio of 100 to 240 μm brightness, we find that infrared excess clouds are cold. The dust temperature in atomic gas is 19 ± 2 K, while the dust temperature in known high-latitude molecular clouds (all of which have infrared excess) is 15.5 ± 1 K. The dust temperature in those infrared excess clouds that are not known to be associated with molecular clouds (generally because they have never been observed) is 17 ± 2 K, suggesting they are similar to high-latitude molecular clouds. Infrared excess clouds are peaks of column density rather than dust temperature, and their excess infrared emission is likely due to dust associated with molecular gas. For a large region in Ursa Major-Ursa Minor-Camelopardalis, where the CO(1 → 0) line has been surveyed, we correlated the infrared excess with the CO line integral, allowing us to measure X = N(H₂)/W(CO) = (1.3 ± 0.2) × 10²⁰ cm⁻² (K km s⁻¹)⁻¹ for high-latitude molecular clouds.
Our measurement of X takes into account the low dust temperature in molecular gas; this correction amounts to a factor of 3.8 increase in the X-value that would naively be determined using only 100 μm, CO, and H I data. Our value of X is consistent with a recent γ-ray determination for the same region, while it is a factor of about 2 lower than the value determined for the inner galactic plane. The surface mass density of infrared excess clouds is 0.3 M☉ pc⁻². The atlas of infrared excess clouds may be useful as a guide to regions of relatively high interstellar column density, which might extinguish light from extragalactic objects at optical to ultraviolet wavelengths and confuse structures in the cosmic background at infrared to microwave wavelengths.
package com.amyliascarlet.jsontest.bvt.bug;
import junit.framework.TestCase;
import com.amyliascarlet.lib.json.JSON;
import com.amyliascarlet.lib.json.annotation.JSONField;
import com.amyliascarlet.lib.json.serializer.SerializerFeature;
/**
 * Smoke test: serializes a bean whose {@code @JSONField} annotation renames
 * the property to "ID" and requests WriteClassName on a primitive field.
 */
public class Bug_for_field extends TestCase {

    public void test_annotation() throws Exception {
        VO bean = new VO();
        bean.setId(123);
        String json = JSON.toJSONString(bean);
        System.out.println(json);
    }

    public static class VO {

        @JSONField(name = "ID", serialzeFeatures={SerializerFeature.WriteClassName})
        private long id;

        public long getId() {
            return id;
        }

        public void setId(long id) {
            this.id = id;
        }
    }
}
|
package com.codepath.simpletodo.data;
import java.io.Serializable;
/**
* Created by Karl on 9/26/2016.
*/
/**
 * Serializable model for a single to-do entry: a title ({@code item}), an
 * optional note, a priority (LOW/MEDIUM/HIGH), a due date stored as a long,
 * a done flag ({@code status}) and an id (-1 until one is assigned).
 */
public class TodoListItem implements Serializable {

    /** Priority levels. */
    public static final int LOW = 0;
    public static final int MEDIUM = 1;
    public static final int HIGH = 2;

    private static final long serialVersionUID = 4654897646L;

    private long id;
    private long dueDate;
    private String item;
    private String note;
    private int priority;
    private boolean status;

    /** Creates an empty item; its id of -1 marks it as not yet assigned. */
    public TodoListItem() {
        id = -1L;
    }

    /** Creates a fully populated item. */
    public TodoListItem(long id, String item, String note, int priority, long dueDate, boolean status) {
        this.id = id;
        this.item = item;
        this.note = note;
        this.priority = priority;
        this.dueDate = dueDate;
        this.status = status;
    }

    public long getId() { return id; }

    public void setId(long id) { this.id = id; }

    public String getItem() { return item; }

    public void setItem(String item) { this.item = item; }

    public String getNote() { return note; }

    public void setNote(String note) { this.note = note; }

    public int getPriority() { return priority; }

    public void setPriority(int priority) { this.priority = priority; }

    public long getDueDate() { return dueDate; }

    public void setDueDate(long dueDate) { this.dueDate = dueDate; }

    public boolean getStatus() { return status; }

    public void setStatus(boolean status) { this.status = status; }
}
|
<filename>Mobile App/Code/sources/com/google/android/gms/internal/C2052or.java
package com.google.android.gms.internal;
import android.os.Bundle;
import android.os.Parcel;
import com.google.android.gms.common.internal.safeparcel.SafeParcelable;
/* renamed from: com.google.android.gms.internal.or */
// Decompiled SafeParcelable record: an event time, a tag, an opaque byte
// payload and an optional string-to-string extras bundle.  Field names are
// obfuscated; the meanings noted below are taken from toString()'s labels.
public class C2052or implements SafeParcelable {
    public static final C2056ot CREATOR = new C2056ot();
    // Labeled "eventTime" by toString().
    public final long amW;
    // Opaque payload bytes; not included in toString().
    public final byte[] amX;
    // Labeled "keyValues" by toString(); built from alternating key/value strings.
    public final Bundle amY;
    public final String tag;
    public final int versionCode;
    C2052or(int i, long j, String str, byte[] bArr, Bundle bundle) {
        this.versionCode = i;
        this.amW = j;
        this.tag = str;
        this.amX = bArr;
        this.amY = bundle;
    }
    // Convenience constructor: version is fixed to 1 and the trailing strings
    // are packed pairwise into the extras bundle.
    public C2052or(long j, String str, byte[] bArr, String... strArr) {
        this.versionCode = 1;
        this.amW = j;
        this.tag = str;
        this.amX = bArr;
        this.amY = m6059g(strArr);
    }
    /* renamed from: g */
    // Packs alternating key/value strings into a Bundle.  Returns null for a
    // null or empty array; throws IllegalArgumentException on an odd count.
    private static Bundle m6059g(String... strArr) {
        if (strArr == null) {
            return null;
        }
        if (strArr.length % 2 != 0) {
            throw new IllegalArgumentException("extras must have an even number of elements");
        }
        int length = strArr.length / 2;
        if (length == 0) {
            return null;
        }
        Bundle bundle = new Bundle(length);
        for (int i = 0; i < length; i++) {
            int i2 = i * 2;
            bundle.putString(strArr[i2], strArr[i2 + 1]);
        }
        return bundle;
    }
    public int describeContents() {
        return 0;
    }
    // Renders the tag, event time and (when present) the extras bundle.
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("tag=");
        sb.append(this.tag);
        sb.append(",");
        sb.append("eventTime=");
        sb.append(this.amW);
        sb.append(",");
        if (this.amY != null && !this.amY.isEmpty()) {
            sb.append("keyValues=");
            for (String str : this.amY.keySet()) {
                sb.append("(");
                sb.append(str);
                sb.append(",");
                sb.append(this.amY.getString(str));
                sb.append(")");
                sb.append(" ");
            }
        }
        return sb.toString();
    }
    // Parceling is delegated to the CREATOR's companion class.
    public void writeToParcel(Parcel parcel, int i) {
        C2056ot.m6063a(this, parcel, i);
    }
}
|
/**
 * Placeholder demonstration class (no behavior of its own).
 */
public class BlaClass {
    /**
     * Placeholder generic inner class.
     *
     * @param <T> the type parameter
     */
    public class Inner<T> {
    }
}
Noise reduction techniques in pulse width modulated inductive switching systems of PSLV
The subject of the paper is the techniques employed in the Polar Satellite Launch Vehicle to reduce electromagnetic interference in control systems which have pulse width modulated inductive switching systems. There are 3 such control systems in PSLV. PWM servo amplifiers are generally preferred in control systems for better efficiency and to reduce dissipation losses. However, the PWM signal itself creates an EMI environment. Hence care must be taken in the design phase of control electronics to minimise the EMI effects of PWM. This includes radiation effects as well as interference due to switching spikes. In general, the main sources of interferences are the PWM switching signal, inductive switching of motors by PWM with heavy currents, and subsequent generation of large magnetic fields, ground loop formations between power and signal grounds and improper bunching and harnessing of cables. The paper attempts to categorise the various interference sources, and to share the experiences in PSLV programme as to how in the design phase these interferences are taken care of. The forms of interferences are studied and the methods of noise reduction techniques are illustrated in a case study of the 3rd stage control system. |
She's deadly, she's powerful, she's got six arms! It's the X-Men's Spiral and here she's been revamped in Marvel Legends style. I started with the upper half of a Target Exclusive Spiral and mounted it to a Captain Marvel lower body. She sports Black Widow's head, different dance-pose hands, and Spiral's leg fur was hollowed out and attached to the new legs. Winter Soldier's belt was added along with the original Spiral's helmet. A modded Scarlet Witch hex became her magic effect and she can wield three different swords with the new hands. One airbrushed paint job later and Spiral was ready to capture the X-Men in the name of Mojo!
Want to own one of my customs like this one? Head on over to my *Ebay Auctions* this week to see what's available.
Looking to buy some custom action figures like the ones you see here? Look no further! You can find Marvel Legends, Transformers, GI Joe, DC Comics, video game characters, and other great customized toys in the links below. |
def to_kurdish(self, solar=False):
    """Convert this date to the Kurdish calendar.

    The Kurdish year is the Jalali year plus 1321.  With ``solar=False``
    (default) the conversion uses ``self.shamsi_date``; with a truthy
    ``solar`` it uses ``self.whole_year``.

    Returns a ``JalaliDate`` carrying the shifted year.
    """
    # A single truthiness test also covers truthy non-bool values, which
    # previously fell through both ``== False``/``== True`` branches and
    # crashed with an unbound ``ku_date``.
    base = self.whole_year if solar else self.shamsi_date
    return JalaliDate(base.year + 1321, base.month, base.day)
//calculates the nodes of the output layer. It will be used if any other activation besides softmax will be used
private void calculate_activation_nodes(double[] last_layer_vals, double[][] weights){
double sum = 0.0;
for(int row = 0; row < weights.length; row++){
for(int column = 0; column < weights[row].length; column++){
sum += weights[row][column] * last_layer_vals[column];
}
super.nodes[row] = super.activation.activate(sum + super.biases[row]);
sum = 0.0;
}
} |
Fans are invited to pick the worst episode of Arrow Season 3 in the annual GreenArrowTV Awards.
Earlier in the week, we had fans voting for the Best Episode of Arrow Season 3 as part of our annual GreenArrowTV Awards… now, it’s time to pick the worst. This is assuming that not all 23 episodes of Arrow Season 3 were perfection, so you’re stuck with picking one that… wasn’t as good as usual.
Things should get interesting this year, as we don’t have a Huntress episode to be readers’ go-to choice for worst this time. Still… a bad episode could very well cause people alarm, like Felicity and Ray have in the picture to your right.
Results for the 2015 GreenArrowTV Awards will be announced later this summer. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.