content
stringlengths 10
4.9M
|
---|
<gh_stars>10-100
import pytest
from slackclient._channel import Channel
from slackclient._server import Server
from slackclient._client import SlackClient
@pytest.fixture
def server(monkeypatch):
myserver = Server('xoxp-1234123412341234-12341234-1234', False)
return myserver
@pytest.fixture
def slackclient(server):
myslackclient = SlackClient('xoxp-1234123412341234-12341234-1234')
return myslackclient
@pytest.fixture
def channel(server):
mychannel = Channel(server, "somechannel", "C12341234", ["user"])
return mychannel
|
import java.io.*;
import java.math.*;
import java.util.*;
public class Solution {
static PrintWriter out = new PrintWriter(System.out);
public static void main(String[] args) throws Exception{
StreamTokenizer in = new StreamTokenizer(new BufferedReader(new InputStreamReader(System.in)));
int[] size = new int[5];
for (int i =0; i<5; i++) {
in.nextToken();
size[i] = (int)in.nval;
}
in.nextToken();
int kol = (int)in.nval;
String s = "";
int z;
int left, right;
for (int i = 0; i<kol; i++) {
in.nextToken();
s = in.sval;
if (s.equals("S")) z = 0;
else
if (s.equals("M")) z = 1;
else
if (s.equals("L")) z = 2;
else
if (s.equals("XL")) z = 3;
else
z = 4;
left = z; right = z;
while (true) {
if (right<5 && size[right]>0) {
size[right]--;
ans(right);
out.print("\n");
break;
}
else
if (left>-1 && size[left]>0){
size[left]--;
ans(left);
out.print("\n");
break;
}
right++;
left--;
}
}
out.flush();
}
private static void ans(int a) {
if (a == 0) out.print("S");
else
if (a == 1) out.print("M");
else
if (a == 2) out.print("L");
else
if (a == 3) out.print("XL");
else
if (a == 4) out.print("XXL");
}
}
|
/**
* Returns an iterator for the elements of BAG.
*/
@LispMethod(comment = "Returns an iterator for the elements of BAG.")
public static final SubLObject new_bag_iterator_alt(SubLObject v_bag) {
SubLTrampolineFile.checkType(v_bag, BAG_P);
return new_bag_contents_iterator(bag_struct_unique_contents(v_bag), bag_struct_repeat_contents(v_bag));
} |
<reponame>cxMiguelSilva/kics
package testcases
// E2E-CLI-013 - KICS root command list-platforms
// should return all the supported platforms in the CLI
func init() { //nolint
testSample := TestCase{
Name: "should list all supported platforms [E2E-CLI-013]",
Args: args{
Args: []cmdArgs{
[]string{"list-platforms"},
},
ExpectedOut: []string{
"E2E_CLI_013",
},
},
WantStatus: []int{0},
}
Tests = append(Tests, testSample)
}
|
<filename>src/main/java/com/restteam/ong/controllers/dto/AuthenticationResponse.java
package com.restteam.ong.controllers.dto;
import lombok.Data;
@Data
public class AuthenticationResponse{
private String jwt;
public AuthenticationResponse(String jwt) {
this.jwt = jwt;
}
public AuthenticationResponse() {
}
}
|
<filename>vk_parser/vk_parser.py
import vk_api
from private_configurations import LOGIN, PASSWORD, AUTH2CODE
class VKAuthHandler:
def __init__(self, login: str, password: str, auth2step=False):
"""
:param login: str
:param password: str
:param auth2step: bool
"""
self.vk_session = vk_api.vk_api.VkApi(login=login, password=password,
auth_handler=self._input_auth_code_ if auth2step else None)
try:
self.vk_session.auth()
except vk_api.AuthError as error_msg:
print(error_msg)
exit(1)
@staticmethod
def _input_auth_code_():
"""
:return: auth code: str, remember device: bool
"""
return str(input("Enter authentication code: ")), True
class VKParserHandler:
def __init__(self, vk_session: vk_api.vk_api.VkApi):
"""
:param vk_session: vk_api.vk_api.VkApi
"""
self.vk_session = vk_session
def parse_user_friends(self, ids: list):
"""
:param ids:
:return:
"""
### OLD ###
# friends = {}
# with vk_api.VkRequestsPool(vk_session) as pool:
# for user_id in ids:
# friends[user_id] = pool.method('friends.get', {
# 'user_id': user_id,
# 'fields': 'photo'
# })
# for key in friends.keys():
# if not friends[key].ok:
# # del friends[key]
# print(friends[key].error)
# else:
# print(friends[key].result)
### NEW ###
friends, errors = vk_api.vk_request_one_param_pool(
self.vk_session,
'friends.get', # Метод
key='user_id', # Изменяющийся параметр
values=ids,
# Параметры, которые будут в каждом запросе
default_values={'fields': 'photo'}
)
return friends, errors
def parse_user_pages(self, ids: list):
"""
Parsing main info from user pages.
:param ids: List of user ids.
:return: List with dictionaries contained user info.
Format: (list) [ (dict) {...}, ...]
"""
def parse_user_pages_1000_ids(ids1000: list):
assert len(ids1000) <= 1000, ValueError("Len `ids1000` should be <= 1000")
vk = self.vk_session.get_api()
return vk.users.get(user_ids=ids1000,
fields=['blacklisted', 'deactivated', 'is_closed', 'can_access_closed', 'sex', 'bdate',
'country', 'has_photo', 'last_seen', 'photo_max_orig', 'contacts'])
res = []
split_ids = [ids[i:i + 1000] for i in range(0, len(ids), 1000)]
for batch in split_ids:
res += parse_user_pages_1000_ids(batch)
return res
def parse_user_photos(self, ids: list):
"""
:param ids: List of user ids.
:return: Dictionary with albums.
Format: (dict) { (int) user_id: ( (class) album `wall`, (class) album `profile`)}
"""
def parse_album(user_id: int, album: str):
return pool.method('photos.get', {
'owner_id': user_id,
'rev': 1,
'album_id': album
})
with vk_api.VkRequestsPool(self.vk_session) as pool:
res = {user_id: (parse_album(user_id, 'wall'), parse_album(user_id, 'profile')) for user_id in ids}
return res
def parse_user_ids_groups(self, ids: list):
"""
:param ids:
:return:
"""
pass
class ParsingDataHandler(VKParserHandler):
def __init__(self, vk_session: vk_api.vk_api.VkApi):
super().__init__(vk_session)
pass
def _check_last_time_(self):
pass
def parse_ids(self, ids: list):
valid_ids = []
users_dict = {}
# parse main info from user page
users_info = self.parse_user_pages(ids)
for user_info, user_id in zip(users_info, ids):
print(user_info)
if not 'deactivated' in user_info and user_info['can_access_closed'] and not user_info['blacklisted']:
valid_ids += [user_id]
users_dict[user_id] = {'first_name': user_info['first_name'] if 'first_name' in user_info else None,
'last_name': user_info['last_name'] if 'last_name' in user_info else None,
'sex': user_info['sex'] if 'sex' in user_info else None,
'bdate': user_info['bdate'] if 'bdate' in user_info else None,
'country': user_info['country']['id'] if 'country' in user_info else None,
'images': []}
if user_info['has_photo']:
users_dict[user_id]['images'] += [user_info['photo_max_orig']]
# parse albums with images from user page
users_images = self.parse_user_photos(valid_ids)
for user_id in users_images:
for album_id in [0, 1]:
if users_images[user_id][album_id].ok and users_images[user_id][album_id].result['count'] > 0:
users_dict[user_id]['images'] += [item['sizes'][-1]['url']
for item in users_images[user_id][album_id].result['items']]
return users_dict
if __name__ == "__main__":
vk_session = VKAuthHandler(LOGIN, PASSWORD, bool(AUTH2CODE)).vk_session
vk_parser = VKParserHandler(vk_session)
# ids = [2, 293990229, 170737642]
# print(vk_parser.parse_user_friends(ids))
# print(vk_parser.parse_user_pages(ids))
# print(vk_parser.parse_user_photos(ids))
pdh = ParsingDataHandler(vk_session)
res = pdh.parse_ids([i for i in range(1, 100)] + [170737642, 293990229])
# res = pdh.parse_user_photos([1, 2, 170737642, 293990229])
# res = pdh.parse_user_photos([293990229])[293990229][0].result['items'][1]['sizes'][-1]['url']
print(res)
print(help(pdh.parse_user_photos))
|
Giants Agree to Minor-League Deal With Kensuke Tanaka by Eric He
Breaking Down the Grizzlies’ Mauling of the Warriors
Breaking Down the Grizzlies’ Mauling of the Warriors by Michael Springer
In an under-the-radar move, the A’s acquired right-handed relief pitcher Chris Resop from the Pittsburgh Pirates this off-season in exchange for minor-leaguer Zach Thornton.
Resop and the A’s avoided arbitration today with a one-year, $1.35 million deal that was first reported by MLB Trade Rumors.
Drafted by the Marlins in the fourth round in 2004, Resop has bounced around in the league quite a bit, playing for four different teams in eight seasons.
Last season, Resop was 1-4 with a 3.91 ERA in 73,2 innings pitched with the Pirates. Over his career, he owns a 9-11 record with a 4.51 ERA.
Bottom line, Resop will be just another addition to a pitching staff that is rock-steady and should be one of the best in baseball. |
s=input()
vowel=['a','e','o','u','y','i']
strings=s.lower()
new_strings=[]
for i in range(len(strings)):
if strings[i] not in vowel:
new_strings.append(strings[i])
string='.'.join(new_strings)
print('.',string,sep='')
|
/**
* Called by <code>FlxG.drawPlugins()</code> after the game state has been drawn.
* Cycles through cameras and calls <code>drawDebug()</code> on each one.
*/
@Override
public void draw()
{
FlxCamera camera = FlxG.getActiveCamera();
if (cameras == null)
cameras = FlxG.cameras;
if (!cameras.contains(camera, true))
return;
if(FlxG.visualDebug && !ignoreDrawDebug)
drawDebug(camera);
} |
A panel discussion on Wednesday night’s episode of the Republican talk show “Hannity” ended with everyone agreeing that President Barack Obama must have used the Internal Revenue Service to steal the 2012 election, keeping tea partiers so busy with filling out documents to obtain tax exempt status that they were unable to register enough new voters to put Mitt Romney over the top.
Bill Cunningham, host of the CW’s daytime talk program “The Bill Cunningham Show,” articulated this theory, and Sean Hannity appeared to agree. Cunningham also went on a conspiracy monologue to rival even Alex Jones, somehow even managing to mash-in talk of the attack on State Department personnel in Benghazi, Libya.
“They said, ‘Let’s use Chicago-style politics and destroy the tea party,'” he insisted. “It was done with the media and it was done by individuals like the IRS. There was offices in Ohio — they were scared to death about losing Ohio — there was dozens and dozens of tea party groups in Ohio. So what they said is, ‘We’re going to destroy the tea party by keeping them tamped down with their political activities.'”
He went on: “So instead of going out to register new voters, the tea party, what were they doing? Responding to the IRS with page after page. And so the 4 million fewer voters who voted for Romney who didn’t voted for McCain, 4 million less, it’s because they attacked the tea party and took away their ability to organize. Just like Benghazi. Committing to re-elect the president, not about national security but about Obama’s re-election security, the IRS was all about electing Obama.”
He then turned to fellow panelist Joe Trippi, a Democratic strategist, and began shouting at him over comments made by Fox News contributor Bob Beckel, one of the network’s token Democrats from the Carter administration. “Joe Trippi! I’m watchin’ your buddy Bob Beckel today on ‘The Five.’ Even Bob Beckel said, ‘You know what? He lied. It wasn’t about national security, it was about his re-election security.’ And Beckel said it was a coverup. Trippi, do you join Beckel in calling this a damnable coverup? Yes or no?”
Trippi said he did not think what happened in Benghazi is being covered up, but demurred to his conservative counterparts and suggested that many Democrats will side with Republicans on wanting the administration to be more transparent about spying on The Associated Press and using the IRS to selectively target tea party groups if that’s what happened.
Seeming to reinforce Cunningham’s theory, Hannity chimed in: “Joe Trippi, I got a question for you. Joe Trippi, when you look at Benghazi, IRS and the AP reporter scandal, there is one common thread. Do you know what it is?”
“No Sean, but you’re going to educate me,” Trippi replied.
“Re-elect the president, that’s number one!” Cunningham said. “And abuse the power to re-elect the president.”
So far, there’s no evidence that the White House or President Barack Obama were involved in the IRS scrutinizing overtly political groups applying for tax-exempt status. Since the IRS voluntarily disclosed this information, progressive groups have come forward as well saying they received questionnaires from the nation’s taxing authority after filing for 501(c)(4) status, which enables groups to completely conceal the sources of their funding under the Supreme Court’s ruling in Citizens United.
Attorney General Eric Holder said Wednesday that there will be a broad investigation into whether the IRS unfairly targeted conservative groups. He added that he was not personally involved with spying on The Associated Press, and that Deputy Attorney General James Cole ultimately signed the subpoena for reporters’ records. Holder also specified that the investigation which netted AP communications started after one of “the top two or three most serious leaks I have ever seen,” that “put the American people at risk.”
This video is from “Hannity,” aired Wednesday, May 15, 2013, snipped by Mediaite. |
// READ (List of Customers)
public List<PlaneModel> read(String planeManufacturerId) throws SQLException {
String sqlQuery = "select t01.id"
+ " , t01.label"
+ " , t02.id as plane_manufacturer_id"
+ " , t02.label as plane_manufacturer_label"
+ " from plane_models t01, plane_manufacturers t02"
+ " where t02.id = t01.plane_manufacturer_id"
;
if (planeManufacturerId != null) {
sqlQuery += String.format(" and t02.id = %s", planeManufacturerId);
sqlQuery += " order by t01.id asc;";
}
else {
sqlQuery += " order by t02.id asc, t01.id asc;";
}
try {
return new PlaneModelRowMapper().mapRows(this.jdbcTemplate.queryForList(sqlQuery));
} catch (Exception e) {
e.printStackTrace();
}
return null;
} |
// TestEstablish sets up a basic BGP connection and confirms that traffic is forwarded according to
// it.
func TestEstablish(t *testing.T) {
t.Logf("Start DUT config load:")
dut := ondatra.DUT(t, "dut")
t.Logf("Start DUT interface Config")
configureDUT(t, dut)
t.Logf("Start DUT BGP Config")
dutConfPath := dut.Config().NetworkInstance("default").Protocol(telemetry.PolicyTypes_INSTALL_PROTOCOL_TYPE_BGP, "BGP").Bgp()
fptest.LogYgot(t, "DUT BGP Config before", dutConfPath, dutConfPath.Get(t))
dutConfPath.Replace(t, nil)
dutConf := bgpCreateNbr(dutAS, ateAS, defaultPolicy)
dutConfPath.Replace(t, dutConf)
t.Logf("Start ATE Config")
ate := ondatra.ATE(t, "ate")
allFlows := configureATE(t, ate)
t.Logf("Verifying port status")
verifyPortsUp(t, dut.Device)
t.Logf("Check BGP parameters")
verifyBgpTelemetry(t, dut)
sendTraffic(t, ate, allFlows)
verifyTraffic(t, ate, allFlows, false)
verifyPrefixesTelemetry(t, dut, routeCount, routeCount, 0)
verifyPrefixesTelemetryV6(t, dut, routeCount, routeCount, 0)
t.Run("RoutesWithdrawn", func(t *testing.T) {
t.Log("Breaking BGP config and confirming that forwarding stops working.")
dutConfPath.Replace(t, bgpCreateNbr(dutAS, badAS, defaultPolicy))
sendTraffic(t, ate, allFlows)
verifyTraffic(t, ate, allFlows, true)
})
} |
<gh_stars>10-100
//---------------------------------------------------------------------------
//
// This file is the copyrighted property of Tableau Software and is protected
// by registered patents and other applicable U.S. and international laws and
// regulations.
//
// You may adapt this file and modify it to fit into your context and use it
// as a template to start your own projects.
//
//---------------------------------------------------------------------------
package examples;
import com.tableau.hyperapi.Catalog;
import com.tableau.hyperapi.Connection;
import com.tableau.hyperapi.CreateMode;
import com.tableau.hyperapi.HyperProcess;
import com.tableau.hyperapi.Inserter;
import com.tableau.hyperapi.TableName;
import com.tableau.hyperapi.SqlType;
import com.tableau.hyperapi.TableDefinition;
import com.tableau.hyperapi.Telemetry;
import java.nio.file.Path;
import java.nio.file.Paths;
import static com.tableau.hyperapi.Nullability.NOT_NULLABLE;
import static com.tableau.hyperapi.Nullability.NULLABLE;
/**
* An example of how to create and insert data into a multi-table Hyper file where tables have different types
*/
public class InsertDataIntoMultipleTables {
// Table Definitions required to create tables
/**
* The orders table
*/
private static TableDefinition ORDERS_TABLE = new TableDefinition(
// Since the table name is not prefixed with an explicit schema name, the table will reside in the default "public" namespace.
new TableName("Orders"))
.addColumn("Address ID", SqlType.smallInt(), NOT_NULLABLE)
.addColumn("Customer ID", SqlType.text(), NOT_NULLABLE)
.addColumn("Order Date", SqlType.date(), NOT_NULLABLE)
.addColumn("Order ID", SqlType.text(), NOT_NULLABLE)
.addColumn("Ship Date", SqlType.date(), NULLABLE)
.addColumn("Ship Mode", SqlType.text(), NULLABLE);
/**
* The customer table
*/
private static TableDefinition CUSTOMER_TABLE = new TableDefinition(
// Since the table name is not prefixed with an explicit schema name, the table will reside in the default "public" namespace.
new TableName("Customer"))
.addColumn("Customer ID", SqlType.text(), NOT_NULLABLE)
.addColumn("Customer Name", SqlType.text(), NOT_NULLABLE)
.addColumn("Loyalty Reward Points", SqlType.bigInt(), NOT_NULLABLE)
.addColumn("Segment", SqlType.text(), NOT_NULLABLE);
/**
* The products table
*/
private static TableDefinition PRODUCTS_TABLE = new TableDefinition(
// Since the table name is not prefixed with an explicit schema name, the table will reside in the default "public" namespace.
new TableName("Products"))
.addColumn("Category", SqlType.text(), NOT_NULLABLE)
.addColumn("Product ID", SqlType.text(), NOT_NULLABLE)
.addColumn("Product Name", SqlType.text(), NOT_NULLABLE)
.addColumn("Sub-Category", SqlType.text(), NOT_NULLABLE);
/**
* The line items table
*/
private static TableDefinition LINE_ITEMS_TABLE = new TableDefinition(
// Since the table name is not prefixed with an explicit schema name, the table will reside in the default "public" namespace.
new TableName("Line Items"))
.addColumn("Line Item ID", SqlType.bigInt(), NOT_NULLABLE)
.addColumn("Order ID", SqlType.text(), NOT_NULLABLE)
.addColumn("Product ID", SqlType.text(), NOT_NULLABLE)
.addColumn("Sales", SqlType.doublePrecision(), NOT_NULLABLE)
.addColumn("Quantity", SqlType.smallInt(), NOT_NULLABLE)
.addColumn("Discount", SqlType.doublePrecision(), NULLABLE)
.addColumn("Profit", SqlType.doublePrecision(), NOT_NULLABLE);
/**
* The main function
*
* @param args The args
*/
public static void main(String[] args) {
System.out.println("EXAMPLE - Insert data into multiple tables within a new Hyper file\n");
Path superstoreDatabasePath = Paths.get("customers.hyper");
// Starts the Hyper Process with telemetry enabled to send data to Tableau.
// To opt out, simply set telemetry=Telemetry.DO_NOT_SEND_USAGE_DATA_TO_TABLEAU.
try (HyperProcess process = new HyperProcess(Telemetry.SEND_USAGE_DATA_TO_TABLEAU)) {
// Creates new Hyper file "superstore.hyper"
// Replaces file with CreateMode.CREATE_AND_REPLACE if it already exists
try (Connection connection = new Connection(process.getEndpoint(),
superstoreDatabasePath.toString(),
CreateMode.CREATE_AND_REPLACE)) {
Catalog catalog = connection.getCatalog();
catalog.createTable(ORDERS_TABLE);
catalog.createTable(CUSTOMER_TABLE);
catalog.createTable(PRODUCTS_TABLE);
catalog.createTable(LINE_ITEMS_TABLE);
// Insert data into Orders table
try (Inserter inserter = new Inserter(connection, ORDERS_TABLE)) {
inserter.add((short) 399).add("DK-13375")
.addDate(2012, 9, 7).add("CA-2011-100006")
.addDate(2012, 9, 13).add("Standard Class").endRow();
inserter.add((short) 530).add("EB-13705")
.addDate(2012, 7, 8).add("CA-2011-100090")
.addDate(2012, 7, 12).add("Standard Class").endRow();
inserter.execute();
}
// Insert data into Customer table
try (Inserter inserter = new Inserter(connection, CUSTOMER_TABLE)) {
inserter.add("DK-13375").add("<NAME>").add(518)
.add("Consumer").endRow();
inserter.add("EB-13705").add("<NAME>").add(815)
.add("Corporate").endRow();
inserter.execute();
}
// Insert data into Products table
try (Inserter inserter = new Inserter(connection, PRODUCTS_TABLE)) {
inserter.add("TEC-PH-10002075").add("Technology").add("Phones")
.add("AT&T EL51110 DECT").endRow();
inserter.execute();
}
// Insert data into Line Items table
try (Inserter inserter = new Inserter(connection, LINE_ITEMS_TABLE)) {
inserter.add(2718).add("CA-2011-100006").add("TEC-PH-10002075")
.add(377.97).add((short) 3).add(0)
.add(109.6113).endRow();
inserter.add(2719).add("CA-2011-100090").add("TEC-PH-10002075")
.add(377.97).add((short) 3).add(Double.NaN)
.add(109.6113).endRow();
inserter.execute();
}
TableDefinition[] tables = new TableDefinition[]{ORDERS_TABLE, CUSTOMER_TABLE, PRODUCTS_TABLE, LINE_ITEMS_TABLE};
for (TableDefinition table : tables) {
// executeScalarQuery is for executing a query that returns exactly one row with one column
long countInTable = connection.<Long>executeScalarQuery("SELECT COUNT(*) FROM " + table.getTableName()).get();
System.out.println("The number of rows in table " + table.getTableName() + " is " + countInTable);
}
System.out.println();
}
System.out.println("The connection to the Hyper file has been closed");
}
System.out.println("The Hyper process has been shut down");
}
}
|
Firstly, thank you all *so much* for your support. It means a great deal to me that you're willing to support the development and maintenance of the CKAN, and I want to be able to return that kindness by keeping you informed on my progress. April was the first month of my Patreon campaign, and marked my first dedicated CKAN code sprint. The sprint was a huge success, allowing us to greatly improve how the CKAN client downloads mods on both Mac and Linux systems. However, the last month has also shown a great increase in the number of indexed mods. In the month of April, we updated the netkan auto-indexer files for 142 mods, and indexed over 440 new mod releases. This has been in no small part due to Daz's outstanding efforts, who has gone above and beyond in not only ensuring our metadata remains up to date, but also in assisting others to do so the same, especially after the 1.0 release of KSP. If you see Daz on IRC or the forums, make sure to thank him for his efforts, he continues to be a true CKAN hero. In May I'll be concentrating on improving our software development processes. The CKAN project has always had very high quality standards, with mandatory code review, and hundreds of automated tests that run over our codebase. However any successful project will experience growing pains, especially one that took off as rapidly as the CKAN. By improving our build, test, and release processes we can focus our future attention on fixing bugs, adding oft-requested features, and improving the overall user experience. I'd also like to improve our user support framework, making it easier for our users to find answers quickly, and to improve our responsiveness when it comes to answering questions in a timely fashion. I'm in the process of evaluating a number of heavy-duty support options, and thanks to the support of Patrons like yourself we now have a budget that can be put toward support expenses. In addition, I'd like to concentrate on improving the reliability of mod downloads themselves. Many of our users have encountered situations where the site we downloading a mod from is down, or sites like github applying throttles for trying to download too many mods at once. While we may not be able to complete the work in May, I have every expectation that the CKAN client can implement automatic failovers to download mods with permissible licenses from mirror sites should the primary sites be unavailable. Finally, thank you for reading along and for your continuing support. These monthly CKAN reports will be public. If you're not already a supporter, you can still subscribe to these public updates on Patreon (use the follow-link in the top-right of https://www.patreon.com/pjf0 ). If you are a supporter, then thank you again for your contributions; you're the reason why I'm able to put the CKAN on my work schedule. Many, many thanks again ~ Paul --- April at a glance: * Mod releases indexed: 442 * New and updated mods handled by our netkan auto-indexer: 142 * New code commits (excluding merges): 53 * New CKAN client releases: 5 * Patron funding from code sprints: $124 * Bitcoin contributions: $64 |
def _plot_one_example(
orig_radar_matrix, translated_radar_matrix, rotated_radar_matrix,
noised_radar_matrix, output_dir_name, full_storm_id_string,
storm_time_unix_sec):
dummy_heights_m_agl = numpy.array([1000, 2000, 3000, 4000], dtype=int)
concat_radar_matrix = numpy.concatenate((
orig_radar_matrix, translated_radar_matrix, rotated_radar_matrix,
noised_radar_matrix
), axis=-2)
training_option_dict = {
trainval_io.SOUNDING_FIELDS_KEY: None,
trainval_io.RADAR_FIELDS_KEY: [RADAR_FIELD_NAME],
trainval_io.RADAR_HEIGHTS_KEY: dummy_heights_m_agl
}
model_metadata_dict = {cnn.TRAINING_OPTION_DICT_KEY: training_option_dict}
handle_dict = plot_examples.plot_one_example(
list_of_predictor_matrices=[concat_radar_matrix],
model_metadata_dict=model_metadata_dict,
pmm_flag=True, plot_sounding=False, allow_whitespace=True,
plot_panel_names=False, add_titles=False, label_colour_bars=True,
num_panel_rows=2)
figure_object = handle_dict[plot_examples.RADAR_FIGURES_KEY][0]
axes_object_matrix = handle_dict[plot_examples.RADAR_AXES_KEY][0]
axes_object_matrix[0, 0].set_title('(a) Original', fontsize=TITLE_FONT_SIZE)
axes_object_matrix[0, 1].set_title(
'(b) Translated', fontsize=TITLE_FONT_SIZE
)
axes_object_matrix[1, 0].set_title(
r'(c) Rotated 30$^{\circ}$ clockwise', fontsize=TITLE_FONT_SIZE
)
axes_object_matrix[1, 1].set_title('(d) Noised', fontsize=TITLE_FONT_SIZE)
output_file_name = '{0:s}/storm={1:s}_time={2:s}.jpg'.format(
output_dir_name, full_storm_id_string.replace('_', '-'),
time_conversion.unix_sec_to_string(
storm_time_unix_sec, FILE_NAME_TIME_FORMAT)
)
print('Saving figure to: "{0:s}"...'.format(output_file_name))
figure_object.savefig(
output_file_name, dpi=FIGURE_RESOLUTION_DPI,
pad_inches=0, bbox_inches='tight'
)
pyplot.close(figure_object) |
<reponame>Thanduriel/StableNN<gh_stars>0
#pragma once
#include <functional>
#include <torch/torch.h>
#include "hyperparam.hpp"
namespace nn {
// network output, target, data
using LossFn = std::function<torch::Tensor(const torch::Tensor&, const torch::Tensor&, const torch::Tensor&)>;
torch::Tensor lpLoss(const torch::Tensor& input, const torch::Tensor& target, c10::Scalar p);
torch::Tensor energyLoss(const torch::Tensor& netInput, const torch::Tensor& netOutput);
// @param _train loss function for training which includes regularization terms
LossFn makeLossFunction(const HyperParams& _params, bool _train = true);
} |
<reponame>NyBatis/NyBatisCore
package org.nybatis.core.file.handler;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.nybatis.core.exception.unchecked.UncheckedIOException;
import org.nybatis.core.exception.unchecked.JsonIOException;
import org.nybatis.core.file.FileUtil;
import org.nybatis.core.model.NList;
import org.nybatis.core.model.NMap;
import org.nybatis.core.reflection.mapper.NObjectExcelMapper;
import org.nybatis.core.util.Types;
import org.nybatis.core.validation.Validator;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* Abstract Excel Writer
*
* @author <EMAIL>
*/
public abstract class ExcelHandler {
private static final String DEFAULT_SHEET_NAME = "Sheet1";
private static NObjectExcelMapper excelMapper = new NObjectExcelMapper();
/**
* Write excel data to output stream
*
* @param outputStream output stream to write data
* @param sheetName sheet name of excel file to write
* @param data grid data
* @throws UncheckedIOException File I/O Exception
*/
public void writeTo( OutputStream outputStream, String sheetName, NList data ) throws UncheckedIOException {
writeTo( outputStream, sheetName, data, true );
}
/**
* Write data to excelFile
*
* @param outputStream output stream to write data
* @param sheetName sheet name of excel file to write
* @param data grid data
* @throws UncheckedIOException File I/O Exception
*/
public void writeTo( OutputStream outputStream, String sheetName, List<?> data ) throws UncheckedIOException {
writeTo( outputStream, sheetName, toExcelNListFromBean(data) );
}
/**
* Write excel data to output stream in sheet named 'Sheet1'
*
* @param outputStream output stream to write data
* @param data grid data
* @throws UncheckedIOException file I/O exception
*/
public void writeTo( OutputStream outputStream, NList data ) throws UncheckedIOException {
writeTo( outputStream, DEFAULT_SHEET_NAME, data, true );
}
/**
* Write excel data to output stream in sheet named 'Sheet1'
*
* @param outputStream output stream to write data
* @param data grid data
* @throws UncheckedIOException file I/O exception
*/
public void writeTo( OutputStream outputStream, List<?> data ) throws UncheckedIOException {
writeTo( outputStream, DEFAULT_SHEET_NAME, toExcelNListFromBean(data), true );
}
private void writeTo( OutputStream outputStream, String sheetName, NList data, boolean isXlsx ) throws UncheckedIOException {
if( outputStream == null ) return;
Map<String, NList> worksheets = new HashMap<>();
worksheets.put( sheetName, data );
writeNListTo( outputStream, worksheets, isXlsx );
}
/**
* Write excel data to output stream
*
* @param outputStream output stream to write data
* @param data key is sheetName and value is grid data.<br>
* value type is allowed only List or NList.
* @throws UncheckedIOException file I/O exception
*/
public void writeTo( OutputStream outputStream, Map<String, ?> data ) throws UncheckedIOException {
writeNListTo( outputStream, toNList(data), true );
}
/**
*
* Write excel data to output stream
*
* @param outputStream output stream to write data
* @param data key is sheetName and value is grid data.
* @param isXlsx excel file type ( true : xlsx, false : xls )
* @throws UncheckedIOException file I/O exception
*/
protected abstract void writeNListTo( OutputStream outputStream, Map<String, NList> data, boolean isXlsx ) throws UncheckedIOException;
private boolean isXlsx( File file ) {
return "xlsx".equalsIgnoreCase( FileUtil.getExtention(file) );
}
/**
* Write data to excelFile
*
* @param excelFile excel file to write data
* @param sheetName sheet name of excel file to write
* @param data grid data
* @throws UncheckedIOException File I/O Exception
*/
public void writeTo( File excelFile, String sheetName, NList data ) throws UncheckedIOException {
write( excelFile, outputStream -> writeTo( outputStream, sheetName, data, isXlsx(excelFile) ) );
}
/**
* Write data to excelFile
*
* @param excelFile excel file to write data
* @param sheetName sheet name of excel file to write
* @param data grid data
* @throws UncheckedIOException File I/O Exception
*/
public void writeTo( File excelFile, String sheetName, List<?> data ) throws UncheckedIOException {
writeTo( excelFile, sheetName, toExcelNListFromBean(data) );
}
/**
* Write data to excel file in sheet named 'Sheet1'
*
* @param excelFile excel file to write
* @param data grid data
* @throws UncheckedIOException file I/O exception
*/
public void writeTo( File excelFile, NList data ) throws UncheckedIOException {
writeTo( excelFile, DEFAULT_SHEET_NAME, data );
}
/**
* Write data to excel file in sheet named 'Sheet1'
*
* @param excelFile excel file to write
* @param data grid data
* @throws UncheckedIOException file I/O exception
*/
public void writeTo( File excelFile, List<?> data ) throws UncheckedIOException {
writeTo( excelFile, toExcelNListFromBean(data) );
}
/**
* Write data to excel file
*
* @param excelFile excel file to write data
* @param data key is sheetName and value is grid data.<br>
* value type is allowed only List or NList.
* @throws UncheckedIOException file I/O exception
*/
public void writeTo( File excelFile, Map<String, ?> data ) throws UncheckedIOException {
write( excelFile, outputStream -> writeNListTo( outputStream, toNList( data ), isXlsx( excelFile ) ) );
}
/**
* Read sheet from excel file
*
* @param excelFile excel file to read
* @param sheetName sheet name of excel file to read
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public NList readFrom( File excelFile, String sheetName ) throws UncheckedIOException {
return (NList) read( excelFile, inputStream -> readFrom( inputStream, sheetName ) );
}
/**
* Read sheet from excel file
*
* @param excelFile excel file to read
* @param sheetName sheet name of excel file to read
* @param toClass generic type of list's class
* @param <T> expected class of return
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public <T> List<T> readFrom( File excelFile, String sheetName, Class<T> toClass ) throws UncheckedIOException {
NList list = readFrom( excelFile, sheetName );
return toBeanFromExcelNList( list, toClass );
}
/**
* Read first sheet from excel file
*
* @param excelFile excel file to read
* @return grid data from first sheet
* @throws UncheckedIOException file I/O exception
*/
public NList readFirstSheetFrom( File excelFile ) throws UncheckedIOException {
return (NList) read( excelFile, inputStream -> readFirstSheetFrom( inputStream ) );
}
/**
* Read first sheet from excel file
*
* @param excelFile excel file to read
* @param toClass generic type of list's class
* @param <T> expected class of return
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public <T> List<T> readFirstSheetFrom( File excelFile, Class<T> toClass ) throws UncheckedIOException {
NList list = readFirstSheetFrom( excelFile );
return toBeanFromExcelNList( list, toClass );
}
/**
* Read all sheets from excel file
*
* @param excelFile excel file to read.
* @return key is sheetName and value is grid data.
* @throws UncheckedIOException file I/O exception
*/
public Map<String, NList> readFrom( File excelFile ) throws UncheckedIOException {
return (Map<String, NList>) read( excelFile, inputStream -> readFrom( inputStream ) );
}
/**
* Read all sheet from excel file
*
* @param excelFile excel file to read.
* @param toClass generic type of list's class
* @param <T> expected class of return
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public <T> Map<String, List<T>> readFrom( File excelFile, Class<T> toClass ) throws UncheckedIOException {
Map<String, NList> sheets = readFrom( excelFile );
return toBeanList( sheets, toClass );
}
/**
* Read sheet from excel file
*
* @param inputStream input stream to read data
* @param sheetName sheet name of excel file to read
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public abstract NList readFrom( InputStream inputStream, String sheetName ) throws UncheckedIOException;
/**
* Read sheet from input stream
*
* @param inputStream input stream to read data
* @param sheetName sheet name of excel file to read
* @param toClass generic type of list's class
* @param <T> expected class of return
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public <T> List<T> readFrom( InputStream inputStream, String sheetName, Class<T> toClass ) throws UncheckedIOException {
NList list = readFrom( inputStream, sheetName );
return toBeanFromExcelNList( list, toClass );
}
/**
* Read first sheet from input stream
*
* @param inputStream input stream to read data
* @return grid data from first sheet
* @throws UncheckedIOException file I/O exception
*/
public abstract NList readFirstSheetFrom( InputStream inputStream ) throws UncheckedIOException;
/**
* Read sheet from input stream
*
* @param inputStream input stream to read data
* @param toClass generic type of list's class
* @param <T> expected class of return
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public <T> List<T> readFirstSheetFrom( InputStream inputStream, Class<T> toClass ) throws UncheckedIOException {
NList list = readFirstSheetFrom( inputStream );
return toBeanFromExcelNList( list, toClass );
}
/**
* Read all sheets from excel file
*
* @param inputStream input stream to read data
* @return key is sheetName and value is grid data.
* @throws UncheckedIOException file I/O exception
*/
public abstract Map<String, NList> readFrom( InputStream inputStream ) throws UncheckedIOException;
/**
* Read all sheet from input stream
*
* @param inputStream input stream to read data
* @param toClass generic type of list's class
* @param <T> expected class of return
* @return grid data
* @throws UncheckedIOException File I/O Exception
*/
public <T> Map<String, List<T>> readFrom( InputStream inputStream, Class<T> toClass ) throws UncheckedIOException {
Map<String, NList> sheets = readFrom( inputStream );
return toBeanList( sheets, toClass );
}
protected String toExcelText( Object object ) {
if( object == null ) return "";
String txt = object.toString();
if( txt.length() > 32_707 ) {
txt = txt.substring( 0, 32_707 );
}
return txt;
}
private FileInputStream getInputStream( File excelFile ) {
try {
return new FileInputStream( excelFile );
} catch( FileNotFoundException e ) {
throw new UncheckedIOException( e, "Excel File to read is not found. ({})", excelFile );
}
}
private FileOutputStream getOutputStream( File excelFile ) {
try {
FileUtil.makeFile( excelFile );
return new FileOutputStream( excelFile );
} catch( FileNotFoundException e ) {
throw new UncheckedIOException( e, "ExcelFile({}) to write is not found.", excelFile );
}
}
protected NList toExcelNListFromBean( List<?> fromList ) throws JsonIOException {
NList result = new NList();
if( hasRow(fromList) ) {
ObjectWriter writer = excelMapper.writer();
try {
for( Object bean : fromList ) {
result.addRow( writer.writeValueAsString(bean) );
}
} catch( JsonProcessingException e ) {
throw new JsonIOException( e );
}
}
return result;
}
protected <T> List<T> toBeanFromExcelNList( NList fromList, Class<T> toClass ) throws JsonIOException {
List<T> list = new ArrayList<>();
if( fromList == null || fromList.size() == 0 ) return list;
for( NMap map : fromList ) {
String json = map.toJson();
try {
T bean = excelMapper.readValue( json, toClass );
list.add( bean );
} catch( IOException e ) {
throw new JsonIOException( e, "JsonParseException : {}\n\t- json string :\n{}\n\t- target class : {}", e.getMessage(), json, toClass );
}
}
return list;
}
private boolean hasRow( List<?> list ) {
return Validator.isNotEmpty( list ) && Types.isNotPrimitive( list.get(0) );
}
/**
* Convert data to NList
*
* @param data data for excel
* @return data as NList type
*/
public Map<String, NList> toNList( Map<String, ?> data ) {
Map<String, NList> sheets = new LinkedHashMap<>();
if( Validator.isNotEmpty(data) ) {
for( String sheetName : data.keySet() ) {
Object sheet = data.get( sheetName );
if( sheet == null ) continue;
if( sheet instanceof NList ) {
sheets.put( sheetName, (NList) sheet );
} else if( sheet instanceof List ) {
sheets.put( sheetName, toExcelNListFromBean( (List<?>) sheet ) );
} else if( Types.isArrayOrList( sheet ) ) {
sheets.put( sheetName, toExcelNListFromBean( Types.toList(sheet) ) );
}
}
}
return sheets;
}
/**
* Convert data to bean list
*
* @param data data for excel
* @param toClass generic type of list
* @param <T> expected class of return
* @return data as toClass generic type
*/
public <T> Map<String, List<T>> toBeanList( Map<String, NList> data, Class<T> toClass ) {
Map<String, List<T>> sheets = new LinkedHashMap<>();
if( Validator.isNotEmpty(data) ) {
for( String sheet : data.keySet() ) {
sheets.put( sheet, toBeanFromExcelNList(data.get( sheet ), toClass ) );
}
}
return sheets;
}
//----------- annonymous interface
private Object read( File excelFile, Reader reader ) {
FileInputStream inputStream = getInputStream( excelFile );
try {
return reader.read( inputStream );
} catch( UncheckedIOException e ) {
throw new UncheckedIOException( e.getCause(), "Error on reading excel file({})", excelFile );
}
}
private interface Reader {
Object read( InputStream inputStream );
}
private void write( File excelFile, Writer reader ) {
FileOutputStream outputStream = getOutputStream( excelFile );
try {
reader.write( outputStream );
} catch( UncheckedIOException e ) {
FileUtil.delete( excelFile );
throw new UncheckedIOException( e.getCause(), "Error on writing excel file({})", excelFile );
}
}
private interface Writer {
void write( OutputStream outputStream );
}
}
|
package objects_and_classes_primer.Exercises;
import java.util.Scanner;
public class Exercise12 {
private static final Scanner sc = new Scanner(System.in);
public static void main(String[] args){
System.out.print("Enter the endpoints of the first line segment: ");
double x1 = sc.nextDouble();
double y1 = sc.nextDouble();
double x2 = sc.nextDouble();
double y2 = sc.nextDouble();
System.out.println();
System.out.print("Enter the endpoints of the second line segment: ");
double x3 = sc.nextDouble();
double y3 = sc.nextDouble();
double x4 = sc.nextDouble();
double y4 = sc.nextDouble();
double a = ((y2 - y1) / (x2 - x1));
double c = ((y4 - y3) / (x4 - x3));
double e = y1 - (a * x1);
double f = y3 - (c * x3);
LinearEquation linearEquation = new LinearEquation(-a, 1.0, -c, 1.0, e, f);
if(linearEquation.isSolvable())System.out.println("The intersecting point is: (" + linearEquation.getX()
+ ", " + linearEquation.getY() + ").");
else System.out.println("The equation has no solution");
}
}
|
import * as path from "path";
import * as chai from "chai";
import * as deepEql from "deep-eql";
import { getCommands } from "../src/projectBuilder";
const inputPath = path.join(__dirname, "..", "..", "test", "inputs");
const xcmlPath = path.join(inputPath, "helloworld_Model.xcml");
const xcbuildPath = "/xcbuild";
const rootPath = "/root/HelloWorld";
const monoFacadesPath = path.join(inputPath, "other_input.txt");
const windowsPlaform = "win";
const linuxPlaform = "linux";
const expectedCommands = [
{
platform: "windows",
commands: [
`${xcbuildPath} --compilationmode=Debug --build --env=Dev --vs=VS2015 --project=${xcmlPath}`,
`${xcbuildPath} --exportRuntimes --compilationmode=Debug --env=Dev --output="${rootPath}${path.sep}XCR" --project=${xcmlPath}`,
`${xcbuildPath} --compilationmode=Debug --exportInterface --env=Dev --output="${rootPath}${path.sep}output" --project=${xcmlPath}`
]
},
{
platform: "linux",
commands: [
`mono ${xcbuildPath} --compilationmode=Debug --build --env=Dev --vs=VS2015 --project=${xcmlPath} --monoPath=“${monoFacadesPath}” --framework=Framework452 `,
`mono ${xcbuildPath} --exportRuntimes --compilationmode=Debug --env=Dev --output="${rootPath}${path.sep}XCR" --project=${xcmlPath}`,
`mono ${xcbuildPath} --compilationmode=Debug --exportInterface --env=Dev --output="${rootPath}${path.sep}output" --project=${xcmlPath}`
]
}
];
describe("projectBuilder test", () => {
it(`Given a wrongCxmlPath, getCommands should return undefined`, () => {
const wrongCxmlPath = "";
const commands = getCommands(xcbuildPath, wrongCxmlPath, rootPath, undefined, windowsPlaform);
deepEql(commands, undefined).should.equal(true);
});
expectedCommands.forEach(test => {
it(`Given a the right arguments in a ${test.platform} platform, getCommands should return the right commands`, () => {
const commands = getCommands(xcbuildPath, rootPath, xcmlPath, monoFacadesPath, test.platform);
deepEql(commands, test.commands).should.equal(true);
});
});
}); |
/**
* Better, but still hard to read. Failed assertion messages are ok.
*
* Framework hasn't been updated in a while. No longer bundled in JUnit 5 like it was in JUnit 4.
*/
@Test
void objectHasPropertyHamcrest() {
MatcherAssert.assertThat(dummyFruits,
Matchers.anyOf(Matchers.contains(
Matchers.hasProperty("name", Matchers.is("Baby Banana")),
Matchers.hasProperty("name", Matchers.is("Granny Smith Apple")),
Matchers.hasProperty("name", Matchers.is("Grapefruit")))
));
MatcherAssert.assertThat(dummyFruits,
Matchers.anyOf(Collections.singletonList(Matchers.contains(Arrays.asList(
Matchers.hasProperty("name", Matchers.is("Baby Banana")),
Matchers.hasProperty("name", Matchers.is("Granny Smith Apple")),
Matchers.hasProperty("name", Matchers.is("Grapefruit")))
))));
/*
java.lang.AssertionError:
Expected: (iterable containing [hasProperty("name", is "Baby Banana"), hasProperty("name", is "Granny Smith Apple"), hasProperty("name", is "FailedFruit")])
but: was <[1, Baby Banana, It's yellow!, 20.0, BANANA, 2, Granny Smith Apple, Delicious!, 10.5, APPLE, 3, Grapefruit, It's totally an orange, baka!, 8.5, ORANGE]>
*/
} |
<gh_stars>0
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Microsoft Bot Framework: http://botframework.com
//
// Bot Framework Emulator Github:
// https://github.com/Microsoft/BotFramwork-Emulator
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
import { BotInfo } from '@bfemulator/app-shared';
import { BotConfigWithPath } from '@bfemulator/sdk-shared';
import { BotAction, load, setActive, setDirectory, close, mockAndSetActive } from '../actions/botActions';
import { BotState, bot } from './bot';
describe('Bot reducer tests', () => {
let defaultState: BotState;
beforeEach(() => {
defaultState = {
activeBot: null,
botFiles: [],
currentBotDirectory: null,
};
});
it('should return unaltered state for non-matching action type', () => {
const emptyAction: BotAction = { type: null, payload: null };
const startingState = { ...defaultState };
const endingState = bot(defaultState, emptyAction);
expect(endingState).toEqual(startingState);
});
it('should load bot files', () => {
const bots: BotInfo[] = [
{
displayName: 'bot1',
path: 'path1',
secret: null,
},
{
displayName: 'bot2',
path: 'path2',
secret: 'test-secret',
},
{
displayName: 'bot3',
path: 'path3',
secret: null,
},
null,
];
const action = load(bots);
const state = bot(defaultState, action);
expect(state.botFiles).not.toEqual(bots);
expect(state.botFiles.length).toBe(3);
expect(state.botFiles).toEqual([
{
displayName: 'bot1',
path: 'path1',
secret: null,
},
{
displayName: 'bot2',
path: 'path2',
secret: 'test-secret',
},
{
displayName: 'bot3',
path: 'path3',
secret: null,
},
]);
});
it('should close the current active bot', () => {
const activeBot: BotConfigWithPath = {
name: 'someActiveBot',
description: 'testing',
padlock: null,
services: [],
path: 'somePath',
};
const startingState: BotState = {
...defaultState,
activeBot,
};
const action = close();
const state = bot(startingState, action);
expect(state.activeBot).toBe(null);
});
it('should set the bot directory', () => {
const action = setDirectory('some/path/to/bot/dir');
const state = bot(defaultState, action);
expect(state.currentBotDirectory).toBe('some/path/to/bot/dir');
});
it('should set a bot as active', () => {
const activeBot: BotConfigWithPath = {
name: 'someBot',
description: 'some description',
padlock: null,
services: [],
path: 'somePath',
};
expect(defaultState.activeBot).toBe(null);
const action = setActive(activeBot);
const state = bot(defaultState, action);
expect(state.activeBot).toEqual(activeBot);
expect(state.activeBot.path).toBe('somePath');
});
it('should preserve overrides from the previous bot if they have the same path', () => {
const activeBot: BotConfigWithPath = {
name: 'someActiveBot',
description: 'testing',
padlock: null,
services: [],
path: 'somePath',
overrides: {
endpoint: {
endpoint: 'someEndpointOverride',
id: 'someEndpointOverride',
appId: 'someAppIdOverride',
appPassword: '<PASSWORD>',
},
},
};
const startingState: BotState = {
...defaultState,
activeBot,
};
const newActiveBot: BotConfigWithPath = {
name: 'someBot',
description: 'some description',
padlock: null,
services: [],
path: 'somePath',
};
const action = setActive(newActiveBot);
const endingState = bot(startingState, action);
expect(endingState.activeBot.name).toBe('someBot');
expect(endingState.activeBot.overrides).toBeTruthy();
const endpointOverrides = endingState.activeBot.overrides.endpoint;
expect(endpointOverrides.endpoint).toBe('someEndpointOverride');
expect(endpointOverrides.id).toBe('someEndpointOverride');
expect(endpointOverrides.appId).toBe('someAppIdOverride');
expect(endpointOverrides.appPassword).toBe('<PASSWORD>');
});
it("should throw away overrides from the previous bot if they don't have the same path", () => {
const activeBot: BotConfigWithPath = {
name: 'someActiveBot',
description: 'testing',
padlock: null,
services: [],
path: 'somePath',
overrides: {
endpoint: {
endpoint: 'someEndpointOverride',
id: 'someEndpointOverride',
appId: 'someAppIdOverride',
appPassword: '<PASSWORD>',
},
},
};
const startingState: BotState = {
...defaultState,
activeBot,
};
const newActiveBot: BotConfigWithPath = {
name: 'someBot',
description: 'some description',
padlock: null,
services: [],
path: 'someOtherPath',
};
const action = setActive(newActiveBot);
const endingState = bot(startingState, action);
expect(endingState.activeBot.name).toBe('someBot');
expect(endingState.activeBot.overrides).toBeFalsy();
});
it('should mock a bot and set as active', () => {
const botMock: BotConfigWithPath = {
name: 'mockedBot',
description: '',
padlock: null,
path: 'mockedPath',
services: [],
};
const action = mockAndSetActive(botMock);
const state = bot(defaultState, action);
expect(state.activeBot).not.toBe(null);
expect(state.activeBot.name).toBe('mockedBot');
expect(state.activeBot.description).toBe('');
expect(state.activeBot.path).toBe('mockedPath');
});
});
|
/*
* This file is part of the Jikes RVM project (http://jikesrvm.org).
*
* This file is licensed to You under the Common Public License (CPL);
* You may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.opensource.org/licenses/cpl1.0.php
*
* See the COPYRIGHT.txt file distributed with this work for information
* regarding copyright ownership.
*/
package test.org.jikesrvm.basic.core.annotation;
import java.lang.annotation.Annotation;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Class to test inheritance of annotations.
*/
public class TestAnnotationInheritance {
@Retention(RetentionPolicy.RUNTIME)
@Inherited
public @interface A {
}
@Retention(RetentionPolicy.RUNTIME)
public @interface B {
}
@Retention(RetentionPolicy.RUNTIME)
public @interface C {
}
@A
@C
class X {
}
@B
class Y extends X {
}
public static void main(String[] args) {
final Annotation[] annotations = Y.class.getAnnotations();
for (final Annotation annotation : annotations) {
System.out.println(annotation.annotationType().getName());
}
check("getAnnotations must return 2 annotations, 1 inherited and 1 declared", Y.class.getAnnotations().length == 2);
check("getAnnotations must return declared first", Y.class.getAnnotations()[0] instanceof B);
check("getAnnotations must return inherited second", Y.class.getAnnotations()[1] instanceof A);
check("getAnnotation on non-declared, non-inherited annotation must return null", Y.class.getAnnotation(C.class) == null);
check("getAnnotation on declared annotation must not return null", Y.class.getAnnotation(B.class) != null);
check("getAnnotation on inherited annotation must not return null", Y.class.getAnnotation(A.class) != null);
check("getAnnotation on declared annotation must return same instance after multiple calls",
Y.class.getAnnotation(B.class) == Y.class.getAnnotation(B.class));
check("getAnnotation on inherited annotation must return same instance after multiple calls",
Y.class.getAnnotation(A.class) == Y.class.getAnnotation(A.class));
check("getAnnotation on inherited annotation must return same instance from parent and child classes",
Y.class.getAnnotation(A.class) == X.class.getAnnotation(A.class));
}
private static void check(String message, boolean condition) {
if (!condition) System.out.println("Failed check: " + message);
}
}
|
package flag
import (
"testing"
beaconcommon "github.com/protolambda/zrnt/eth2/beacon/common"
"github.com/spf13/pflag"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestRoot(t *testing.T) {
root := beaconcommon.Root{}
f := pflag.NewFlagSet("test", pflag.ContinueOnError)
RootVar(f, &root, "test-root", beaconcommon.Root{}, "test")
// No 0x prefix arg
err := f.Parse([]string{"--test-root=0100000000000000000000007e654d251da770a068413677967f6d3ea2fea9e4"})
require.NoError(t, err)
assert.Equal(t, "0x0100000000000000000000007e654d251da770a068413677967f6d3ea2fea9e4", root.String())
// 0x prefix arg
err = f.Parse([]string{"--test-root=0x0100000000000000000000008f654d251da770a068413677967f6d3ea2fea9e4"})
require.NoError(t, err)
assert.Equal(t, "0x0100000000000000000000008f654d251da770a068413677967f6d3ea2fea9e4", root.String())
}
|
package utils
import (
"strings"
"path/filepath"
"github.com/spf13/viper"
log "github.com/sirupsen/logrus"
)
// Get configuration file's path structure.
func GetConfigFile(configFileName string) (string, string, string) {
path := filepath.Dir(configFileName)
file := filepath.Base(configFileName)
ctype := filepath.Ext(configFileName)[1:]
return path, file, ctype
}
// Give precedence to environment variables over configuration file's
func GetConfigString(configEnv *viper.Viper, configFile *viper.Viper, key string) string {
value := configEnv.GetString(key)
if value == "" {
value = configFile.GetString(key)
}
if value == "" {
log.Fatalf("Variable '%s' missing.", strings.ToUpper(key))
}
return value
}
// Give precedence to environment variables over configuration file's
func GetConfigInt(configEnv *viper.Viper, configFile *viper.Viper, key string) int {
value := configEnv.GetInt(key)
if value == 0 {
value = configFile.GetInt(key)
}
if value == 0 {
log.Fatalf("Variable '%s' missing.", strings.ToUpper(key))
}
return value
}
|
def debug_compilation_time(message):
global _tic
if message and global_config.print_compilation_time:
print(f"compile_pipeshard_executable::{message}: "
f"{time.time() - _tic:.2f} s")
_tic = time.time() |
def esp_to_anymail_event(self, esp_event):
esp_type = getfirst(esp_event, ['event', 'type'], 'unknown')
if esp_type == 'inbound':
assert self.signal is not tracking
self.signal = inbound
return self.mandrill_inbound_to_anymail_event(esp_event)
else:
assert self.signal is not inbound
self.signal = tracking
return self.mandrill_tracking_to_anymail_event(esp_event) |
package com.example.myapplicationvalidation;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity {
EditText Num1;
EditText Num2;
TextView Resultado;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Num1 = findViewById(R.id.Valor1);
Num2 = findViewById(R.id.Valor2);
Resultado = findViewById(R.id.Resultado);
}
public void Check_Number(View view){
int nu1 = Integer.parseInt(Num1.getText().toString());
int nu2 = Integer.parseInt(Num2.getText().toString());
if (nu1 > nu2){
Resultado.setText("El mayor es el numero : " + nu1);
}
else if (nu2 > nu1){
Resultado.setText("El mayor es el numero : " + nu2);
}
else{
Resultado.setText("Los numeros son iguales");
}
}
} |
<reponame>iqb-berlin/testcenter
import { BookletConfigData } from 'testcenter-common/classes/booklet-config-data.class';
export class BookletConfig extends BookletConfigData {
setFromKeyValuePairs(config: { [key: string]: string }): void {
Object.keys(config)
.forEach(key => {
if (this[key]) {
this[key] = config[key];
}
});
}
setFromXml(bookletConfigElement: Element): void {
const bookletConfigs = Array.prototype.slice.call(bookletConfigElement.childNodes)
.filter(e => e.nodeType === 1)
.reduce(
(agg, item) => {
agg[item.getAttribute('key')] = item.textContent;
return agg;
},
{}
);
this.setFromKeyValuePairs(bookletConfigs);
}
}
|
OTTAWA — Last October, the House of Commons agreed to send “military assets,” in the form of CF-18 fighters, to Iraq to battle the Islamic State. The resolution presented by the government said Canada would not deploy troops on the ground in combat operations.
Yet in a briefing to media on Monday by Lieutenant-General Jonathan Vance and Brigadier-General Michael Rouleau, it was revealed that Canada’s 69 special forces troops have indeed been involved in combat.
Lt.-Gen. Vance said that special forces troops “neutralized” incoming mortar and machine gun fire, while on the frontlines within the last week. Special operations forces also identified targets with lasers and provided “eyes on” reconnaissance for air strikes.
Brig-Gen. Rouleau said the action was taken in self-defence, and an exchange of fire does not mean Canada has started a combat mission.
But the revelation provoked immediate accusations of mission creep, and claims that Parliament is being misled by the government: “We were told all the work would be away from the front lines but obviously that is not the case,” said NDP defence critic Jack Harris.
Jason MacDonald, the prime minister’s spokesman, said in an email Monday the bulk of the special forces work is taking place away from the front lines, and that “a combat role is one in which our troops advance and themselves seek to engage the enemy physically, aggressively, and directly. That is not the case with this mission.”
While the October resolution is not legally binding, the government has committed to no troops on the ground. And yet, by the military’s own admission, troops are not only on the ground, they are involved in firefights with the enemy.
The incongruity stems from the shadowy nature of our special forces’ mandate. We knew there were 69 special operations members in Iraq. We didn’t know what they were doing — quite frankly, it’s a shock to be told as much as we have been. But most informed observers assumed they were acting as frontline combat advisors to Kurdish and Iraqi forces, as well as providing reconnaissance for the air mission. This is still a long way from our experience in Afghanistan.
But there are signs that is the direction in which we may be going.
Lt.-Gen. Vance said ISIS’s advance has been halted but not defeated. A “large-scale reversal” has yet to occur, he said, and the unspoken coda is that that won’t happen without the intervention of ground troops.
He said the Forces are prepared, and preparing, to extend the mission, if they are asked to do so by Parliament.
The government has said it will return to the House of Commons to gain its support at the end of the six month period this spring, though it has no legal obligation to do so.
It seems inevitable that will happen, if only to force the Liberals and New Democrats to re-state their opposition to the mission.
But will the mandate be expanded to include ground forces?
In an election year, it would seem counter-intuitive for the Conservatives to deliberately drive up the risks and costs. The public is onside with a low-level war, in which Canada is seen to be making a solid contribution, without risking mass casualties.
But Stephen Harper has said the criteria on extending the mission will be the risk the Islamic State poses to Canada – and he believes the risk is significant.
“This is a movement that has declared war on Canada specifically and it has shown it has the ability to develop the capacity to execute attacks on this soil,” he said in B.C. this month.
Images made public at the weekend showed blindfolded men accused of homosexuality being pushed by ISIS fighters to their deaths off towers, for the amusement of a watching crowd.
They reinforced the sense that this is an evil that must be confronted wherever it rears its head — and made a mockery of the claims made by a letter writer in Monday’s National Post, who argued that if only Canada would acknowledge its participation in the Afghan war was unethical aggression, and that Israel is guilty of monstrous war crimes, it will find radical Islam becomes a “genuine friend and ally.” How ludicrous. There can be no appeasement or accommodation with such a carcinogenic interpretation of Islam.
But how far does our determination to protect our freedoms go?
The Prime Minister is obviously persuaded that we are engaged in a long conflict with militant Islam that will require resolve and more resources.
It suggests that if the special forces’ firefight did not signal the start of a combat mission, it may not be long before we are, incontrovertibly, at war.
National Post, with files from Postmedia News
• Email: [email protected] | Twitter: IvisonJ |
/**
* Created by Abhishek on 5/13/2016.
*/
public class editAdapter extends RecyclerView.Adapter<editAdapter.listHolder> {
Context context;
List<GalleryPhotoAlbum> albumList;
int imgwidth;
ArrayList<Integer> countList;
public editAdapter(Context baseContext, List<GalleryPhotoAlbum> albumList, ArrayList<Integer> countList, int columnWidth) {
this.context = baseContext;
this.albumList = albumList;
this.imgwidth = columnWidth ;
this.countList = countList;
}
@Override
public editAdapter.listHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(context).inflate(R.layout.custom_edit_item_layout,parent,false);
listHolder holder = new listHolder(view);
return holder;
}
@Override
public void onBindViewHolder(editAdapter.listHolder holder, int position) {
holder.thumbnail.setScaleType(ImageView.ScaleType.CENTER_CROP);
holder.thumbnail.setLayoutParams(new RelativeLayout.LayoutParams(imgwidth,imgwidth+90));
holder.linearLayout.setLayoutParams(new RelativeLayout.LayoutParams(imgwidth,imgwidth+90));
holder.linearLayout.getBackground().setAlpha(130);
Glide.with(context).load(albumList.get(position).getData()).
thumbnail(0.5f).crossFade().diskCacheStrategy(DiskCacheStrategy.ALL).into(holder.thumbnail);
Log.d("adaper_thumb",albumList.get(position).getData());
holder.name.setText(albumList.get(position).getBucketName());
if (albumList.get(position).getTotalCount()==1){
// holder.count.setText(String.valueOf(albumList.get(position).getTotalCount()) + " Photo");
holder.count.setText(String.valueOf(countList.get(position)) + " Photo");
}else {
holder.count.setText(String.valueOf(countList.get(position)) + " Photos");
// holder.count.setText(String.valueOf(albumList.get(position).getTotalCount()) + " Photos");
}
}
@Override
public int getItemCount() {
return albumList.size();
}
public class listHolder extends RecyclerView.ViewHolder{
ImageView thumbnail;
RelativeLayout layout;
RelativeLayout linearLayout;
TextView name,count ;
public listHolder(View itemView) {
super(itemView);
thumbnail = (ImageView) itemView.findViewById(R.id.thumbnail);
layout = (RelativeLayout) itemView.findViewById(R.id.each);
linearLayout = (RelativeLayout) itemView.findViewById(R.id.blur_lay);
name = (TextView) itemView.findViewById(R.id.name);
count = (TextView) itemView.findViewById(R.id.count);
}
}
} |
<reponame>dot-cat/shp_android_clients
package space.dotcat.assistant.di.activitiesComponents.roomsActivity;
import dagger.Subcomponent;
import space.dotcat.assistant.di.activitiesComponents.ActivityScope;
import space.dotcat.assistant.screen.roomList.RoomsActivity;
@Subcomponent(modules = RoomsModule.class)
@ActivityScope
public interface RoomListActivityComponent {
void inject(RoomsActivity roomsActivity);
}
|
<gh_stars>1-10
fn main() {
println!("Run example tests: cargo test --example=at24c04-test1 -- --test-threads=1");
}
#[cfg(test)]
mod test {
use eeprom24x::Eeprom24x;
use eeprom24x::SlaveAddr;
use ftdi_embedded_hal as hal;
use ftdi_embedded_hal::x232h::FTx232H;
use std::thread::sleep;
use std::time::Duration;
#[test]
fn at24x_test_t1() {
let dev = FTx232H::init(0x0403, 0x6014).unwrap();
let i2c = dev.i2c(hal::i2c::I2cSpeed::CLK_400kHz).unwrap();
let mut eeprom = Eeprom24x::new_24x04(i2c, SlaveAddr::default());
let delay = Duration::from_millis(5);
let byte_w = 0xe5;
let addr = 0x0;
eeprom.write_byte(addr, byte_w).unwrap();
sleep(delay);
let byte_r = eeprom.read_byte(addr).unwrap();
assert_eq!(byte_w, byte_r);
}
#[test]
fn at24x_test_t2() {
let dev = FTx232H::init(0x0403, 0x6014).unwrap();
let i2c = dev.i2c(hal::i2c::I2cSpeed::CLK_400kHz).unwrap();
let mut eeprom = Eeprom24x::new_24x04(i2c, SlaveAddr::default());
let delay = Duration::from_millis(5);
let data_w: [u8; 4] = [0xaa, 0xbb, 0xcc, 0xdd];
let mut data_r: [u8; 4] = [0; 4];
for i in 0..data_w.len() {
eeprom.write_byte(i as u32, data_w[i]).unwrap();
sleep(delay);
}
for i in 0..data_r.len() {
data_r[i] = eeprom.read_byte(i as u32).unwrap();
}
assert_eq!(data_w, data_r);
}
#[test]
fn at24x_test_t3() {
let dev = FTx232H::init(0x0403, 0x6014).unwrap();
let i2c = dev.i2c(hal::i2c::I2cSpeed::CLK_400kHz).unwrap();
let mut eeprom = Eeprom24x::new_24x04(i2c, SlaveAddr::default());
let delay = Duration::from_millis(5);
let data_w: [u8; 4] = [0xaa, 0xbb, 0xcc, 0xdd];
let mut data_r: [u8; 4] = [0; 4];
for i in 0..data_w.len() {
eeprom.write_byte(i as u32, data_w[i]).unwrap();
sleep(delay);
}
eeprom.read_data(0x0, &mut data_r).unwrap();
assert_eq!(data_w, data_r);
}
#[test]
fn at24x_test_t4() {
let dev = FTx232H::init(0x0403, 0x6014).unwrap();
let i2c = dev.i2c(hal::i2c::I2cSpeed::CLK_400kHz).unwrap();
let mut eeprom = Eeprom24x::new_24x04(i2c, SlaveAddr::default());
let delay = Duration::from_millis(50);
let addrs: [u32; 4] = [0x00, 0x10, 0x20, 0x30];
let mut data_r = [0x00; 16];
let data_w = [0xAB; 16];
for addr in addrs.iter() {
eeprom.write_page(*addr, &data_w).unwrap();
sleep(delay);
eeprom.read_data(*addr, &mut data_r).unwrap();
assert_eq!(data_w, data_r);
}
}
#[test]
fn at24x_test_t5() {
let dev = FTx232H::init(0x0403, 0x6014).unwrap();
let i2c = dev.i2c(hal::i2c::I2cSpeed::CLK_400kHz).unwrap();
let mut eeprom = Eeprom24x::new_24x04(i2c, SlaveAddr::default());
let delay = Duration::from_millis(5);
// check high memory addresses: 1 bit passed as a part of i2c addr
let addrs1: [u32; 4] = [0x100, 0x10F, 0x1F0, 0x1EE];
let byte_w1 = 0xe5;
let addrs2: [u32; 4] = [0x00, 0x0F, 0xF0, 0xEE];
let byte_w2 = 0xaa;
// write bytes
for addr in addrs1.iter() {
eeprom.write_byte(*addr, byte_w1).unwrap();
sleep(delay);
}
for addr in addrs2.iter() {
eeprom.write_byte(*addr, byte_w2).unwrap();
sleep(delay);
}
// read bytes and check
for addr in addrs1.iter() {
let byte_r = eeprom.read_byte(*addr).unwrap();
assert_eq!(byte_w1, byte_r);
sleep(delay);
}
for addr in addrs2.iter() {
let byte_r = eeprom.read_byte(*addr).unwrap();
assert_eq!(byte_w2, byte_r);
sleep(delay);
}
}
}
|
/**
* Created by "[email protected]" on 2018/8/27.
*/
public class SelectContactsPresenter extends BasePresenter<SelectContactsView> {
public static final String SELECTED_CONTACTS = "selected_contacts";
private List<ContactsInfo> mContactsInfoList = new LinkedList<>();
/**
* Constructor of BasePresenter.
* All of the subs who extents this BasePresenter must implement this method.
*
* @param mActivity Activity,it is often used as context.
*/
public SelectContactsPresenter(Activity mActivity) {
super(mActivity);
}
/**
* Initialize the view of App's callback.
*
* @param selectContactsView Generic parameter IV impl.
*/
@Override
public void initView(SelectContactsView selectContactsView) {
super.iv = selectContactsView;
}
/**
* 读取所有联系人
*/
public void readAllContacts() {
//联系人的Uri,也就是content://com.android.contacts/contacts
Uri uri = ContactsContract.Contacts.CONTENT_URI;
//指定获取_id和display_name两列数据,display_name即为姓名
String[] projection = new String[]{
ContactsContract.Contacts._ID,
ContactsContract.Contacts.DISPLAY_NAME
};
mContactsInfoList.clear();
//根据Uri查询相应的ContentProvider,cursor为获取到的数据集
Cursor cursor = mActivity.getContentResolver().query(uri, projection, null, null, null);
Cursor phonesCursor = null;
if (null == cursor) return;
if (cursor.moveToFirst()) {
do {
ContactsInfo info = new ContactsInfo();
Long id = cursor.getLong(0);
//获取姓名
String name = cursor.getString(1);
//指定获取NUMBER这一列数据
String[] phoneProjection = new String[]{
ContactsContract.CommonDataKinds.Phone.NUMBER
};
info.setName(name);
//根据联系人的ID获取此人的电话号码
phonesCursor = mActivity.getContentResolver().query(
ContactsContract.CommonDataKinds.Phone.CONTENT_URI,
phoneProjection,
ContactsContract.CommonDataKinds.Phone.CONTACT_ID + "=" + id,
null,
null);
//因为每个联系人可能有多个电话号码,所以需要遍历
if (phonesCursor != null && phonesCursor.moveToFirst()) {
do {
String num = phonesCursor.getString(0);
info.setPhoneNumber(num);
} while (phonesCursor.moveToNext());
}
mContactsInfoList.add(info);
} while (cursor.moveToNext());
}
cursor.close();
if (null != phonesCursor)
phonesCursor.close();
iv.onShowAllContacts(mContactsInfoList);
}
/**
* 遍历这个集合、找到所有被选择的联系人
*
* @return 被选中的联系人
*/
private List<ContactsInfo> getSelectedContacts() {
if (ContainerUtil.isNullOrEmpty(mContactsInfoList)) return null;
List<ContactsInfo> result = new LinkedList<>();
for (ContactsInfo info : mContactsInfoList) {
if (info.isSelected())
result.add(info);
}
return result;
}
/**
* 点击了“联系人”添加
*/
public void addContacts() {
final Intent intent = new Intent();
final List<ContactsInfo> selectedContacts = getSelectedContacts();
if (ContainerUtil.isNullOrEmpty(selectedContacts)) {
iv.onToast("您未选择任何联系人!");
} else {
AlertDialog alertDialog = new AlertDialog.Builder(mActivity)
.setTitle("提示")
.setMessage("确定添加所选择的联系人?")
.setNegativeButton("取消", null)
.setPositiveButton("确定", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Gson gson = new Gson();
String result = gson.toJson(selectedContacts);
intent.putExtra(SELECTED_CONTACTS, result);
mActivity.setResult(App.APP_RESULT_CODE, intent);
mActivity.finish();
}
})
.setCancelable(true)
.create();
alertDialog.setCanceledOnTouchOutside(false);
alertDialog.show();
}
}
} |
<reponame>OIEIEIO/ombre-working-old-chain-before-for-test<gh_stars>100-1000
// Copyright (c) 2014-2018, The Monero Project
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be
// used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Parts of this file are originally copyright (c) 2012-2013 The Cryptonote developers
#include "gtest/gtest.h"
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
// OS X, FreeBSD, and OpenBSD don't need malloc.h
#if !defined(__APPLE__) && !defined(__FreeBSD__) && !defined(__OpenBSD__) && \
!defined(__DragonFly__)
#include <malloc.h>
#endif
//#define TEST_ORIGINAL
//#define VERBOSE
#ifdef TEST_ORIGINAL
uint64_t slow_memmem_original(void *start_buff, size_t buflen, void *pat, size_t patlen)
{
void *buf = start_buff;
void *end = (char *)buf + buflen - patlen;
while((buf = memchr(buf, ((char *)pat)[0], buflen)))
{
if(buf > end)
return 0;
if(memcmp(buf, pat, patlen) == 0)
return (char *)buf - (char *)start_buff;
buf = (char *)buf + 1;
}
return 0;
}
#define slow_memmem slow_memmem_original
#else
namespace cryptonote
{
uint64_t slow_memmem(const void *start_buff, size_t buflen, const void *pat, size_t patlen);
}
using namespace cryptonote;
#endif
static const struct
{
size_t buflen;
const char *buf;
size_t patlen;
const char *pat;
uint64_t res;
} T[] = {
{0, "", 0, "", 0},
{1, "", 0, "", 0},
{0, "", 1, "", 0},
{1, "x", 1, "x", 0},
{2, "x", 1, "", 1},
{1, "x", 1, "", 0},
{1, "x", 2, "x", 0},
{2, "ax", 2, "x", 0},
{1, "xx", 2, "xx", 0},
{4, "abcd", 3, "abc", 0},
{4, "abcd", 3, "bcd", 1},
{4, "abcd", 4, "abcd", 0},
{4, "abcd", 1, "d", 3},
{4, "abcd", 1, "c", 2},
{4, "abcd", 1, "bc", 1},
{4, "abcd", 1, "", 0},
{3, "abcd", 1, "d", 0},
{5, "aaaab", 2, "ab", 3},
{7, "aaaabab", 2, "ab", 3},
{7, "aaaabab", 3, "abc", 0},
{4, "abcd", 2, "cd", 2},
{3, "abcd", 2, "cd", 0},
{3, "a\0b", 1, "", 1},
{3, "a\0b", 2, "\0b", 1},
{8, "xxxxxxab", 3, "xyz", 0},
{8, "xxxxxxab", 6, "abcdef", 0},
{9, "\0xxxxxab", 3, "ab", 6},
{4, "\0\0a", 3, "\0a", 1}, //
};
TEST(slowmem, Success)
{
size_t n;
for(n = 0; n < sizeof(T) / sizeof(T[0]); ++n)
{
#ifdef VERBOSE
printf("%3zu: ", n);
fflush(stdout);
#endif
void *buf = malloc(T[n].buflen);
memcpy(buf, T[n].buf, T[n].buflen);
void *pat = malloc(T[n].patlen);
memcpy(pat, T[n].pat, T[n].patlen);
uint64_t res = slow_memmem(buf, T[n].buflen, pat, T[n].patlen);
free(pat);
free(buf);
ASSERT_EQ(res, T[n].res);
#ifdef VERBOSE
if(res != T[n].res)
printf("failed (got %zu, expected %zu)", res, T[n].res);
else
printf("ok");
printf("\n");
#endif
}
}
|
<filename>include/dtc/utility/chrono.hpp<gh_stars>10-100
/******************************************************************************
* *
* Copyright (c) 2018, <NAME> and <NAME>, *
* University of Illinois at Urbana-Champaign (UIUC), IL, USA. *
* *
* All Rights Reserved. *
* *
* This program is free software. You can redistribute and/or modify *
* it in accordance with the terms of the accompanying license agreement. *
* See LICENSE in the top-level directory for details. *
* *
******************************************************************************/
#ifndef DTC_UTILITY_CHRONO_HPP_
#define DTC_UTILITY_CHRONO_HPP_
#include <chrono>
#include <sys/time.h>
namespace dtc {
// Function: now
// Return the current clock tick (steady_clock).
inline static std::chrono::steady_clock::time_point now() {
return std::chrono::steady_clock::now();
}
// Function: duration_cast
// chrono -> timeval
template <typename T, typename Rep, typename Period>
auto duration_cast (const std::chrono::duration<Rep, Period>& d)
-> std::enable_if_t<std::is_same<T, struct timeval>::value, struct timeval> {
struct timeval tv;
std::chrono::seconds const sec = std::chrono::duration_cast<std::chrono::seconds>(d);
tv.tv_sec = sec.count();
tv.tv_usec = std::chrono::duration_cast<std::chrono::microseconds>(d - sec).count();
return tv;
}
// Function: duration_cast
// timeval -> chrono
template <typename D>
auto duration_cast(const struct timeval& tv) {
return std::chrono::duration_cast<D> (
std::chrono::seconds(tv.tv_sec) + std::chrono::microseconds(tv.tv_usec)
);
}
}; // End of namespace dtc. ----------------------------------------------------------------------
#endif
|
/**
* An 'id' -> a string of characters that are all "id like", normally
* [a-zA-Z_][a-zA-Z0-9_]+, followed by a non id-like character.
*
* The definition of what is id-like can be configured when creating your
* grammar with the 'idchars' keyword argument.
*
* Returns the number of characters consumed (0 for invalid)
*/
int t_id(int at, char* text, int ln, char* idchars) {
int i = at;
if (!alpha_(text[i]) && strchr(idchars, text[i]) == NULL) {
return 0;
}
i += 1;
while (i < ln && (alpha_(text[i]) || num(text[i]) || strchr(idchars, text[i]) != NULL)) {
i++;
}
return i - at;
} |
/* Calculate the mousepointer from 2 IR sources (Default) */
void CWiiRemote::CalculateMousePointer(int x1, int y1, int x2, int y2)
{
int x3, y3;
x3 = ( (x1 + x2) / 2 );
y3 = ( (y1 + y2) / 2 );
x3 = (int)( ((float)x3 / (float)CWIID_IR_X_MAX) * m_MaxX);
y3 = (int)( ((float)y3 / (float)CWIID_IR_Y_MAX) * m_MaxY);
x3 = (int)(x3 - m_MinX);
y3 = (int)(y3 - m_MinY);
if (x3 < MOUSE_MIN) x3 = MOUSE_MIN;
else if (x3 > MOUSE_MAX) x3 = MOUSE_MAX;
if (y3 < MOUSE_MIN) y3 = MOUSE_MIN;
else if (y3 > MOUSE_MAX) y3 = MOUSE_MAX;
x3 = MOUSE_MAX - x3;
if (m_NumSamples == 1)
{
CPacketMOUSE mouse(x3, y3);
mouse.Send(m_Socket, m_MyAddr);
return;
}
else
{
for (int i = m_NumSamples; i > 0; i--)
{
m_SamplesX[i] = m_SamplesX[i-1];
m_SamplesY[i] = m_SamplesY[i-1];
}
m_SamplesX[0] = x3;
m_SamplesY[0] = y3;
long x4 = 0, y4 = 0;
for (int i = 0; i < m_NumSamples; i++)
{
x4 += m_SamplesX[i];
y4 += m_SamplesY[i];
}
CPacketMOUSE mouse((x4 / m_NumSamples), (y4 / m_NumSamples));
mouse.Send(m_Socket, m_MyAddr);
}
} |
<reponame>ourstudio-se/ouretl-abstractions<gh_stars>0
package ouretl
// DataMessage is a wrapper abstraction for the raw message provided
// to the DataHandlerPlugins. Apart from the actual data
// content, the message has an `Origin` field containing
// a WorkerPlugin name and an `ID` field with a unique
// message ID.
type DataMessage interface {
ID() string
Data() []byte
Origin() string
}
|
Police in California have arrested a man this week for spray-painting “Taste likes hate” on the side of a Chick-fil-A restaurant in West Hollywood. The graffiti was accompanied by an image of a cow holding a paint brush—a dig at the company’s “Eat Mor Chikin” campaign.
Artist Manuel Castro was arrested on Wednesday after taking credit for the work in a Huffington Post interview. It’s believed he tagged the building last Friday, during “National Same-Sex Kiss Day.”
In his interview, Castro, 30, explained he was trying to start a discussion about “tolerance and acceptance”:
“I didn’t use violence. I used paint. Artists for centuries have expressed their opinions through this medium and I am no different,” he said. “I am happy to pay for the costs of repainting the wall, but I am not —nor will I ever be — happy to sit quietly at the back of the bus.” Well, at least he used proper spelling, something Chick-fil-A seems to have a problem with. Photos: Sandy Mazza/Daily Breeze, Torrance Police Department |
// Searching using Search Bar Filter in React Native List View
// https://aboutreact.com/react-native-search-bar-filter-on-listview/
// import React in our code
import React, {useState, useEffect} from 'react';
//import {funcGetSelBusStop} from '../../function/funcGetSelBusStop';
import {Text, StyleSheet, View, FlatList, TextInput, Alert} from 'react-native';
import {funcGetWholeBus} from '../../function/funcGetWholeBus';
import {HomeScreens} from '../../navigators/index';
import {useNavigation} from '@react-navigation/native';
const busSearchBar = () => {
const navigation = useNavigation();
const [search, setSearch] = useState('');
const [filteredDataSource, setFilteredDataSource] = useState([]);
const [masterDataSource, setMasterDataSource] = useState([]);
const getWholeBusList = async () => {
try {
let getResult = await funcGetWholeBus();
// console.log('GETRESULT : ', getResult);
setFilteredDataSource(getResult);
setMasterDataSource(getResult);
} catch (e) {
Alert.alert('오류 발생');
}
};
useEffect(() => {
getWholeBusList();
}, []);
const searchFilterFunction = (text: React.SetStateAction<string>) => {
if (text) {
const newData = masterDataSource.filter(function (item: {노선번호: any}) {
const itemData = item.노선번호
? item.노선번호.toUpperCase()
: ''.toUpperCase();
const textData = text.toString().toUpperCase();
return itemData.indexOf(textData) > -1;
});
setFilteredDataSource(newData);
setSearch(text);
} else {
setFilteredDataSource(masterDataSource);
setSearch(text);
}
};
const ItemView = ({item}: any) => {
const busNumber = item.노선번호;
return (
// Flat List Item
<Text
style={styles.itemStyle}
onPress={() =>
navigation.navigate(HomeScreens.BusInfoMain, {busNumber: busNumber})
}>
{/* {item.노선번호} */}
{/* {'.'} */}
{item.노선번호}
</Text>
);
};
const ItemSeparatorView = () => {
return (
// Flat List Item Separator
<View
style={{
height: 0.5,
width: '100%',
backgroundColor: '#C8C8C8',
}}
/>
);
};
/**
const goToReservation = async (item: { 노선번호: any; }) => {
// Function for click on an item
Alert.alert('Id : ' + item.노선번호 + ' Title : ' + item.노선번호);
const busNumber = item.노선번호;
let getResult = await funcGetSelBusStop({busNumber});
};
*/
return (
<View style={styles.container}>
<TextInput
style={styles.textInputStyle}
onChangeText={text => searchFilterFunction(text)}
value={search}
underlineColorAndroid="transparent"
placeholder="버스 검색"
/>
<FlatList
data={filteredDataSource}
keyExtractor={(item, index) => index.toString()}
ItemSeparatorComponent={ItemSeparatorView}
renderItem={ItemView}
/>
</View>
);
};
const styles = StyleSheet.create({
container: {
backgroundColor: 'white',
},
itemStyle: {
padding: 10,
},
textInputStyle: {
height: 40,
borderWidth: 1,
paddingLeft: 20,
margin: 5,
borderColor: '#009688',
backgroundColor: '#FFFFFF',
},
});
export default busSearchBar;
|
/* Copyright (c) 2011, TrafficLab, Ericsson Research, Hungary
* Copyright (c) 2012, CPqD, Brazil
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Ericsson Research nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
*
*/
#ifndef OFL_ACTIONS_H
#define OFL_ACTIONS_H 1
#include <sys/types.h>
#include <stdio.h>
#include "ofl.h"
#include "ofl-structs.h"
#include "../include/openflow/openflow.h"
struct ofl_exp;
/****************************************************************************
* Action structure definitions
****************************************************************************/
/* Common header for actions. All action structures - including experimenter
* ones - must start with this header. */
struct ofl_action_header {
enum ofp_action_type type; /* One of OFPAT_*. */
uint16_t len; /* Total length */
};
struct ofl_action_output {
struct ofl_action_header header; /* OFPAT_OUTPUT. */
uint32_t port; /* Output port. */
uint16_t max_len; /* Max length to send to controller. */
};
struct ofl_action_mpls_ttl {
struct ofl_action_header header; /* OFPAT_SET_MPLS_TTL. */
uint8_t mpls_ttl; /* MPLS TTL */
};
struct ofl_action_push {
struct ofl_action_header header; /* OFPAT_PUSH_VLAN/MPLS/PBB. */
uint16_t ethertype; /* Ethertype */
};
struct ofl_action_pop_mpls {
struct ofl_action_header header; /* OFPAT_POP_MPLS. */
uint16_t ethertype; /* Ethertype */
};
struct ofl_action_set_queue {
struct ofl_action_header header; /* OFPAT_SET_QUEUE. */
uint32_t queue_id;
};
struct ofl_action_group {
struct ofl_action_header header; /* OFPAT_GROUP. */
uint32_t group_id; /* Group identifier. */
};
struct ofl_action_set_nw_ttl {
struct ofl_action_header header; /* OFPAT_SET_NW_TTL. */
uint8_t nw_ttl;
};
struct ofl_action_set_field {
struct ofl_action_header header; /* OFPAT_SET_FIELD. */
struct ofl_match_tlv *field;
};
struct ofl_action_experimenter {
struct ofl_action_header header; /* OFPAT_EXPERIMENTER. */
uint32_t experimenter_id; /* Experimenter ID */
};
/****************************************************************************
* Functions for (un)packing action structures
****************************************************************************/
/* Packs the action in src to the memory location beginning at the address
* pointed at by dst. The return value is the length of the resulted structure.
* In case of an experimenter action, it uses the passed in experimenter
* callback. */
size_t
ofl_actions_pack(struct ofl_action_header *src, struct ofp_action_header *dst, uint8_t* data, struct ofl_exp *exp);
/* Given a list of action in OpenFlow wire format, these function returns
* the count of those actions in the passed in byte array. The functions
* return an ofl_err in case of an error, or 0 on succes. */
ofl_err
ofl_utils_count_ofp_actions(void *data, size_t data_len, size_t *count);
/* Unpacks the wire format action in src to a new memory location and returns a
* pointer to the location in dst. Returns 0 on success. In case of an
* experimenter action, it uses the passed in experimenter callback. */
ofl_err
ofl_actions_unpack(struct ofp_action_header *src, size_t *len, struct ofl_action_header **dst, struct ofl_exp *exp);
/****************************************************************************
* Functions for freeing action structures
****************************************************************************/
/* Calling this function frees the passed in action structure. In case of an
* experimenter action, it uses the passed in experimenter callback. */
void
ofl_actions_free(struct ofl_action_header *act, struct ofl_exp *exp);
/****************************************************************************
* Functions for freeing structures
****************************************************************************/
/* Returns the length of the resulting OpenFlow action structure from
* converting the passed in action. In case of an experimenter action, it uses
* the passed in experimenter callback. */
size_t
ofl_actions_ofp_total_len(struct ofl_action_header **actions, size_t actions_num, struct ofl_exp *exp);
/* Returns the length of the resulting OpenFlow action structures from
* converting the passed in list of actions. In case of an experimenter action,
* it uses the passed in experimenter callback. */
size_t
ofl_actions_ofp_len(struct ofl_action_header *action, struct ofl_exp *exp);
/****************************************************************************
* Functions for printing actions
****************************************************************************/
/* Converts the passed in action to a string format. In case of an experimenter
* action, it uses the passed in experimenter callback. */
char *
ofl_action_to_string(struct ofl_action_header *act, struct ofl_exp *exp);
/* Converts the passed in action to a string format and adds it to the dynamic
* string. In case of an experimenter action, it uses the passed in
* experimenter callback. */
void
ofl_action_print(FILE *stream, struct ofl_action_header *act, struct ofl_exp *exp);
#endif /* OFL_ACTIONS */
|
/*
* Copyright (c) 2019 Bixbit - <NAME>. All rights reserved.
* See LICENCE.txt file for licensing information.
*/
package pl.edu.icm.unity.oauth.as.console;
import java.util.List;
import java.util.function.Function;
import pl.edu.icm.unity.oauth.as.token.OAuthTokenEndpoint;
import pl.edu.icm.unity.oauth.as.webauthz.OAuthAuthzWebEndpoint;
import pl.edu.icm.unity.types.endpoint.Endpoint.EndpointState;
import pl.edu.icm.unity.webui.console.services.DefaultServiceDefinition;
import pl.edu.icm.unity.webui.console.services.ServiceDefinition;
/**
* Contains information of {@link OAuthAuthzWebEndpoint} and
* {@link OAuthTokenEndpoint} configurations and OAuth clients.
*
* @author P.Piernik
*
*/
class OAuthServiceDefinition implements ServiceDefinition
{
private DefaultServiceDefinition webAuthzService;
private DefaultServiceDefinition tokenService;
private Function<String, List<OAuthClient>> systemClientsSupplier;
private List<OAuthClient> selectedClients;
private String autoGeneratedClientsGroup;
OAuthServiceDefinition(DefaultServiceDefinition oauthWebAuthService, DefaultServiceDefinition tokenService)
{
this.webAuthzService = oauthWebAuthService;
this.tokenService = tokenService;
}
@Override
public String getName()
{
return webAuthzService.getName();
}
@Override
public EndpointState getState()
{
return webAuthzService.getState();
}
@Override
public String getType()
{
return webAuthzService.getType();
}
@Override
public String getBinding()
{
return webAuthzService.getBinding();
}
public DefaultServiceDefinition getWebAuthzService()
{
return webAuthzService;
}
public DefaultServiceDefinition getTokenService()
{
return tokenService;
}
public List<OAuthClient> getClients(String group)
{
return systemClientsSupplier.apply(group);
}
public void setClientsSupplier(Function<String, List<OAuthClient>> clientsSupplier)
{
this.systemClientsSupplier = clientsSupplier;
}
public List<OAuthClient> getSelectedClients()
{
return selectedClients;
}
public void setSelectedClients(List<OAuthClient> serviceClients)
{
this.selectedClients = serviceClients;
}
public String getAutoGeneratedClientsGroup()
{
return autoGeneratedClientsGroup;
}
public void setAutoGeneratedClientsGroup(String autoGeneratedGroup)
{
this.autoGeneratedClientsGroup = autoGeneratedGroup;
}
}
|
package com.github.gpluscb.toni.statsposting.dbots;
public class StatsResponse {
private final int shardCount;
private final long guildCount;
public StatsResponse(int shardCount, long guildCount) {
this.shardCount = shardCount;
this.guildCount = guildCount;
}
public int getShardCount() {
return shardCount;
}
public long getGuildCount() {
return guildCount;
}
}
|
import os
import json
from utils import initlogger
from API.tickerprice import StockTicker
class Variables:
logger = initlogger.getloggerobj(os.path.basename(__file__))
logger.info("Logger init")
ticker = StockTicker()
SYMBOL, PRICE, DELETE = range(3)
sym, thresh = "", 0
nifty_stocks = json.load(open("./assets/nifty_components.json", "r"))['stocks']
|
Genetically high angiotensin-converting enzyme concentrations causally increase asthma risk: A meta-analysis using Mendelian randomization
Objectives This meta-analysis aimed to test the association of angiotensin-converting enzyme (ACE) gene I/D polymorphism with asthma risk and circulating ACE changes. Methods Public literature retrieval, publication selection, and information extraction were completed independently by two investigators. Effect-size values are expressed as odds ratios (ORs) or standardized mean differences (SMDs) with a 95% confidence interval (95% CI). Results Nineteen studies (2,888 patients and 9,549 controls) fulfilled the eligibility criteria. Overall investigations demonstrated that ACE gene I/D polymorphism was significantly associated with asthma risk under allelic (OR, 95% CI: 1.26, 1.08 to 1.48), homozygous genotypic (1.50, 1.09 to 2.06), and recessive (1.53, 1.24 to 1.89) models with moderate heterogeneity (I2 statistic: 64% to 79%). Subsidiary investigations recorded that race, matched status, asthma diagnosis, sample size, and age possibly accounted for the existence of significant heterogeneity. Relative to carriers with the II genotype, those with the DD genotype, ID genotype, and the combination of DD and ID genotypes had significantly higher concentrations of circulating ACE (WMD: 3.13, 2.07, and 2.83 U/L, respectively, p < 0.05). Adoption of Mendelian randomization analyses revealed that one unit increment in circulating ACE concentrations was found to be significantly associated with a 1.14-fold increased risk of asthma (95% CI: 1.02 to 4.24). Conclusion We provided strong meta-analytical evidence supporting the causal implication of high circulating ACE concentrations in the development of asthma.
Introduction
Asthma is a highly heritable disease. The heritability of childhood asthma reached as high as 82% (1). A long list of asthma-susceptibility genes has been identified (2,3). However, no consensus exists on which gene actually involves the pathogenesis of asthma, even with genome-wide association studies (4)(5)(6). In this regard, the candidate gene approach still represents an alternative strategy (7). Based on a known biological function, a gene can be a candidate to precipitate asthma. Importantly, the gene can be screened to see which mutation actually embodies its function. One such case is the insertion/deletion (I/D) polymorphism in the gene coding angiotensin-converting enzyme (ACE).
The association of ACE gene I/D polymorphism with asthma has been widely studied. For example, the DD genotype of the ACE gene was overrepresented in patients with asthma relative to healthy controls (8). In an early meta-analysis, the DD homozygote carriers had an overall about 60% increased risk of asthma compared with the II + ID carriers, and this risk was more evident in Asians (9). A recent metaanalysis in children indicated that ACE gene I/D polymorphism was associated with a significant risk of asthma (10). Other studies, however, did not reveal any significance between this polymorphism and asthma risk (11)(12)(13). The reasons behind this inconsistency are multiple, likely because of differences in origins, baseline characteristics of study participants, diagnosis criteria of asthma, and statistical power to derive significance. To this point, the synthesis of individually underpowered studies with the same research goals can help shed some light on these reasons.
To yield robust evidence, we aimed to perform an updated meta-analysis and test the association of ACE gene I/D polymorphism with asthma risk and changes in circulating ACE concentrations. Meanwhile, heterogeneity sources attributable to inconsistent observations were explored.
Performance guidelines
This meta-analysis was performed according to the guidelines in the preferred reporting items for systematic reviews and meta-analyses (PRISMA) statement. The PRISMA checklist is shown in Supplementary Table 1, and the PRISMA flow diagram is shown in Figure 1.
Search strategies
Using predefined key terms, four electronic databases-PubMed, HuGE Navigator, EMBASE (Excerpt Medica Database), and Web of Science-were searched from literature inception through March 2022. Key terms used for searching possibly eligible publications were formulated using the MeSH (Medical Subject Headings) database and are expressed in the Boolean format, that is ("asthma" or "atopic") and ("polymorphism" or "SNP" or "variant" or "mutation" or "variation" or "genetic" or "genotype" or "allele") and ("ACE" or "angiotensin-converting enzyme" or "angiotensin I converting enzyme" or "ACE1" or "DCP" or "DCP1" or "CD143").
Initial screening of searched publications was restricted to the English language and human beings. The search was also extended to the reference lists of major publications (reviews and meta-analyses) retrieved. The search was independently performed by two investigators (Q. H. and F. Y.), and any difference in numbers was resolved via discussion and a consensus was attained.
Inclusion criteria
Included publications must concurrently meet the following four criteria: (i) class of evidence: case-control design; (ii) outcome: asthma with clear definition; (iii) necessary data: genotype counts (or allele counts in the absence of genotype counts) of ACE gene I/D polymorphism between patients with asthma and controls, or circulating ACE concentrations across I/D genotypes in either patients or controls or both; and (iv) genotype determination: valid methodology.
Exclusion criteria
Publications were excluded if one or more of the following criteria were met: (i) type of publication: review, letter to editor or correspondence, editorial, comment, conference abstract, and case report or series; (ii) publication with duplicate participant samples; (iii) involvement of only cases; (iv) endpoint other than asthma; and (v) unpublished data.
Publication selection
The selection of eligible publications was handled in two steps. First, the title and abstract (if available) were reviewed and removal was applied based on exclusion criteria. After the first round of publication removal, the full text was read and eligibility was checked based on inclusion criteria.
The selection process of eligible publications was completed independently by two investigators (Q. H. and F. Y.), and any divergence was solved by discussion and if necessary by a third investigator (W. N.).
FIGURE
The PRISMA flowchart illustrates the selection process of qualified articles in this meta-analysis.
Information extraction
By the use of a uniform data extraction Excel sheet, information from each qualified publication was independently extracted by two investigators (Q. H. and F. Y.), and two Excel sheets were compared by kappa statistics. Any disagreement was solved by rechecking the full text until a consensus was attained.
The first author's name, year of publication, country where participants were enrolled, race, sample size, design, source of controls, matched condition, diagnosis of asthma, genotype counts of ACE gene I/D polymorphism, and baseline characteristics of study participants, including age, gender, and ACE concentrations in circulation, were all extracted.
Statistical analyses
STATA software version 14.1 for Windows (Stata Corp, College Station, Texas) was utilized for this meta-analysis. The association of ACE gene I/D polymorphism with the risk of asthma was measured by odds ratio (OR) with a 95% confidence interval (95% CI). Changes in circulating ACE concentrations between genotypes of this polymorphism were denoted by standardized mean difference (SMD) with a 95% CI. Both OR and SMD were derived under the random-effects model, because in the absence of heterogeneity, fixed-effects and random-effects models yield very similar estimates, whereas, in the presence of heterogeneity, the random-effects model is preferred (14). In the case of the significant association of ACE gene I/D polymorphism with asthma risk and ACE changes in circulation, the Mendelian randomization technique was employed to infer the possible causality between circulating ACE and asthma.
Between-study heterogeneity was measured by the inconsistency index (I 2 ) statistic. I 2 denotes the percent of variability observed between studies that are the result of heterogeneity but not a chance observation. Higher I 2 indicates a higher likelihood of heterogeneity. An I 2 statistic of over 50% is indicative of significant heterogeneity. The sources of heterogeneity were statistical, clinical, and methodological aspects. To track these sources, subsidiary analyses according to pre-specified factors (including sample size, race, matched condition, source of controls, and diagnosis of asthma) and meta-regression analyses of both continuous and categorical factors were carried out. Subgroups involving two or more studies are displayed.
In addition, cumulative analyses were conducted to see the impact of the first publication on subsequent publications and evolution of accumulated estimates over time. Sensitivity analyses were conducted to see the impact of any single study on the overall effect-size estimate by removing an individual study each time to check whether any of these estimates can bias the overall estimates.
Publication bias was assessed by Begg's funnel plot. If the funnel shape is asymmetrically inverted, it suggests a correlation between pooled estimate and study size (publication bias or small study bias). From a statistical aspect, publication bias was measured by Egger's test, which appraises funnel asymmetry with a significance level set at 10%.
In total, 19 independent studies were isolated from 17 qualified publications, including 2,888 patients with asthma and 9,549 controls, among who the genotypes of ACE gene I/D polymorphism were assayed with validated typing methods. Out of 19 independent studies, 3 (including 431 subjects) provided data on the changes in circulating ACE concentrations across the genotypes of this polymorphism.
Baseline characteristics
The baseline characteristics of qualified studies are shown in Table 1. Thirteen studies were performed among adults, three among children, and one among both.
Overall analyses
The association between ACE gene I/D polymorphism and asthma risk was separately evaluated under allelic, homozygous genotypic, dominant, and recessive models of inheritance. Figure 2 shows the overall forest plots of the four models. This polymorphism was significantly associated with asthma risk in the allelic model, with the D allele corresponding to 1.26 times more likely to have asthma than the I allele (95% CI: 1.08 to 1.48). Significance was also noticed under homozygous genotypic (DD vs. II: OR = 1.50, 95% CI: 1.09 to 2.06) and recessive (DD vs. ID plus II: OR = 1.53, 95% CI: 1.24 to 1.89) models. However, there was no observable association under the dominant model (DD plus DI vs. II: OR = 1.14, 95% CI: 0.87 to 1.48). As of between-study heterogeneity, the I 2 statistic ranged from 64 to 79% across the four models of inheritance, suggesting that diversity in effect-size estimates was not due to chance.
Subgroup analyses
As summarized in Table 2, the association between ACE gene I/D polymorphism and asthma risk was examined upon stratification by several potential factors on a categorical scale under the four models of inheritance.
It is worth noticing that race, matched status, asthma diagnosis, sample size, and age were possible sources of betweenstudy heterogeneity, particularly under the recessive model. For example, the mutation of ACE gene I/D polymorphism was associated with the significant risk of asthma in Caucasians under allelic and recessive models, with the odds reaching 1.19 (95% CI: 1.00 to 1.41) and 1.41 (95% CI: 1.04 to 1.90), respectively, and no significance was observed in East Asians, irrespective of the models of inheritance.
The majority of subgroups showed improved betweenstudy heterogeneity by pooling studies with homogeneous characteristics of interest.
Meta-regression analyses
An alternative way to explore sources of between-study heterogeneity is to perform meta-regression analyses. By regressing age, gender, asthma severity, race, matched status, asthma diagnosis, sample size, and study design (Supplementary Table 2), no hints of significance were seen at a significance level of 5%.
Genotype-phenotype analyses
The relationship between ACE gene I/D polymorphism and circulating ACE concentrations is shown in Table 3. Relative to carriers with the II genotype, those with the DD genotype, ID genotype, and the combined DD and ID genotypes showed significantly higher concentrations of circulating ACE (WMD: 3.13, 2.07, and 2.83 U/L, respectively, p < 0.05 for all).
Mendelian randomization analyses
Given the significance observed in both genotypedisease and genotype-phenotype analyses, the Mendelian randomization technique was utilized to infer the possible causal association between circulating ACE and asthma risk. Under the assumptions of the Mendelian randomization technique and by use of ACE gene I/D polymorphism as an instrumental variable, one unit increment in circulating ACE concentrations was found to be significantly associated with a 1.14-fold increased risk of asthma (95% CI: 1.02 to 4.24) under the homozygous genotypic model.
Discussion
The aim of this meta-analysis was to test the association of ACE gene I/D polymorphism with asthma risk and circulating ACE changes as well as explore sources for heterogeneity in the English literature. Through a comprehensive pooling of 19 independent studies involving 2,888 cases and 9,549 controls, this polymorphism was associated with a significant risk of asthma and changes in circulating ACE concentrations. Importantly, further adoption of the Mendelian randomization technique revealed that genetically increased ACE concentrations were causally associated with an increased risk of asthma. To the best of our knowledge, this is thus far the first meta-analytical evidence concerning the causal relation between circulating ACE and asthma risk in the literature.
It is well-known that asthma is a chronic pulmonary disease characterized by intermittent and reversible airflow obstruction (29). The exact etiology of asthma currently remains elusive; however, there is convincing evidence that asthma is a highly inheritable disease (1,30). To shed light on the genetic profiles of asthma and seek reasons attributable to the inconsistency of previous individual studies, we, in this meta-analysis, aimed to test the association of ACE gene I/D polymorphism with asthma risk, as well as with circulating ACE concentrations. Our genotype-disease and genotypephenotype analyses showed that carriers of the mutant DD homozygote had a 50% increased risk of asthma and 3.13 U/L increased concentrations of circulating ACE relative to the wild II homozygote. Under the assumptions of the Mendelian randomization technique, it is expected that circulating ACE may be a causal risk factor for the development of asthma. The implication of circulating ACE in asthma is biologically plausible. There is evidence that ACE expressed in the lungs plays a key role in the pathogenesis of bronchial asthma. It is because ACE can mediate the proliferation of smooth vascular muscle cells (31), which affects aggregation and adhesion of platelets and monocytes and consequently leads to excessive bronchiectasis (32). The biological mechanism behind the causal implication of circulating ACE in asthma is not clear at present (33). It is reasonable to speculate that if involved, the mutation of I/D polymorphism can alter the expression of ACE in circulation or tissues, which triggers the development of asthma.
It is also worth noticing that according to our subsidiary analyses, differences in race, matched status, asthma diagnosis, sample size, and age might account for previously diverging findings of individual studies. Taking race as an example, we found that the susceptibility of ACE gene I/D polymorphism to asthma was race-dependent, with significance observed in Caucasians but not in East Asians, in agreement with the findings of previous studies (16,18,19,21,34). Indeed, asthma is a multifactorial disease to which genetic, environmental, and lifestyle-related factors contribute jointly (35). For feasibility reasons, it is recommended to construct a list of candidate genetic determinants for asthma in each racial group. Moreover, diagnostic criteria for asthma can also confound the association between ACE gene I/D polymorphism and asthma risk. In .
/fmed. . this meta-analysis, significance was observed in studies based on ATS and GINA criteria, whereas there was no observable significance in studies with self-reported asthma. To derive a reliable estimate, it is important to diagnose asthma formally. Differing from the observations of subsidiary analyses, we failed to reveal any statistical significance in meta-regression analyses, an alternative method to explore sources of betweenstudy heterogeneity. It is of practical importance to bear in mind that meta-regression analyses, albeit enabling covariates in either continuous or categorical format to be regressed, do not have the methodological rigor of a properly designed study that is intended to test the effect of these covariates formally (36). Another important finding is the obvious changes in circulating ACE between genotypes of ACE gene I/D polymorphism. Considering the fact that the I/D polymorphism is located in the 16th intron, it is unlikely to be functional at the transcription level. There is a possibility that this polymorphism is strongly linked to another functional locus in either the promoter or exon or 3 ′ -untranslated region of the ACE gene that is responsible for the regulation of circulating ACE concentrations. Further genomic and functional explorations of the ACE gene are encouraged.
Limitations
Several limitations should be acknowledged in this meta-analysis. First, this meta-analysis synthesized evidence from publications written in the English language, and selection bias cannot be excluded. Second, all included studies are cross-sectionally designed; however, causality was inferred by means of the Mendelian randomization technique. Third, only a few features were commonly provided by the majority of included studies, and it is expected that more features are needed to examine their potential confounding impact on between-study heterogeneity. Fourth, there was moderate evidence of publication bias, which might limit the generalizability of our findings.
Conclusion
Taken together, we, for the first time, provided systematic evidence supporting the causal implication of high circulating ACE in the development of asthma by means of the Mendelian randomization technique. Further experimental studies are needed to determine the culprit genetic loci in the ACE gene that can simultaneously regulate circulating ACE concentrations and precipitate the onset and progression of asthma.
Data availability statement
The original contributions presented in the study are included in the article/Supplementary material, further inquiries can be directed to the corresponding authors. |
<reponame>franderg/PULPino<gh_stars>10-100
/*
FreeRTOS V8.2.2 - Copyright (C) 2015 Real Time Engineers Ltd.
All rights reserved
VISIT http://www.FreeRTOS.org TO ENSURE YOU ARE USING THE LATEST VERSION.
This file is part of the FreeRTOS distribution.
FreeRTOS is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License (version 2) as published by the
Free Software Foundation >>!AND MODIFIED BY!<< the FreeRTOS exception.
***************************************************************************
>>! NOTE: The modification to the GPL is included to allow you to !<<
>>! distribute a combined work that includes FreeRTOS without being !<<
>>! obliged to provide the source code for proprietary components !<<
>>! outside of the FreeRTOS kernel. !<<
***************************************************************************
FreeRTOS is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. Full license text is available on the following
link: http://www.freertos.org/a00114.html
***************************************************************************
* *
* FreeRTOS provides completely free yet professionally developed, *
* robust, strictly quality controlled, supported, and cross *
* platform software that is more than just the market leader, it *
* is the industry's de facto standard. *
* *
* Help yourself get started quickly while simultaneously helping *
* to support the FreeRTOS project by purchasing a FreeRTOS *
* tutorial book, reference manual, or both: *
* http://www.FreeRTOS.org/Documentation *
* *
***************************************************************************
http://www.FreeRTOS.org/FAQHelp.html - Having a problem? Start by reading
the FAQ page "My application does not run, what could be wrong?". Have you
defined configASSERT()?
http://www.FreeRTOS.org/support - In return for receiving this top quality
embedded software for free we request you assist our global community by
participating in the support forum.
http://www.FreeRTOS.org/training - Investing in training allows your team to
be as productive as possible as early as possible. Now you can receive
FreeRTOS training directly from <NAME>, CEO of Real Time Engineers
Ltd, and the world's leading authority on the world's leading RTOS.
http://www.FreeRTOS.org/plus - A selection of FreeRTOS ecosystem products,
including FreeRTOS+Trace - an indispensable productivity tool, a DOS
compatible FAT file system, and our tiny thread aware UDP/IP stack.
http://www.FreeRTOS.org/labs - Where new FreeRTOS products go to incubate.
Come and try FreeRTOS+TCP, our new open source TCP/IP stack for FreeRTOS.
http://www.OpenRTOS.com - Real Time Engineers ltd. license FreeRTOS to High
Integrity Systems ltd. to sell under the OpenRTOS brand. Low cost OpenRTOS
licenses offer ticketed support, indemnification and commercial middleware.
http://www.SafeRTOS.com - High Integrity Systems also provide a safety
engineered and independently SIL3 certified version for use in safety and
mission critical applications that require provable dependability.
1 tab == 4 spaces!
*/
#ifndef DEPRECATED_DEFINITIONS_H
#define DEPRECATED_DEFINITIONS_H
/* Each FreeRTOS port has a unique portmacro.h header file. Originally a
pre-processor definition was used to ensure the pre-processor found the correct
portmacro.h file for the port being used. That scheme was deprecated in favour
of setting the compiler's include path such that it found the correct
portmacro.h file - removing the need for the constant and allowing the
portmacro.h file to be located anywhere in relation to the port being used. The
definitions below remain in the code for backward compatibility only. New
projects should not use them. */
#ifdef OPEN_WATCOM_INDUSTRIAL_PC_PORT
#include "..\..\Source\portable\owatcom\16bitdos\pc\portmacro.h"
typedef void ( __interrupt __far *pxISR )();
#endif
#ifdef OPEN_WATCOM_FLASH_LITE_186_PORT
#include "..\..\Source\portable\owatcom\16bitdos\flsh186\portmacro.h"
typedef void ( __interrupt __far *pxISR )();
#endif
#ifdef GCC_MEGA_AVR
#include "../portable/GCC/ATMega323/portmacro.h"
#endif
#ifdef IAR_MEGA_AVR
#include "../portable/IAR/ATMega323/portmacro.h"
#endif
#ifdef MPLAB_PIC24_PORT
#include "../../Source/portable/MPLAB/PIC24_dsPIC/portmacro.h"
#endif
#ifdef MPLAB_DSPIC_PORT
#include "../../Source/portable/MPLAB/PIC24_dsPIC/portmacro.h"
#endif
#ifdef MPLAB_PIC18F_PORT
#include "../../Source/portable/MPLAB/PIC18F/portmacro.h"
#endif
#ifdef MPLAB_PIC32MX_PORT
#include "../../Source/portable/MPLAB/PIC32MX/portmacro.h"
#endif
#ifdef _FEDPICC
#include "libFreeRTOS/Include/portmacro.h"
#endif
#ifdef SDCC_CYGNAL
#include "../../Source/portable/SDCC/Cygnal/portmacro.h"
#endif
#ifdef GCC_ARM7
#include "../../Source/portable/GCC/ARM7_LPC2000/portmacro.h"
#endif
#ifdef GCC_ARM7_ECLIPSE
#include "portmacro.h"
#endif
#ifdef ROWLEY_LPC23xx
#include "../../Source/portable/GCC/ARM7_LPC23xx/portmacro.h"
#endif
#ifdef IAR_MSP430
#include "..\..\Source\portable\IAR\MSP430\portmacro.h"
#endif
#ifdef GCC_MSP430
#include "../../Source/portable/GCC/MSP430F449/portmacro.h"
#endif
#ifdef ROWLEY_MSP430
#include "../../Source/portable/Rowley/MSP430F449/portmacro.h"
#endif
#ifdef ARM7_LPC21xx_KEIL_RVDS
#include "..\..\Source\portable\RVDS\ARM7_LPC21xx\portmacro.h"
#endif
#ifdef SAM7_GCC
#include "../../Source/portable/GCC/ARM7_AT91SAM7S/portmacro.h"
#endif
#ifdef SAM7_IAR
#include "..\..\Source\portable\IAR\AtmelSAM7S64\portmacro.h"
#endif
#ifdef SAM9XE_IAR
#include "..\..\Source\portable\IAR\AtmelSAM9XE\portmacro.h"
#endif
#ifdef LPC2000_IAR
#include "..\..\Source\portable\IAR\LPC2000\portmacro.h"
#endif
#ifdef STR71X_IAR
#include "..\..\Source\portable\IAR\STR71x\portmacro.h"
#endif
#ifdef STR75X_IAR
#include "..\..\Source\portable\IAR\STR75x\portmacro.h"
#endif
#ifdef STR75X_GCC
#include "..\..\Source\portable\GCC\STR75x\portmacro.h"
#endif
#ifdef STR91X_IAR
#include "..\..\Source\portable\IAR\STR91x\portmacro.h"
#endif
#ifdef GCC_H8S
#include "../../Source/portable/GCC/H8S2329/portmacro.h"
#endif
#ifdef GCC_AT91FR40008
#include "../../Source/portable/GCC/ARM7_AT91FR40008/portmacro.h"
#endif
#ifdef RVDS_ARMCM3_LM3S102
#include "../../Source/portable/RVDS/ARM_CM3/portmacro.h"
#endif
#ifdef GCC_ARMCM3_LM3S102
#include "../../Source/portable/GCC/ARM_CM3/portmacro.h"
#endif
#ifdef GCC_ARMCM3
#include "../../Source/portable/GCC/ARM_CM3/portmacro.h"
#endif
#ifdef IAR_ARM_CM3
#include "../../Source/portable/IAR/ARM_CM3/portmacro.h"
#endif
#ifdef IAR_ARMCM3_LM
#include "../../Source/portable/IAR/ARM_CM3/portmacro.h"
#endif
#ifdef HCS12_CODE_WARRIOR
#include "../../Source/portable/CodeWarrior/HCS12/portmacro.h"
#endif
#ifdef MICROBLAZE_GCC
#include "../../Source/portable/GCC/MicroBlaze/portmacro.h"
#endif
#ifdef TERN_EE
#include "..\..\Source\portable\Paradigm\Tern_EE\small\portmacro.h"
#endif
#ifdef GCC_HCS12
#include "../../Source/portable/GCC/HCS12/portmacro.h"
#endif
#ifdef GCC_MCF5235
#include "../../Source/portable/GCC/MCF5235/portmacro.h"
#endif
#ifdef COLDFIRE_V2_GCC
#include "../../../Source/portable/GCC/ColdFire_V2/portmacro.h"
#endif
#ifdef COLDFIRE_V2_CODEWARRIOR
#include "../../Source/portable/CodeWarrior/ColdFire_V2/portmacro.h"
#endif
#ifdef GCC_PPC405
#include "../../Source/portable/GCC/PPC405_Xilinx/portmacro.h"
#endif
#ifdef GCC_PPC440
#include "../../Source/portable/GCC/PPC440_Xilinx/portmacro.h"
#endif
#ifdef _16FX_SOFTUNE
#include "..\..\Source\portable\Softune\MB96340\portmacro.h"
#endif
#ifdef BCC_INDUSTRIAL_PC_PORT
/* A short file name has to be used in place of the normal
FreeRTOSConfig.h when using the Borland compiler. */
#include "frconfig.h"
#include "..\portable\BCC\16BitDOS\PC\prtmacro.h"
typedef void ( __interrupt __far *pxISR )();
#endif
#ifdef BCC_FLASH_LITE_186_PORT
/* A short file name has to be used in place of the normal
FreeRTOSConfig.h when using the Borland compiler. */
#include "frconfig.h"
#include "..\portable\BCC\16BitDOS\flsh186\prtmacro.h"
typedef void ( __interrupt __far *pxISR )();
#endif
#ifdef __GNUC__
#ifdef __AVR32_AVR32A__
#include "portmacro.h"
#endif
#endif
#ifdef __ICCAVR32__
#ifdef __CORE__
#if __CORE__ == __AVR32A__
#include "portmacro.h"
#endif
#endif
#endif
#ifdef __91467D
#include "portmacro.h"
#endif
#ifdef __96340
#include "portmacro.h"
#endif
#ifdef __IAR_V850ES_Fx3__
#include "../../Source/portable/IAR/V850ES/portmacro.h"
#endif
#ifdef __IAR_V850ES_Jx3__
#include "../../Source/portable/IAR/V850ES/portmacro.h"
#endif
#ifdef __IAR_V850ES_Jx3_L__
#include "../../Source/portable/IAR/V850ES/portmacro.h"
#endif
#ifdef __IAR_V850ES_Jx2__
#include "../../Source/portable/IAR/V850ES/portmacro.h"
#endif
#ifdef __IAR_V850ES_Hx2__
#include "../../Source/portable/IAR/V850ES/portmacro.h"
#endif
#ifdef __IAR_78K0R_Kx3__
#include "../../Source/portable/IAR/78K0R/portmacro.h"
#endif
#ifdef __IAR_78K0R_Kx3L__
#include "../../Source/portable/IAR/78K0R/portmacro.h"
#endif
#endif /* DEPRECATED_DEFINITIONS_H */
|
Dynamic bloodflow studies of space-occupying lesions in the liver.
Blood flow to space-occupying lesions in the liver was studied by scintillation camera in a series of 170 patients after injection of 113m In-colloid. The curves obtained show a biphasic curve over the normal liver, absence of portal flow over a malignant lesion, and avascularity over benign lesions such as cysts or abscesses. The usefulness and limitations of this technique in distinguishing between a benign and malignant lesion in the liver are discussed. |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Team-XBMC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# This script is based on service.skin.widgets
# Thanks to the original authors
import xbmc
import xbmcgui
import xbmcaddon
import datetime
from resources.lib import library
LIBRARY = library.LibraryFunctions()
ADDON = xbmcaddon.Addon()
ADDON_VERSION = ADDON.getAddonInfo('version')
ADDON_NAME = ADDON.getAddonInfo('name')
def log(txt):
message = '%s: %s' % (ADDON_NAME, txt.encode('ascii', 'ignore'))
xbmc.log(msg=message, level=xbmc.LOGDEBUG)
class Main:
def __init__(self):
self.WINDOW = xbmcgui.Window(10000)
# clear our property, if another instance is already running
# it should stop now
self._init_vars()
self.WINDOW.clearProperty('LibraryDataProvider_Running')
a_total = datetime.datetime.now()
self._fetch_random()
self._fetch_recent()
self._fetch_recommended()
self._fetch_favourite()
b_total = datetime.datetime.now()
c_total = b_total - a_total
log('Total time needed for all queries: %s' % c_total)
# give a possible other instance some time to notice the empty property
self.WINDOW.setProperty('LibraryDataProvider_Running', 'true')
self._daemon()
def _init_vars(self):
self.WINDOW = xbmcgui.Window(10000)
self.Player = Widgets_Player(action=self._update)
self.Monitor = Widgets_Monitor(update_listitems=self._update)
def _fetch_random(self):
LIBRARY._fetch_random_movies()
LIBRARY._fetch_random_episodes()
LIBRARY._fetch_random_songs()
LIBRARY._fetch_random_albums()
LIBRARY._fetch_random_musicvideos()
def _fetch_recent(self):
LIBRARY._fetch_recent_movies()
LIBRARY._fetch_recent_episodes()
LIBRARY._fetch_recent_albums()
LIBRARY._fetch_recent_musicvideos()
def _fetch_recommended(self):
LIBRARY._fetch_recommended_movies()
LIBRARY._fetch_recommended_episodes()
LIBRARY._fetch_recommended_albums()
def _fetch_favourite(self):
LIBRARY._fetch_favourite_episodes()
def _daemon(self):
# deamon is meant to keep script running at all time
count = 0
while not self.Monitor.abortRequested() and self.WINDOW.getProperty('LibraryDataProvider_Running') == 'true':
if self.Monitor.waitForAbort(1):
# Abort was requested while waiting. We should exit
self.Monitor.update_listitems = None
self.Player.action = None
break
if not xbmc.Player().isPlayingVideo():
# Update random items
count += 1
if count == 1200: # 10 minutes
self._fetch_random()
count = 0 # reset counter
def _update(self, type):
xbmc.sleep(1000)
if type == 'movie':
LIBRARY._fetch_recommended_movies()
LIBRARY._fetch_recent_movies()
elif type == 'episode':
LIBRARY._fetch_recommended_episodes()
LIBRARY._fetch_recent_episodes()
LIBRARY._fetch_favourite_episodes()
elif type == 'video':
# only on db update
LIBRARY._fetch_recommended_movies()
LIBRARY._fetch_recommended_episodes()
LIBRARY._fetch_recent_movies()
LIBRARY._fetch_recent_episodes()
elif type == 'music':
LIBRARY._fetch_recommended_albums()
LIBRARY._fetch_recent_albums()
elif type == 'musicvideo':
LIBRARY._fetch_recent_musicvideos()
class Widgets_Monitor(xbmc.Monitor):
def __init__(self, *args, **kwargs):
xbmc.Monitor.__init__(self)
self.update_listitems = kwargs['update_listitems']
def onDatabaseUpdated(self, database):
self.update_listitems(database)
class Widgets_Player(xbmc.Player):
def __init__(self, *args, **kwargs):
xbmc.Player.__init__(self)
self.type = ""
self.action = kwargs["action"]
self.substrings = ['-trailer', 'http://']
def onPlayBackStarted(self):
xbmc.sleep(1000)
# Set values based on the file content
if (self.isPlayingAudio()):
self.type = "music"
else:
if xbmc.getCondVisibility('VideoPlayer.Content(movies)'):
filename = ''
isMovie = True
try:
filename = self.getPlayingFile()
except:
pass
if filename != '':
for string in self.substrings:
if string in filename:
isMovie = False
break
if isMovie:
self.type = "movie"
elif xbmc.getCondVisibility('VideoPlayer.Content(episodes)'):
# Check for tv show title and season
# to make sure it's really an episode
title = xbmc.getInfoLabel('VideoPlayer.TVShowTitle')
season = xbmc.getInfoLabel('VideoPlayer.Season')
if title and season:
self.type = "episode"
elif xbmc.getCondVisibility('VideoPlayer.Content(musicvideos)'):
self.type = "musicvideo"
def onPlayBackEnded(self):
self.onPlayBackStopped()
def onPlayBackStopped(self):
# type is set in onPlayBackStarted
if self.type:
self.action(self.type)
self.type = ""
log('service version %s started' % ADDON_VERSION)
Main()
log('service version %s stopped' % ADDON_VERSION)
|
package hdfs
import (
"errors"
"github.com/argoproj/argo-events/common"
gwcommon "github.com/argoproj/argo-events/gateways/common"
"github.com/ghodss/yaml"
corev1 "k8s.io/api/core/v1"
"k8s.io/client-go/kubernetes"
)
// EventSourceExecutor implements Eventing
type EventSourceExecutor struct {
Log *common.ArgoEventsLogger
// Clientset is kubernetes client
Clientset kubernetes.Interface
// Namespace where gateway is deployed
Namespace string
}
// GatewayConfig contains information to setup a HDFS integration
type GatewayConfig struct {
gwcommon.WatchPathConfig `json:",inline"`
// Type of file operations to watch
Type string `json:"type"`
// CheckInterval is a string that describes an interval duration to check the directory state, e.g. 1s, 30m, 2h... (defaults to 1m)
CheckInterval string `json:"checkInterval,omitempty"`
GatewayClientConfig `json:",inline"`
}
// GatewayClientConfig contains HDFS client configurations
type GatewayClientConfig struct {
// Addresses is accessible addresses of HDFS name nodes
Addresses []string `json:"addresses"`
// HDFSUser is the user to access HDFS file system.
// It is ignored if either ccache or keytab is used.
HDFSUser string `json:"hdfsUser,omitempty"`
// KrbCCacheSecret is the secret selector for Kerberos ccache
// Either ccache or keytab can be set to use Kerberos.
KrbCCacheSecret *corev1.SecretKeySelector `json:"krbCCacheSecret,omitempty"`
// KrbKeytabSecret is the secret selector for Kerberos keytab
// Either ccache or keytab can be set to use Kerberos.
KrbKeytabSecret *corev1.SecretKeySelector `json:"krbKeytabSecret,omitempty"`
// KrbUsername is the Kerberos username used with Kerberos keytab
// It must be set if keytab is used.
KrbUsername string `json:"krbUsername,omitempty"`
// KrbRealm is the Kerberos realm used with Kerberos keytab
// It must be set if keytab is used.
KrbRealm string `json:"krbRealm,omitempty"`
// KrbConfig is the configmap selector for Kerberos config as string
// It must be set if either ccache or keytab is used.
KrbConfigConfigMap *corev1.ConfigMapKeySelector `json:"krbConfigConfigMap,omitempty"`
// KrbServicePrincipalName is the principal name of Kerberos service
// It must be set if either ccache or keytab is used.
KrbServicePrincipalName string `json:"krbServicePrincipalName,omitempty"`
}
func parseEventSource(eventSource string) (interface{}, error) {
var f *GatewayConfig
err := yaml.Unmarshal([]byte(eventSource), &f)
if err != nil {
return nil, err
}
return f, err
}
// Validate validates GatewayClientConfig
func (c *GatewayClientConfig) Validate() error {
if len(c.Addresses) == 0 {
return errors.New("addresses is required")
}
hasKrbCCache := c.KrbCCacheSecret != nil
hasKrbKeytab := c.KrbKeytabSecret != nil
if c.HDFSUser == "" && !hasKrbCCache && !hasKrbKeytab {
return errors.New("either hdfsUser, krbCCacheSecret or krbKeytabSecret is required")
}
if hasKrbKeytab && (c.KrbServicePrincipalName == "" || c.KrbConfigConfigMap == nil || c.KrbUsername == "" || c.KrbRealm == "") {
return errors.New("krbServicePrincipalName, krbConfigConfigMap, krbUsername and krbRealm are required with krbKeytabSecret")
}
if hasKrbCCache && (c.KrbServicePrincipalName == "" || c.KrbConfigConfigMap == nil) {
return errors.New("krbServicePrincipalName and krbConfigConfigMap are required with krbCCacheSecret")
}
return nil
}
|
/**
* Check if this DIDDocument is expired.
*
* @return true if expired, false otherwise
*/
public boolean isExpired() {
Calendar now = Calendar.getInstance(Constants.UTC);
Calendar expireDate = Calendar.getInstance(Constants.UTC);
expireDate.setTime(expires);
return now.after(expireDate);
} |
/**
* This class is designed to compute one kind
* of food that is different from any kind of
* food in "last five meals", according to the
* probability of every level of foods.
*
* @author Shuolin Yang
*
*/
public class GetFood {
public GetFood(FoodsToChoose foodsInput) {
foods = foodsInput;
}
public String GetAKindOfFood(String[] lastFiveMeals) {
// Get a random number as probability.
double random = Math.random();
// According to probability, find foods of a level.
double point1 = foods.GetProLevel1();
double point2 = foods.GetProLevel1()+ foods.GetProLevel2();
String[] target;
if (random > 0.0 && random < point1) {
target = foods.GetFoodsLevel1();
} else if (random > point1 && random < point2) {
target = foods.GetFoodsLevel2();
} else {
target = foods.GetFoodsLevel3();
}
// Pick a kind of food which is not the same as any kind
// of food in "last five meals".
int monitor = 0;
String tempFood = null;
do {
// To avoid looping forever.
if (monitor == 2 * target.length) {
return new String("OUT OF BOUND");
}
// Get a random kind of food.
int index = (int)(Math.random() * (double)target.length );
tempFood = target[index];
monitor++;
} while (IfStringArrayHas(tempFood, lastFiveMeals));
// If has a kind of food which is not in "last five meals".
return tempFood;
}
private boolean IfStringArrayHas(String target, String[] array) {
int count;
for (count = 0; count < array.length; count++) {
if (array[count].equals(target)) {
break;
}
}
if (count == array.length) {
return false;
} else {
return true;
}
}
private FoodsToChoose foods;
} |
//
// main.cpp
// 116A
//
// Created by Parsa Alimohammadi on 3/23/1399 AP.
// Copyright © 1399 Parsa Alimohammadi. All rights reserved.
//
#include <iostream>
using namespace std;
int main()
{
int n,sum=0,maxs=0;
cin >> n;
for (int a,b; n>0; n--)
{
cin >> a >> b;
sum += b-a;
maxs = max(maxs,sum);
}
cout << maxs;
return 0;
}
|
Structural stability and non-catalytic nucleation inhibition effect of Si–Zr–B mould coating on undercooled superalloy melt
Abstract The investment moulding technique was first adopted to prepare a SiO2–ZrO2–B2O3 (Si–Zr–B) substrate layer on the inner surface of the mould, by employing SiO2 glass dust and ZrO2 powder, SiO2–ZrO2 sol, and analytical grade H3BO3 as refractory material, binder, and softening agent, respectively. Then using sol–gel processing, seven layers of Si–Zr–B film of the same formula as the aforementioned Si–Zr–B substrate layer were compounded with the substrate layer step by step. After glassing treatment at 850°C for 60 min, this film transformed into a glass lined coating. It was shown from X-ray diffraction analysis that, after holding it at a temperature of 1500°C for 30 min, the amount of crystallinity in the Si–Zr–B coating was about 1–3% (vol.-%). Finally, the undercooling experiment showed that a large undercooling (up to 140 K) was achieved in a DD3 (Ni–Cr–Mo–Al–Ti–Co–W) single crystal superalloy melt in this coated mould. So it is concluded that a Si–Zr–B coating has got a good structural stability at high temperature and provides ideal non-catalytic nucleation inhibition for an undercooled superalloy. |
def openapi_types():
return {
'object_id': (str,),
'display_name': (str,),
'street_side': (str,),
'business_name': (str,),
'address_line1': (str,),
'address_line2': (str,),
'address_line3': (str,),
'city': (str,),
'state_province': (str,),
'county': (str,),
'postal_code': (str,),
'latitude': (str,),
'longitude': (str,),
'status': (str,),
'urbanization_name': (str,),
'formatted_address': (str,),
'main_address_line': (str,),
'address_last_line': (str,),
'place_name': (str,),
'area_name1': (str,),
'area_name2': (str,),
'area_name3': (str,),
'area_name4': (str,),
'post_code': (str,),
'post_code_ext': (str,),
'country': (str,),
'address_number': (str,),
'street_name': (str,),
'unit_type': (str,),
'unit_value': (str,),
} |
/**
* Configuration properties relating to the sns service for gatekeeper
*/
@Component
@ConfigurationProperties(prefix="gatekeeper")
public class GatekeeperSnsProperties {
private SnsProperties sns;
public SnsProperties getSns() { return sns; }
public GatekeeperSnsProperties setSns(SnsProperties sns) {
this.sns = sns;
return this;
}
public static class SnsProperties {
private int retryCount = -1;
private int retryIntervalMillis = -1;
private int retryIntervalMultiplier = -1;
private String topicARN;
private String approvalTopicARN;
public String getApprovalTopicARN() { return approvalTopicARN; }
public SnsProperties setApprovalTopicARN(String approvalTopicARN) {
this.approvalTopicARN = approvalTopicARN;
return this;
}
public String getTopicARN() {
return topicARN;
}
public SnsProperties setTopicARN(String topicARN) {
this.topicARN = topicARN;
return this;
}
public int getRetryCount() {
return retryCount;
}
public int getRetryIntervalMillis() {
return retryIntervalMillis;
}
public SnsProperties setRetryIntervalMillis(int retryIntervalMillis) {
this.retryIntervalMillis = retryIntervalMillis;
return this;
}
public int getRetryIntervalMultiplier() {
return retryIntervalMultiplier;
}
public SnsProperties setRetryIntervalMultiplier(int retryIntervalMultiplier) {
this.retryIntervalMultiplier = retryIntervalMultiplier;
return this;
}
public SnsProperties setRetryCount(int retryCount) {
this.retryCount = retryCount;
return this;
}
}
} |
/**
* Obtains an instance from the specified currency pair.
* <p>
* If a currency pair does not have an implementation, an FX index will be created.
*
* @param currencyPair the currency pair
* @return the index
*/
public static FxIndex of(CurrencyPair currencyPair) {
ArgChecker.notNull(currencyPair, "currencyPair");
return extendedEnum().lookupAll().values().stream()
.filter(index -> index.getCurrencyPair().equals(currencyPair))
.min(Comparator.comparing(FxIndex::getName))
.orElseGet(() -> createFxIndex(currencyPair));
} |
Polymorphisms in the IL‐1 gene cluster influence systemic inflammation in patients at risk for acute‐on‐chronic liver failure
Acute‐on‐chronic liver failure (ACLF) in cirrhosis is an increasingly recognized syndrome characterized by acute decompensation, organ failure(s) and high short‐term mortality. Recent findings suggest that an overexuberant systemic inflammation plays a primary role in ACLF progression. In this study, we examined whether genetic factors shape systemic immune responses in patients with decompensated cirrhosis. Six single‐nucleotide polymorphisms (SNPs) in inflammation‐related genes (interleukin ‐1 beta , rs1143623; IL‐1 receptor antagonist , rs4251961; IL‐10, rs1800871; suppressor of cytokine signaling‐3, rs4969170; nucleotide‐binding oligomerization domain‐containing protein 2, rs3135500; and chemerin chemokine‐like receptor 1, rs1878022) were genotyped in 279 patients with cirrhosis with (n = 178) and without (n = 101) ACLF from the CANONIC study of the CLIF consortium. Among these SNPs, we identified two polymorphisms belonging to the IL‐1 gene cluster (IL‐1β and IL‐1ra) in strong association with ACLF. Both SNPs were protective against ACLF; IL‐1β (odds ratio , 0.34, 95% confidence interval , 0.13‐0.89; P < 0.05) and IL‐1ra (OR, 0.58; 95% CI, 0.35‐0.95; P < 0.05) under the recessive and overdominant inheritance models, respectively. These protective SNPs translated into reduced circulating levels of IL‐1β, IL‐1α, IL‐6, granulocyte‐colony stimulating factor, granulocyte‐macrophage colony‐stimulating factor, and C‐reactive protein at enrollment as well as after 7‐14 days of admission. These findings were confirmed in vitro in leukocytes incubated with plasma from patients with decompensated cirrhosis carrying the protective SNP genotypes. Notably, a higher frequency of the protective genotypes was observed in patients without (80%) than in those with (20%) ACLF. Consistently, patients carrying the combined protective genotypes showed a lower 28‐day mortality rate. Conclusion: These data identify two common functional polymorphisms in the IL‐1 gene cluster, which are associated with the inflammatory process related to development of ACLF. (Hepatology 2017;65:202‐216). |
<filename>Pythonteste/test/calculadoradeporcentagem.py
valor = float(input('Digite o Valor que quer aplicar o desconto: R$'))
desci = float(input('Digite o desconto que quer aplicar: '))
descf = desci / 100
final = valor - (descf * valor)
print('O valor R${} com {}% de desconto será R${:.2f}'.format(valor, desci, final))
|
/**
* Test that a chain can be created for an account other than the HD account 0 of the BIP32 spec.
* In this test a chain pointing to account 44 is created and some addresses tested.
* This is a BIP44/ Trezor compatible chain. See https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki
*
* In this test the private master key is available
*
* @throws UnreadableWalletException
*/
@Test
public void trezorAccountChainUsingPrivateMasterKey() throws UnreadableWalletException {
DeterministicSeed seed = new DeterministicSeed(TREZOR_SEED_PHRASE, null, "", secs);
DeterministicKey privateMasterKey = HDKeyDerivation.createMasterPrivateKey(seed.getSeedBytes());
log.debug("privateMasterKey = " + privateMasterKey);
DeterministicKey key_m_44h = HDKeyDerivation.deriveChildKey(privateMasterKey, new ChildNumber(44 | ChildNumber.HARDENED_BIT));
log.debug("key_m_44h deterministic key = " + key_m_44h);
DeterministicKey key_m_44h_0h = HDKeyDerivation.deriveChildKey(key_m_44h, ChildNumber.ZERO_HARDENED);
log.debug("key_m_44h_0h deterministic key = " + key_m_44h_0h);
DeterministicHierarchy deterministicHierarchy = new DeterministicHierarchy(key_m_44h_0h);
DeterministicKey key_m_44h_0h_0h = deterministicHierarchy.deriveChild(key_m_44h_0h.getPath(), false, false, new ChildNumber(0, true));
log.debug("key_m_44h_0h_0h = " + key_m_44h_0h_0h);
ImmutableList<ChildNumber> key_m_44h_0h_0h_path = key_m_44h_0h_0h.getPath();
log.debug("key_m_44h_0h_0h_path = " + key_m_44h_0h_0h_path);
DeterministicKeyChain accountChain = new DeterministicKeyChain(seed, key_m_44h_0h_0h_path);
log.debug("accountChain = " + accountChain);
assertNotNull(accountChain.getSeed());
assertEquals(secs, accountChain.getSeed().getCreationTimeSeconds());
checkAccountChain(accountChain);
} |
/**
* Unit tests for {@link AnnotatedControllerConfigurer}, focusing on detection
* and mapping of handler methods to schema fields.
*
* @author Rossen Stoyanchev
*/
@SuppressWarnings({"rawtypes", "unused"})
public class BatchMappingDetectionTests {
private final BatchLoaderRegistry batchLoaderRegistry = new DefaultBatchLoaderRegistry();
@Test
void registerWithDefaultCoordinates() {
Map<String, Map<String, DataFetcher>> dataFetcherMap =
initRuntimeWiringBuilder(BookController.class).build().getDataFetchers();
assertThat(dataFetcherMap).containsOnlyKeys("Book");
assertThat(dataFetcherMap.get("Book")).containsOnlyKeys(
"authorFlux", "authorList", "authorMonoMap", "authorMap", "authorEnvironment");
DataLoaderRegistry registry = new DataLoaderRegistry();
this.batchLoaderRegistry.registerDataLoaders(registry, GraphQLContext.newContext().build());
assertThat(registry.getDataLoadersMap()).containsOnlyKeys(
"Book.authorFlux", "Book.authorList", "Book.authorMonoMap", "Book.authorMap", "Book.authorEnvironment");
}
@Test
void invalidReturnType() {
assertThatThrownBy(() -> initRuntimeWiringBuilder(InvalidReturnTypeController.class).build())
.hasMessageStartingWith("@BatchMapping method is expected to return");
}
@Test
void schemaAndBatch() {
assertThatThrownBy(() -> initRuntimeWiringBuilder(SchemaAndBatchMappingController.class).build())
.hasMessageStartingWith("Expected either @BatchMapping or @SchemaMapping, not both");
}
private RuntimeWiring.Builder initRuntimeWiringBuilder(Class<?> handlerType) {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.registerBean(handlerType);
context.registerBean(BatchLoaderRegistry.class, () -> this.batchLoaderRegistry);
context.refresh();
AnnotatedControllerConfigurer configurer = new AnnotatedControllerConfigurer();
configurer.setApplicationContext(context);
configurer.afterPropertiesSet();
RuntimeWiring.Builder wiringBuilder = RuntimeWiring.newRuntimeWiring();
configurer.configure(wiringBuilder);
return wiringBuilder;
}
@Controller
@SuppressWarnings({"ConstantConditions", "unused"})
private static class BookController {
@BatchMapping
public Flux<Author> authorFlux(List<Book> books) {
return null;
}
@BatchMapping
public List<Author> authorList(List<Book> books) {
return null;
}
@BatchMapping
public Mono<Map<Book, Author>> authorMonoMap(List<Book> books) {
return null;
}
@BatchMapping
public Map<Book, Author> authorMap(List<Book> books) {
return null;
}
@BatchMapping
public List<Author> authorEnvironment(BatchLoaderEnvironment environment, List<Book> books) {
return null;
}
}
@Controller
private static class InvalidReturnTypeController {
@BatchMapping
public void authors(List<Book> books) {
}
}
@Controller
private static class SchemaAndBatchMappingController {
@BatchMapping
@SchemaMapping
public void authors(List<Book> books) {
}
}
} |
/**
* Test cube dim.
*
* @throws Exception the exception
*/
@Test(dependsOnGroups = "first")
public void testCubeDim() throws Exception {
CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
String dimTblName = "ziptableMeta";
List<FieldSchema> dimColumns = new ArrayList<FieldSchema>();
dimColumns.add(new FieldSchema("zipcode", "int", "code"));
dimColumns.add(new FieldSchema("f1", "string", "field1"));
dimColumns.add(new FieldSchema("f2", "string", "field2"));
dimColumns.add(new FieldSchema("stateid", "int", "state id"));
dimColumns.add(new FieldSchema("statei2", "int", "state id"));
Map<String, UpdatePeriod> dumpPeriods = new HashMap<String, UpdatePeriod>();
StorageTableDesc s1 = new StorageTableDesc();
s1.setStorageHandler(DBStorageHandler.class.getCanonicalName());
s1.setExternal(true);
dumpPeriods.put(db1.getName(), null);
dumpPeriods.put(db2.getName(), null);
Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
storageTables.put(db1.getName(), s1);
storageTables.put(db2.getName(), s1);
client.createCubeDimensionTable("zipdim", dimTblName, dimColumns, 0L, dumpPeriods, null, storageTables);
Assert.assertTrue(client.tableExists(dimTblName));
for (String storage : storageTables.keySet()) {
String storageTableName = MetastoreUtil.getFactOrDimtableStorageTableName(dimTblName, storage);
Assert.assertTrue(client.tableExists(storageTableName));
}
} |
Joseph Niemeyer (Photo: Provided/Kenton County Detention Center)
KENTON COUNTY, KY (FOX19) – A former youth pastor and school volunteer will spend at least 17 years behind bars after admitting guilt on charges of sexual abuse and sodomy of a minor.
Joseph Niemeyer, 56, worked with youth at the Banklick Baptist Church in Walton until he was arrested in February 2016. He also volunteered at Twenhofel Middle School.
On Monday, Niemeyer pleaded guilty to four counts of first-degree sexual abuse and one count first-degree sodomy, all against a girl younger than 12.
Under the plea agreement, Niemeyer will have to register as a sex offender for the rest of his life. He could spend up to 20 years in jail and must serve 17 years before being parole eligible, according to Kenton County Prosecutor Rob Sanders.
Niemeyer and his wife worked as youth pastors at the church, according to Tim Cochran, pastor at New Banklick.
"This is a pretty big deal. It's like a kick in the gut,” Cochran told FOX19 NOW in 2016. “He was my friend. I'm shocked really. Never in a million years would I have guessed anything like this."
Sanders said Niemeyer will appear in court again for a sentencing hearing.
Copyright 2017 WXIX. All rights reserved.
Read or Share this story: http://cin.ci/2iCsrV4 |
<gh_stars>0
// Copyright (c) 2004-present Facebook All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated by entc, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"github.com/facebook/ent/dialect/sql"
"github.com/facebook/ent/dialect/sql/sqlgraph"
"github.com/facebook/ent/schema/field"
"github.com/facebookincubator/symphony/pkg/ent/comparator"
"github.com/facebookincubator/symphony/pkg/ent/predicate"
"github.com/facebookincubator/symphony/pkg/ent/rule"
"github.com/facebookincubator/symphony/pkg/ent/rulelimit"
)
// RuleLimitUpdate is the builder for updating RuleLimit entities.
type RuleLimitUpdate struct {
config
hooks []Hook
mutation *RuleLimitMutation
}
// Where adds a new predicate for the builder.
func (rlu *RuleLimitUpdate) Where(ps ...predicate.RuleLimit) *RuleLimitUpdate {
rlu.mutation.predicates = append(rlu.mutation.predicates, ps...)
return rlu
}
// SetNumber sets the number field.
func (rlu *RuleLimitUpdate) SetNumber(i int) *RuleLimitUpdate {
rlu.mutation.ResetNumber()
rlu.mutation.SetNumber(i)
return rlu
}
// AddNumber adds i to number.
func (rlu *RuleLimitUpdate) AddNumber(i int) *RuleLimitUpdate {
rlu.mutation.AddNumber(i)
return rlu
}
// SetLimitType sets the limitType field.
func (rlu *RuleLimitUpdate) SetLimitType(s string) *RuleLimitUpdate {
rlu.mutation.SetLimitType(s)
return rlu
}
// SetComparatorID sets the comparator edge to Comparator by id.
func (rlu *RuleLimitUpdate) SetComparatorID(id int) *RuleLimitUpdate {
rlu.mutation.SetComparatorID(id)
return rlu
}
// SetNillableComparatorID sets the comparator edge to Comparator by id if the given value is not nil.
func (rlu *RuleLimitUpdate) SetNillableComparatorID(id *int) *RuleLimitUpdate {
if id != nil {
rlu = rlu.SetComparatorID(*id)
}
return rlu
}
// SetComparator sets the comparator edge to Comparator.
func (rlu *RuleLimitUpdate) SetComparator(c *Comparator) *RuleLimitUpdate {
return rlu.SetComparatorID(c.ID)
}
// SetRuleID sets the rule edge to Rule by id.
func (rlu *RuleLimitUpdate) SetRuleID(id int) *RuleLimitUpdate {
rlu.mutation.SetRuleID(id)
return rlu
}
// SetNillableRuleID sets the rule edge to Rule by id if the given value is not nil.
func (rlu *RuleLimitUpdate) SetNillableRuleID(id *int) *RuleLimitUpdate {
if id != nil {
rlu = rlu.SetRuleID(*id)
}
return rlu
}
// SetRule sets the rule edge to Rule.
func (rlu *RuleLimitUpdate) SetRule(r *Rule) *RuleLimitUpdate {
return rlu.SetRuleID(r.ID)
}
// Mutation returns the RuleLimitMutation object of the builder.
func (rlu *RuleLimitUpdate) Mutation() *RuleLimitMutation {
return rlu.mutation
}
// ClearComparator clears the "comparator" edge to type Comparator.
func (rlu *RuleLimitUpdate) ClearComparator() *RuleLimitUpdate {
rlu.mutation.ClearComparator()
return rlu
}
// ClearRule clears the "rule" edge to type Rule.
func (rlu *RuleLimitUpdate) ClearRule() *RuleLimitUpdate {
rlu.mutation.ClearRule()
return rlu
}
// Save executes the query and returns the number of nodes affected by the update operation.
func (rlu *RuleLimitUpdate) Save(ctx context.Context) (int, error) {
var (
err error
affected int
)
rlu.defaults()
if len(rlu.hooks) == 0 {
if err = rlu.check(); err != nil {
return 0, err
}
affected, err = rlu.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*RuleLimitMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err = rlu.check(); err != nil {
return 0, err
}
rlu.mutation = mutation
affected, err = rlu.sqlSave(ctx)
mutation.done = true
return affected, err
})
for i := len(rlu.hooks) - 1; i >= 0; i-- {
mut = rlu.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, rlu.mutation); err != nil {
return 0, err
}
}
return affected, err
}
// SaveX is like Save, but panics if an error occurs.
func (rlu *RuleLimitUpdate) SaveX(ctx context.Context) int {
affected, err := rlu.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (rlu *RuleLimitUpdate) Exec(ctx context.Context) error {
_, err := rlu.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (rlu *RuleLimitUpdate) ExecX(ctx context.Context) {
if err := rlu.Exec(ctx); err != nil {
panic(err)
}
}
// defaults sets the default values of the builder before save.
func (rlu *RuleLimitUpdate) defaults() {
if _, ok := rlu.mutation.UpdateTime(); !ok {
v := rulelimit.UpdateDefaultUpdateTime()
rlu.mutation.SetUpdateTime(v)
}
}
// check runs all checks and user-defined validators on the builder.
func (rlu *RuleLimitUpdate) check() error {
if v, ok := rlu.mutation.LimitType(); ok {
if err := rulelimit.LimitTypeValidator(v); err != nil {
return &ValidationError{Name: "limitType", err: fmt.Errorf("ent: validator failed for field \"limitType\": %w", err)}
}
}
return nil
}
func (rlu *RuleLimitUpdate) sqlSave(ctx context.Context) (n int, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: rulelimit.Table,
Columns: rulelimit.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: rulelimit.FieldID,
},
},
}
if ps := rlu.mutation.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := rlu.mutation.UpdateTime(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: rulelimit.FieldUpdateTime,
})
}
if value, ok := rlu.mutation.Number(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeInt,
Value: value,
Column: rulelimit.FieldNumber,
})
}
if value, ok := rlu.mutation.AddedNumber(); ok {
_spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{
Type: field.TypeInt,
Value: value,
Column: rulelimit.FieldNumber,
})
}
if value, ok := rlu.mutation.LimitType(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rulelimit.FieldLimitType,
})
}
if rlu.mutation.ComparatorCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.ComparatorTable,
Columns: []string{rulelimit.ComparatorColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: comparator.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := rlu.mutation.ComparatorIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.ComparatorTable,
Columns: []string{rulelimit.ComparatorColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: comparator.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if rlu.mutation.RuleCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.RuleTable,
Columns: []string{rulelimit.RuleColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: rule.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := rlu.mutation.RuleIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.RuleTable,
Columns: []string{rulelimit.RuleColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: rule.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if n, err = sqlgraph.UpdateNodes(ctx, rlu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{rulelimit.Label}
} else if cerr, ok := isSQLConstraintError(err); ok {
err = cerr
}
return 0, err
}
return n, nil
}
// RuleLimitUpdateOne is the builder for updating a single RuleLimit entity.
type RuleLimitUpdateOne struct {
config
hooks []Hook
mutation *RuleLimitMutation
}
// SetNumber sets the number field.
func (rluo *RuleLimitUpdateOne) SetNumber(i int) *RuleLimitUpdateOne {
rluo.mutation.ResetNumber()
rluo.mutation.SetNumber(i)
return rluo
}
// AddNumber adds i to number.
func (rluo *RuleLimitUpdateOne) AddNumber(i int) *RuleLimitUpdateOne {
rluo.mutation.AddNumber(i)
return rluo
}
// SetLimitType sets the limitType field.
func (rluo *RuleLimitUpdateOne) SetLimitType(s string) *RuleLimitUpdateOne {
rluo.mutation.SetLimitType(s)
return rluo
}
// SetComparatorID sets the comparator edge to Comparator by id.
func (rluo *RuleLimitUpdateOne) SetComparatorID(id int) *RuleLimitUpdateOne {
rluo.mutation.SetComparatorID(id)
return rluo
}
// SetNillableComparatorID sets the comparator edge to Comparator by id if the given value is not nil.
func (rluo *RuleLimitUpdateOne) SetNillableComparatorID(id *int) *RuleLimitUpdateOne {
if id != nil {
rluo = rluo.SetComparatorID(*id)
}
return rluo
}
// SetComparator sets the comparator edge to Comparator.
func (rluo *RuleLimitUpdateOne) SetComparator(c *Comparator) *RuleLimitUpdateOne {
return rluo.SetComparatorID(c.ID)
}
// SetRuleID sets the rule edge to Rule by id.
func (rluo *RuleLimitUpdateOne) SetRuleID(id int) *RuleLimitUpdateOne {
rluo.mutation.SetRuleID(id)
return rluo
}
// SetNillableRuleID sets the rule edge to Rule by id if the given value is not nil.
func (rluo *RuleLimitUpdateOne) SetNillableRuleID(id *int) *RuleLimitUpdateOne {
if id != nil {
rluo = rluo.SetRuleID(*id)
}
return rluo
}
// SetRule sets the rule edge to Rule.
func (rluo *RuleLimitUpdateOne) SetRule(r *Rule) *RuleLimitUpdateOne {
return rluo.SetRuleID(r.ID)
}
// Mutation returns the RuleLimitMutation object of the builder.
func (rluo *RuleLimitUpdateOne) Mutation() *RuleLimitMutation {
return rluo.mutation
}
// ClearComparator clears the "comparator" edge to type Comparator.
func (rluo *RuleLimitUpdateOne) ClearComparator() *RuleLimitUpdateOne {
rluo.mutation.ClearComparator()
return rluo
}
// ClearRule clears the "rule" edge to type Rule.
func (rluo *RuleLimitUpdateOne) ClearRule() *RuleLimitUpdateOne {
rluo.mutation.ClearRule()
return rluo
}
// Save executes the query and returns the updated entity.
func (rluo *RuleLimitUpdateOne) Save(ctx context.Context) (*RuleLimit, error) {
var (
err error
node *RuleLimit
)
rluo.defaults()
if len(rluo.hooks) == 0 {
if err = rluo.check(); err != nil {
return nil, err
}
node, err = rluo.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*RuleLimitMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
if err = rluo.check(); err != nil {
return nil, err
}
rluo.mutation = mutation
node, err = rluo.sqlSave(ctx)
mutation.done = true
return node, err
})
for i := len(rluo.hooks) - 1; i >= 0; i-- {
mut = rluo.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, rluo.mutation); err != nil {
return nil, err
}
}
return node, err
}
// SaveX is like Save, but panics if an error occurs.
func (rluo *RuleLimitUpdateOne) SaveX(ctx context.Context) *RuleLimit {
node, err := rluo.Save(ctx)
if err != nil {
panic(err)
}
return node
}
// Exec executes the query on the entity.
func (rluo *RuleLimitUpdateOne) Exec(ctx context.Context) error {
_, err := rluo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (rluo *RuleLimitUpdateOne) ExecX(ctx context.Context) {
if err := rluo.Exec(ctx); err != nil {
panic(err)
}
}
// defaults sets the default values of the builder before save.
func (rluo *RuleLimitUpdateOne) defaults() {
if _, ok := rluo.mutation.UpdateTime(); !ok {
v := rulelimit.UpdateDefaultUpdateTime()
rluo.mutation.SetUpdateTime(v)
}
}
// check runs all checks and user-defined validators on the builder.
func (rluo *RuleLimitUpdateOne) check() error {
if v, ok := rluo.mutation.LimitType(); ok {
if err := rulelimit.LimitTypeValidator(v); err != nil {
return &ValidationError{Name: "limitType", err: fmt.Errorf("ent: validator failed for field \"limitType\": %w", err)}
}
}
return nil
}
func (rluo *RuleLimitUpdateOne) sqlSave(ctx context.Context) (_node *RuleLimit, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: rulelimit.Table,
Columns: rulelimit.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: rulelimit.FieldID,
},
},
}
id, ok := rluo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "ID", err: fmt.Errorf("missing RuleLimit.ID for update")}
}
_spec.Node.ID.Value = id
if value, ok := rluo.mutation.UpdateTime(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: rulelimit.FieldUpdateTime,
})
}
if value, ok := rluo.mutation.Number(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeInt,
Value: value,
Column: rulelimit.FieldNumber,
})
}
if value, ok := rluo.mutation.AddedNumber(); ok {
_spec.Fields.Add = append(_spec.Fields.Add, &sqlgraph.FieldSpec{
Type: field.TypeInt,
Value: value,
Column: rulelimit.FieldNumber,
})
}
if value, ok := rluo.mutation.LimitType(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rulelimit.FieldLimitType,
})
}
if rluo.mutation.ComparatorCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.ComparatorTable,
Columns: []string{rulelimit.ComparatorColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: comparator.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := rluo.mutation.ComparatorIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.ComparatorTable,
Columns: []string{rulelimit.ComparatorColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: comparator.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
if rluo.mutation.RuleCleared() {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.RuleTable,
Columns: []string{rulelimit.RuleColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: rule.FieldID,
},
},
}
_spec.Edges.Clear = append(_spec.Edges.Clear, edge)
}
if nodes := rluo.mutation.RuleIDs(); len(nodes) > 0 {
edge := &sqlgraph.EdgeSpec{
Rel: sqlgraph.M2O,
Inverse: true,
Table: rulelimit.RuleTable,
Columns: []string{rulelimit.RuleColumn},
Bidi: false,
Target: &sqlgraph.EdgeTarget{
IDSpec: &sqlgraph.FieldSpec{
Type: field.TypeInt,
Column: rule.FieldID,
},
},
}
for _, k := range nodes {
edge.Target.Nodes = append(edge.Target.Nodes, k)
}
_spec.Edges.Add = append(_spec.Edges.Add, edge)
}
_node = &RuleLimit{config: rluo.config}
_spec.Assign = _node.assignValues
_spec.ScanValues = _node.scanValues()
if err = sqlgraph.UpdateNode(ctx, rluo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{rulelimit.Label}
} else if cerr, ok := isSQLConstraintError(err); ok {
err = cerr
}
return nil, err
}
return _node, nil
}
|
Microstrip lowpass filters with improved frequency responses using coupled-line hairpin resonators
In this paper, microstrip lowpass filters (LPFs) with improved frequency responses are developed by cascading a coupled-line hairpin resonator with quarter-wavelength microstrip lines. The equivalent transmission line circuit and lumped element circuit of the filters are analyzed, and formulas are derived for determining the frequencies of reflection zeroes and transmission zeroes, as well as the 3dB cutoff frequency of the filters. Based on the derived formulas, a number of LPFs are designed and fabricated, and their measured frequency responses agree well with the predicted ones. The desired filtering characteristics, like flat passband, sharp roll-off skirt, and wide stopband, are realized. |
<filename>OClusterMapView+Sample/OCDistanceCalculationPerformance.h
//
// OCDistanceCalculationPerformance.h
// OClusterMapView+Sample
//
// Created by Markus on 25.09.13.
//
//
#import <Foundation/Foundation.h>
@interface OCDistanceCalculationPerformance : NSObject
+ (void)testDistanceCalculationPerformance;
@end
|
N, K = map(int, input().split())
As = list(map(int, input().split()))
mem = [0]*N
mem[0] = 1
pos = 0
cic = 0
j = 0
for k in range(1,K+1):
pos = As[pos] - 1
if mem[pos] == 0:
mem[pos] = 1
else:
j = k
cic = pos
break
if k == K:
print(pos+1)
exit()
pos = 0
if cic == pos:
i = 0
else:
for l in range(1,j+1):
pos = As[pos] - 1
if pos == cic:
i = l
break
m = j - i
rlt = (K-i)%m
if rlt == 0:
print(cic+1)
else:
pos = cic
for n in range(1,rlt+1):
pos = As[pos] - 1
print(pos+1)
|
<gh_stars>0
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0
*/
//! Discovers files relevant to the build of a given crate, and
//! prints out a determistic SHA-256 of the entire crate contents.
use anyhow::Result;
use clap::Parser;
use file_list::FileList;
use std::path::PathBuf;
mod file_list;
#[derive(Parser, Debug)]
#[clap(author, version, about)]
struct Args {
location: PathBuf,
}
pub fn main() -> Result<()> {
let file_list = FileList::discover(&Args::parse().location)?;
println!("{}", file_list.sha256());
Ok(())
}
|
/// Delete generated pos files for jobs at the provided data dir
pub fn delete_pos_files(jobs: &Vec<Job>, data_dir: String) {
for job in jobs {
let file_name = job.file_name();
let path = Path::new(data_dir.clone().as_str()).join(file_name);
info!("deleting post file {}...", path.display());
let _ = fs::remove_file(path).unwrap();
}
} |
/**
* Update the progressbar with a new value
*
* @param val Value
*/
public void progressChanged(int val) {
for (IProgressObserver listener : listeners) {
listener.progressChanged(val);
}
} |
Referendum was born from similar feeling to UK’s Brexit vote, but what Viktor Orbán wants is more of a threat to EU’s future
Hungarian right does not want to leave the EU. It wants to subvert it
On walls across Budapest this week, a witty opposition poster mocked how the Hungarian government wanted to take European Union handouts while opting out of the EU’s responsibility-sharing mechanism for refugees. “Hi Brussels,” the poster reads. “We still want your money.”
'The question was stupid': Hungarians on the refugee referendum Read more
The quip is a reminder that while this weekend’s referendum in Hungary was born from similar frustrations to the Brexit vote in June, the Hungarian right does not want to leave the EU. Instead it wants something that is perhaps even more of a threat to the EU’s future: it wants to stay part of the union – and subvert it from the inside. Viktor Orbán, Hungary’s nationalist prime minister, seeks what he calls a counter-cultural revolution within the EU – greater autonomy for nation states and less emphasis on liberal and humanitarian principles.
While Hungary has traditionally played a minor role within European politics, Orbán’s ambitions turned him into the leading populist voice in contemporary Europe. Sunday’s referendum was his latest attempt to build political momentum for an illiberal European future – and Orbán himself presented the vote as a victory.
But analysts noted that he had ultimately failed to encourage a majority of his own population to vote against refugees and against a more humanitarian vision of the EU. As a result, this failure has arguably given some unexpected breathing space to the European politicians whose ideas Orbán so strongly opposes – notably Angela Merkel, the German chancellor, and Jean-Claude Juncker, the president of the EU commission.
“This will be viewed as a relief in Brussels,” said András Bíró-Nagy, a former EU official and the head of research at Policy Solutions, a Budapest-based thinktank. “It’s clearly a disappointing result for Orbán and I think his European ambitions will suffer as a result. In domestic politics, he’ll try to spin that he won this – but it doesn’t send a strong message to Brussels. How can he win a cultural counter-revolution in Europe if he can’t get a valid referendum result on his strongest issue in his own country?”
Hungary's refugee referendum not valid after voters stay away Read more
The impasse over the EU’s common asylum policy – the issue at the heart of the referendum – was likely to continue, Bíró-Nagy said. But his “[ideological] momentum at a European level has been stopped”.
Gerald Knaus, the head of the European Stability Initiative, a Berlin-based thinktank, said that he hoped Orbán’s setback would loosen his psychological hold over other European leaders. A prominent critic of the Hungarian leader, Knaus said members of the European People’s party, the continent’s largest alliance of centre-right parties, should now expel Orbán’s party, whose far-right policies are now at odds with mainstream Christian democracy.
Hungarian referendum decides whether to slam the door on migrants Read more
“If he fails to get the necessary 50% in a referendum of his choice, where he mobilised everything he could to get people to turn up and vote – it should hopefully be seen as a sign that he is in fact vulnerable,” Knaus argued. “And that his apparent total dominance of Hungarian politics is as much about the weakness of the opposition and the strange electoral system as it is about his popularity.”
But Knaus warned that Orbán still had an outsized influence on European affairs, as indicated by “the striking indifference on the part of the EU to taking its own laws seriously”. It is still, Knaus added, “far too early – whatever the results today – to count Orbán out.” |
Toyota, Australia's last remaining automotive manufacturer, this afternoon has confirmed what many have suspected for some time: it's pulling out of Australia in 2017.
First Ford pulled out, then Holden, now Toyota. The Australian manufacturing industry is now in quite a state.
The company issued a statement today, saying that it would shutter its Australian manufacturing facility in Port Melbourne, Victoria, while considering a scaling back of the other facility dedicated to design and evaluation work.
The Port Melbourne plant was established in 1959, and today employs 3900 workers who build the Camry, Camry Hybrid and Aurion vehicles for the company.
President of the Toyota Motor Company, Akio Toyoda, said in a statement that the company would still sell cars in Australia, it just wouldn't make them here due to a tough market:
"We believed that we should continue producing vehicles in Australia, and Toyota and its workforce here made every effort. However, various negative factors such as an extremely competitive market and a strong Australian dollar, together with forecasts of a reduction in the total scale of vehicle production in Australia, have forced us to make this painful decision.”
You can read the full statement here. |
/**
* This class handles the pyramid floor generation
*
* @author Tyluur<[email protected]>
* @since February 15th, 2015
*/
public class PyramidFloor implements PyramidHunterConstants {
/**
* Constructs a new {@link PyramidFloor} {@code Object}
*/
public PyramidFloor() {
this.boundChunks = RegionBuilder.findEmptyChunkBound(20, 20);
this.facade = new PyramidFloorFacade();
this.objectHandler = new PyramidObjects(this);
}
/**
* Starts single player mode floor. The only player in the floor will be the player entering.
*
* @param player
* The player entering.
*/
public void startSingleMode(Player player) {
List<Player> floorPlayers = new ArrayList<>();
floorPlayers.add(player);
this.floorPlayers = floorPlayers;
enterFloor(LobbyType.SINGLE);
}
/**
* Starts the team mode floor. The {@link #floorPlayers} is set to the floorPlayers parameterized. All players then
* start the {@link PyramidHuntingGame} controller
*/
public void startTeamMode(List<Player> floorPlayers) {
this.floorPlayers = floorPlayers;
enterFloor(LobbyType.TEAM);
}
/**
* This method brings every {@link #floorPlayers} {@code Player} into the pyramid floor.
*
* @param lobbyType
* The type of lobby the players were waiting in
*/
public void enterFloor(LobbyType lobbyType) {
setLobbyType(lobbyType);
facade.setGoalFoodCooked(PyramidHunterConstants.getFoodPerLevel(lobbyType, getLevel()));
RegionBuilder.copyAllPlanesMap(404, 1164, boundChunks[0], boundChunks[1], 64);
RegionBuilder.copyAllPlanesMap(400, 1160, boundChunks[0], boundChunks[1], 64);
for (Player player : floorPlayers) {
player.setNextWorldTile(getWorldTile(33, 36));
player.getControllerManager().startController("PHGame", this);
}
spawnEverything();
}
/**
* This method spawns monsters on the floor
*/
public void spawnMonsters() {
int total = PyramidHunterConstants.getRandomMonsterCount(getLevel()) + BASE_SCARAB_COUNT;
PyramidFloorMonster[] monsters = new PyramidFloorMonster[total];
for (int i = 0; i < BASE_SCARAB_COUNT; i++) {
monsters[i] = new PyramidFloorMonster(this, Monsters.SCARAB, getRandomTile(false));
}
total = total - BASE_SCARAB_COUNT;
int zombiesCount = Math.floorDiv(total, 3) * 2;
int generalCount = total - zombiesCount;
int zombiesSpawned = 0;
int generalSpawned = 0;
for (int i = 0; i < monsters.length; i++) {
if (monsters[i] != null) {
continue;
}
if (zombiesSpawned < zombiesCount) {
Monsters monsterType = Monsters.ZOMBIES;
int id = monsterType.getRandomId();
WorldTile tile = getRandomTile(true);
int count = 0;
while (!World.canMoveNPC(tile.getPlane(), tile.getX(), tile.getY(), NPCDefinitions.getNPCDefinitions(id).size)) {
if (count == MONSTER_SPAWNS.length) {
System.out.println("Gave up checking for better spawns...[" + monsterType + ", " + NPCDefinitions.getNPCDefinitions(id).getName() + "]");
break;
}
tile = getRandomTile(true);
count++;
}
monsters[i] = new PyramidFloorMonster(this, id, tile, monsterType);
zombiesSpawned++;
} else if (generalSpawned < generalCount) {
Monsters monsterType = Monsters.GENERAL;
int id = monsterType.getRandomId();
WorldTile tile = getRandomTile(true);
int count = 0;
while (!World.canMoveNPC(tile.getPlane(), tile.getX(), tile.getY(), NPCDefinitions.getNPCDefinitions(id).size)) {
if (count == MONSTER_SPAWNS.length) {
System.out.println("Gave up checking for better spawns...[" + monsterType + ", " + NPCDefinitions.getNPCDefinitions(id).getName() + "]");
break;
}
tile = getRandomTile(true);
}
monsters[i] = new PyramidFloorMonster(this, id, tile, monsterType);
generalSpawned++;
}
}
for (PyramidFloorMonster monster : monsters) {
facade.getMonsters().add(monster);
}
}
/**
* This method spawns different entities in the dynamic region
*/
public void spawnEverything() {
/** Spawning two fires */
World.spawnObject(new WorldObject(2732, 10, 1, getWorldTile(34, 31)));
// This fire is ontop of the altar
World.spawnObject(new WorldObject(2732, 10, 1, getWorldTile(33, 31)));
World.spawnObject(new WorldObject(2732, 10, 1, getWorldTile(32, 31)));
/* Spawning the two stalls */
World.spawnObject(new WorldObject(4875, 10, 1, getWorldTile(33, 34))); // magic
// removing the portal
World.spawnObject(new WorldObject(-1, 10, 0, getWorldTile(33, 32)));
// Nomad
NPC nomad = new NPC(8591, getWorldTile(33, 32), -1, true);
nomad.setWalkType(NPC.NO_WALK);
}
/**
* Checks if the monsters are all killed (by checking that the list is empty), and if they are, we progress to the
* {@link PyramidFloorStage#SEARCHING_FOR_TREASURE}
*/
private void checkMonsters() {
if (facade.getMonsters().isEmpty()) {
facade.setFloorStage(PyramidFloorStage.SEARCHING_FOR_TREASURE);
fireStageUpdateCheck();
}
}
/**
* Spawns the treasure at a random coordinate
*/
public void spawnTreasure() {
WorldTile randomTile = TREASURE_SPAWNS[Utils.random(TREASURE_SPAWNS.length)];
WorldTile localTile = getWorldTile(randomTile.getX(), randomTile.getY());
facade.setTreasure(new WorldObject(TREASURE_CHEST_ID, 10, 1, localTile));
facade.setTreasurePercentLeft(100);
CoresManager.FAST_EXECUTOR.scheduleAtFixedRate(setTimer(new TreasureTimerTask(this)), 0, TimeUnit.SECONDS.toMillis(1));
World.spawnObject(facade.getTreasure());
int totalPoints = 0;
for (Player pl : floorPlayers) {
try {
totalPoints += pl.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).get().getPoints();
} catch (Exception e) {
e.printStackTrace();
}
}
facade.setTreasureReward(Math.floorDiv(totalPoints, 2));
}
/**
* This method removes the floor from the server
*/
public void removeFloor() {
RegionBuilder.destroyMap(boundChunks[0], boundChunks[1], 8, 8);
}
/**
* Grabs a random {@code WorldTile} {@code Object} from the {@link PyramidHunterConstants#MONSTER_SPAWNS} {@code
* Array} of spawns.
*/
public WorldTile getRandomTile(boolean checkDuplicates) {
if (checkDuplicates && facade.getMonsters().size() == MONSTER_SPAWNS.length) {
facade.getMonsters().clear();
}
WorldTile tile = MONSTER_SPAWNS[Utils.random(MONSTER_SPAWNS.length)];
while (checkDuplicates && facade.getMonsterSpawns().contains(tile)) {
tile = MONSTER_SPAWNS[Utils.random(MONSTER_SPAWNS.length)];
}
tile = getWorldTile(tile.getX(), tile.getY());
if (checkDuplicates) {
facade.getMonsterSpawns().add(tile);
}
return tile;
}
/**
* Sets the hint for everybody to see and turns the showing hint variable to true
*
* @param requested
* The player who requested the hint
*/
public void showGlobalHint(Player requested) {
WorldTile startTile = getWorldTile(33, 36);
WorldTile treasureTile = facade.getTreasure().getWorldTile();
String hint = Utils.getTileInformations(startTile, treasureTile);
facade.setHintText(hint);
int pointsLost = getPointsToLose();
for (Player player : floorPlayers) {
player.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).ifPresent(c -> c.addPoints(-pointsLost));
player.sendMessage(requested.getDisplayName() + " requested a hint so you lose " + pointsLost + " points!");
}
facade.setShowingHint(true);
}
/**
* The amount of points everyone loses when the hint is displayed
*/
public int getPointsToLose() {
int points = 0;
for (Player player : floorPlayers) {
try {
points += player.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).get().getPoints();
} catch (Exception e) {
e.printStackTrace();
}
}
return Math.floorDiv(points, 3);
}
/**
* When players cooked food, the total number of food they've cooked is incremented in {@link
* PyramidFloorFacade#getTotalFoodCooked()}. When this value reaches the goal value set in {@link
* PyramidFloorFacade#getGoalFoodCooked()}, we update to the killing monsters stage.
*/
public void checkFoodCooked() {
if (facade.getFloorStage() != PyramidFloorStage.PREPARING_ENTRANCE) {
return;
}
if (facade.getTotalFoodCooked() >= facade.getGoalFoodCooked()) {
facade.setFloorStage(PyramidFloorStage.FIGHTING_MONSTERS);
fireStageUpdateCheck();
}
}
/**
* Upon stage update, each stage has custom events that are fired. This method fires those events.
*/
private void fireStageUpdateCheck() {
facade.getFloorStage().fireUpdateCheck(this);
}
/**
* If the world tile is inside the home room
*
* @param worldTile
* The {@link WorldTile} {@code Object} we're checking for
*/
public boolean isInHomeRoom(WorldTile worldTile) {
WorldTile topLeft = new WorldTile(26, 41);
WorldTile bottomRight = new WorldTile(40, 29);
WorldTile regionLeft = getWorldTile(topLeft.getX(), topLeft.getY());
WorldTile regionRight = getWorldTile(bottomRight.getX(), bottomRight.getY());
boolean correctX = worldTile.getX() >= regionLeft.getX() && worldTile.getX() <= regionRight.getX();
boolean correctY = worldTile.getY() >= regionRight.getY() && worldTile.getY() <= regionLeft.getY();
return correctX && correctY;
}
/**
* Gets the world tile inside the dynamic region
*
* @param mapX
* The x in the map
* @param mapY
* The y in the map
*/
public WorldTile getWorldTile(int mapX, int mapY) {
return new WorldTile(boundChunks[0] * 8 + mapX, boundChunks[1] * 8 + mapY, 0);
}
/**
* This method removes the player from the {@link #floorPlayers} list
*
* @param player
* The player to remove.
*/
public void removePlayer(Player player) {
floorPlayers.remove(player);
if (floorPlayers.size() <= 0) {
removeFloor();
}
giveRewards(player);
player.getInventory().deleteItem(18173, Integer.MAX_VALUE);
player.getInventory().deleteItem(17811, Integer.MAX_VALUE);
player.getControllerManager().forceStop();
}
/**
* Gives the player their rewards for game progress.
*
* @param player
* The player
*/
private void giveRewards(Player player) {
PyramidHuntingGame game = null;
try {
game = player.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).orElse(null);
} catch (Exception e) {
e.printStackTrace();
}
if (game == null) {
return;
}
int pointsReceived = PyramidHunterConstants.getPointsToGive(getLevel(), game.getPoints(), game.getDamageDealt());
String[] taunts = new String[] { "Perhaps you should've tried harder...", "That's all you've got? I doubt it.", "Where has your talent gone, fair traveller?" };
player.getDialogueManager().startDialogue(SimpleNPCMessage.class, 8591, "" + taunts[Utils.random(taunts.length)], "You earned " + Utils.format(pointsReceived) + " " + GameConstants.SERVER_NAME + " points though.", "Try again soon.");
player.getFacade().setDreamPoints(player.getFacade().getDreamPoints() + pointsReceived);
}
/**
* This method removes the monster from the {@link PyramidFloorFacade#getMonsters()} list
*
* @param monster
case "Construction Shop 1":
* The monster to remove
*/
public void removeMonster(PyramidFloorMonster monster) {
facade.getMonsters().remove(monster);
checkMonsters();
}
/**
* This method handles when the player interacts with the treasure
*
* @param player
* The player
*/
public void handleTreasureSearch(Player player) {
if (!facade.getFloorStage().equals(PyramidFloorStage.SEARCHING_FOR_TREASURE)) {
return;
}
List<Player> playersLeft = floorPlayers.stream().filter(pl -> !facade.getPlayersFoundTreasure().contains(pl)).collect(Collectors.toList());
if (facade.getPlayersFoundTreasure().contains(player)) {
StringBuilder bldr = new StringBuilder();
for (int i = 0; i < playersLeft.size(); i++) {
Player pl = playersLeft.get(i);
bldr.append(pl.getDisplayName()).append("").append(i == (playersLeft.size() - 1) ? "" : ",");
}
player.getDialogueManager().startDialogue(SimpleMessage.class, "You have already searched the chest!", "Players waiting on:", bldr.toString());
return;
}
boolean first = facade.getPlayersFoundTreasure().isEmpty() && lobbyType == LobbyType.TEAM;
boolean receivesBoost = false;
if (first && !facade.isShowingHint()) {
receivesBoost = true;
}
if (receivesBoost) {
int pointBoost = Math.floorDiv(player.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).get().getPoints(), 5);
player.sendMessage("You were the first to reach the treasure and no hint was needed!");
player.sendMessage("You receive a " + pointBoost + " total point boost.");
player.getDialogueManager().startDialogue(SimpleMessage.class, "You were the first to reach the treasure and no hint was needed!", "You receive a " + pointBoost + " total point boost.");
try {
player.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).ifPresent(c -> c.addPoints(pointBoost));
} catch (Exception e) {
e.printStackTrace();
}
}
if (!facade.getPlayersFoundTreasure().contains(player)) {
facade.getPlayersFoundTreasure().add(player);
}
if (facade.getPlayersFoundTreasure().size() >= floorPlayers.size()) {
finishLevel();
} else {
player.sendMessage("You have searched the chest... Waiting on " + (playersLeft.size() - 1) + " players now.");
}
}
/**
* When this method is called, the level is finished. Everything resets and we restart.
*/
private void finishLevel() {
World.removeObject(facade.getTreasure());
double percent = facade.getTreasurePercentLeft();
int pointsPossible = facade.getTreasureReward();
int pointReward = (int) (pointsPossible * percent / (double) 100);
for (Player player : floorPlayers) {
player.getControllerManager().verifyControlerForOperation(PyramidHuntingGame.class).ifPresent(c -> c.addPoints(pointReward));
player.getDialogueManager().startDialogue(SimpleMessage.class, "Level Complete! You receive " + Utils.format(pointReward) + " reward points!");
}
facade = new PyramidFloorFacade();
facade.setGoalFoodCooked(PyramidHunterConstants.getFoodPerLevel(lobbyType, getLevel()));
for (Player player : floorPlayers) {
player.setNextWorldTile(getWorldTile(33, 36));
}
setLevel(getLevel() + 1);
timer.cancel();
}
/**
* Gets the {@link #floorPlayers} list
*
* @return A {@code List} {@code Object}
*/
public List<Player> getFloorPlayers() {
return floorPlayers;
}
/**
* Setting the list of floor players to a new list
*
* @param floorPlayers
* A {@code List} of floor {@code Player}s
*/
public void setFloorPlayers(List<Player> floorPlayers) {
this.floorPlayers = floorPlayers;
}
public PyramidFloorFacade getFacade() {
return facade;
}
public PyramidObjects getObjectHandler() {
return objectHandler;
}
public LobbyType getLobbyType() {
return lobbyType;
}
public void setLobbyType(LobbyType lobbyType) {
this.lobbyType = lobbyType;
}
public int getLevel() {
return level;
}
public void setLevel(int level) {
this.level = level;
}
public TreasureTimerTask getTimer() {
return timer;
}
public TreasureTimerTask setTimer(TreasureTimerTask timer) {
this.timer = timer;
return timer;
}
/**
* The list of players in the floor
*/
private transient List<Player> floorPlayers;
/**
* The level we're on in the floor
*/
private transient int level = 1;
/**
* The chunks for the dynamic region in the floor
*/
private transient int[] boundChunks;
/**
* The type of lobby the players were waiting in before the game.
*/
private transient LobbyType lobbyType;
/**
* The timer object
*/
private transient TreasureTimerTask timer;
/**
* The facade for our floor
*/
private transient PyramidFloorFacade facade;
/**
* The instance of the object handler
*/
private transient final PyramidObjects objectHandler;
} |
Republican presidential candidate Donald Trump speaks as Wisconsin Gov. Scott Walker listens during the first Republican presidential debate Aug. 6 in Cleveland. On Saturday, Trump attacked Walker during the business mogul’s visit to the Iowa State Fair. Credit: Andrew Harnik
By of the
Billionaire businessman Donald Trump launched another attack against Wisconsin Gov. Scott Walker Saturday during a campaign stop at the Iowa State Fair.
Trump, who has overtaken Walker as the front-runner in the crucial state in recent polls, slammed the Republican governor and his record in Wisconsin, saying "there's tremendous dissension all over the state."
The real estate mogul said that economic growth under Walker has been "terrible."
Trump's comments echoed attacks he has made against Walker in recent weeks.
Until recently, polls showed Walker as the front-runner in Iowa and several other crucial early primary states. But several polls released in the wake of the first GOP debate have shown Walker dropping and Trump surging into a strong lead.
"His growth is terrible in terms of the state," Trump said. "There's tremendous dissension all over the state."
Walker campaign spokeswoman AshLee Strong responded in a statement: "As Governor Walker has said, these are the same failed Democrat talking points that voters rejected by electing him three times in four years. Governor Walker's record speaks for itself... The governor will continue to focus on the real opponent in this race, Hillary Clinton, as he shares his message of reform and results with the people of America."
Trump made his comments to reporters Saturday while standing near the spot where his private helicopter had landed outside the fairgrounds. He was flanked by children waiting for the helicopter rides he'd promised them. After a quick ride, Trump climbed into a golf cart and headed into the fair.
Trump is certainly not the first Walker opponent to accuse him of dividing the state.
Critics have repeatedly called the governor one of the most divisive politicians in recent Wisconsin history, especially in the wake of his push to all but end collective bargaining for most of the state's public workers. Walker unveiled the measure, known as Act 10, soon after taking office in early 2011, a move that drew tens of thousands of protesters to the Capitol and sent the state's 14 Democratic senators rushing to Illinois in an effort to block the plan.
It also spurred a historic wave of recall elections targeting Walker and others. In 2012, he became the first governor in U.S. history to survive a recall attempt. That raised his profile and dramatically widened his fundraising base nationwide.
Trump launched his latest attack against Walker after being asked an unrelated question about whether a "President Trump" would be the same as a "Candidate Trump" when it came to dealing with foreign leaders.
"I'm doing well. I'm leading in every poll — the little ones, the big ones. So importantly to me, I'm leading in Iowa," Trump said.
Trump repeated he waited until he was attacked first before going after Walker.
"Nobody was willing to say it but me. And I didn't want to say it. I would not have said it, until he attacked me," Trump said. "Then I said, 'Thank you very much.' "
Trump's first round of attacks targeting Walker came last month, and was apparently triggered by a fundraiser for the Wisconsin governor calling Trump a "DumbDumb." Trump quickly fired back, saying that Wisconsin was "doing terribly," and calling health care and education in the state "a disaster."
"I've been nice to Scott Walker. You know, he's a nice guy," Trump said at the time. "And then today I read this horrible statement from his fundraiser about Trump. I said, 'Oh, finally I can attack. Finally. Finally.' "
Walker has repeatedly refused to criticize fellow GOP presidential hopefuls, but has made exceptions for Trump on several occasions in recent weeks.
The first came after Trump attacked Arizona Sen. John McCain, saying he wasn't a war hero. Walker has since criticized Trump's offensive comments targeting Megyn Kelly of Fox News.
After a bill signing event Wednesday at Wisconsin State Fair, Walker was asked about Trump and said he would "never use the kind of language" used by his rival.
"I may disagree with someone, but I will always be respectful to people and I will never use the kind of language that you've heard out of him and some of the candidates out there," Walker said.
Earlier in the week, Walker called Trump's campaign a "sideshow."
"For a lot of us, it's like watching a car accident instead of focusing on the direction we should be headed," Walker said on Fox News' "America's Newsroom." |
/**
* @brief Get data from terminal state
*
* @param s : a terminal state
*
* @return the data associated with the terminal state @a s,
* or TRIE_DATA_ERROR if @a s is not a terminal state
*
*/
TrieData
trie_state_get_terminal_data (const TrieState *s)
{
TrieIndex tail_index;
TrieIndex index = s->index;
if (!s)
return TRIE_DATA_ERROR;
if (!s->is_suffix){
if (!trie_da_is_separate(s->trie->da, index)) {
Bool ret = da_walk (s->trie->da, &index, TRIE_CHAR_TERM);
if (!ret) {
return TRIE_DATA_ERROR;
}
}
tail_index = trie_da_get_tail_index (s->trie->da, index);
}
else {
tail_index = s->index;
}
return tail_get_data (s->trie->tail, tail_index);
} |
#include <bits/stdc++.h>
typedef long long int ll;
using namespace std;
bool if_possible(vector<ll> &a, vector<ll> &b,ll round)
{
ll n=b.size();
ll comp=round-n;
ll considering_rounds=round-round/4;
ll first=0;
if(comp>=considering_rounds)
first+=considering_rounds*100;
else
{
first+=comp*100+a[considering_rounds-comp-1];
}
ll mine=min(considering_rounds,n);
ll second=b[mine-1];
if(first>=second)
return true;
return false;
}
int main(){
int t;
cin>>t;
while(t--){
ll n;
cin>>n;
vector<ll> dp1(n);
vector<ll> dp2(n);
for(int i=0; i<n; i++)
cin>>dp1[i];
for(int i=0; i<n; i++)
cin>>dp2[i];
sort(dp1.begin(),dp1.end(),greater<ll>());
sort(dp2.begin(),dp2.end(),greater<ll>());
int temp=n-n/4;
int sum1=0,sum2=0;
for(int i=0; i<temp; i++)
{
sum1+=dp1[i];
sum2+=dp2[i];
}
for(int i=1; i<n; i++)
{
dp1[i]=dp1[i-1]+dp1[i];
dp2[i]=dp2[i-1]+dp2[i];
}
if(sum1>=sum2)
cout<<0<<endl;
else
{
ll l=n+1;
ll r=INT_MAX;
ll ans=r;
while(r>=l)
{
ll mid=l+(r-l)/2;
if(if_possible(dp1,dp2,mid))
{
ans=mid;
r=mid-1;
}
else
{
l=mid+1;
}
}
cout<<ans-n<<endl;
}
}
return 0;
} |
<filename>src/Game/Value/Internal.hs
module Game.Value.Internal
( GameValue(..)
) where
import DSpies.Prelude
newtype GameValue = GameValue {leftUtility :: Double}
deriving (Eq, Show)
|
/**
* vcalendar. 2018
*
* @author Eduard Maximovich <[email protected]>
*/
public class App extends Application {
@Override
public void onCreate() {
super.onCreate();
VCalendar.initialize(this, true);
}
} |
Google is about to make its biggest push yet to get Glass in the hands of as many people as possible. The Verge has obtained documents indicating that the company will open up its "Explorer Program" and make Glass available to anyone who wants to purchase a pair, possibly as soon as next week. It’ll be a limited-time offer, only available for about a day, and only US residents will be eligible to purchase the $1,500 device. Google will also include a free sunglass shade or one of its newly-introduced prescription glasses frames along with any purchase. An internal Google slide shows that the promotion may be announced on April 15th, though all the details of this program have yet to be finalized.
While this program will make Glass available to anyone in the US who wants to buy it, Google makes it clear on the leaked slide that this is an expansion of its existing Explorer program, not a full consumer release (still expected for later in 2014). It sounds like Google simply wants to get the device in the hands of as many people as possible for testing and development purposes ahead of that launch. Alternately, it could be Google's way of clearing out Explorer edition stock ahead of the full consumer launch.
A big push for Glass ahead of its eventual consumer release
Previous implementations of the Glass Explorer program have come with limitations of their own — Google first started accepting pre-orders for the device nearly two years ago at Google I/O 2012. That brought in about 2,000 orders, and another 8,000 people got their hands on a pair through a wider promotion later that year in which consumers told Google what they would do if they had their own Glass headset. Since then, Google has made Glass available through some smaller campaigns as well as opening up friend referrals and an ongoing waitlist to become an Explorer — but this new program could symbolize Google’s last major effort to ramp up the developer community ahead of the official Glass launch.
Update April 10 4:05PM: Google has officially announced its plan to sell Glass for one day only. In a Google+ post, the Glass team confirms that anyone in the US can purchase Glass on Tuesday, April 15th starting at 9AM ET. Google says orders will include the free sunglass shade or prescription lens frame, and notes that supplies will be limited. You can purchase at this link next Tuesday. |
/***
* Test for {@code EOabs}
* checks if the base value is returned as a non-negative number
*/
@Test
void EOabs() {
final EOfloat left = new EOfloat(-12.0);
final EOfloat absolute = left.EOneg();
MatcherAssert.assertThat(
absolute._getData().toFloat(),
Matchers.equalTo(12.0)
);
} |
package exec
import (
"amp/back-go/common"
"amp/back-go/protocol"
"errors"
"fmt"
"github.com/golang/protobuf/proto"
"github.com/yddeng/dnet"
"github.com/yddeng/dnet/drpc"
"github.com/yddeng/utils/task"
"log"
"math/rand"
"net"
"time"
)
type Config struct {
Name string `json:"name"`
Inet string `json:"inet"`
Net string `json:"net"`
Center string `json:"center"`
Token string `json:"token"`
DataPath string `json:"data_path"`
}
type Executor struct {
cfg *Config
dialing bool
session dnet.Session
taskPool *task.TaskPool
rpcServer *drpc.Server
rpcClient *drpc.Client
die chan struct{}
heartbeatTimer int64
}
func (er *Executor) SendMessage(msg proto.Message) error {
if er.session == nil {
return errors.New("session is nil")
}
return er.session.Send(protocol.NewMessage(msg))
}
func (er *Executor) SendRequest(req *drpc.Request) error {
if er.session == nil {
return errors.New("session is nil")
}
return er.session.Send(req)
}
func (er *Executor) SendResponse(resp *drpc.Response) error {
if er.session == nil {
return errors.New("session is nil")
}
return er.session.Send(resp)
}
func (er *Executor) Go(data proto.Message, callback func(interface{}, error)) error {
return er.rpcClient.Go(er, proto.MessageName(data), data, time.Second*5, callback)
}
func (er *Executor) Submit(fn interface{}, args ...interface{}) error {
return er.taskPool.Submit(fn, args...)
}
func (er *Executor) closed() bool {
select {
case <-er.die:
return true
default:
return false
}
}
func (er *Executor) dial() {
if er.session != nil || er.dialing || er.closed() {
return
}
er.dialing = true
go func() {
for {
conn, err := dnet.DialTCP(er.cfg.Center, time.Second*5)
if nil == err {
er.onConnected(conn)
return
} else {
time.Sleep(time.Millisecond * time.Duration(rand.Intn(1000)+500))
}
}
}()
}
func (er *Executor) onConnected(conn net.Conn) {
er.Submit(func() {
log.Printf("onConnected center %s", conn.RemoteAddr().String())
er.dialing = false
er.session = dnet.NewTCPSession(conn,
dnet.WithCodec(new(protocol.Codec)),
dnet.WithErrorCallback(func(session dnet.Session, err error) {
log.Println(err)
session.Close(err)
}),
dnet.WithMessageCallback(func(session dnet.Session, data interface{}) {
er.Submit(func() {
var err error
switch data.(type) {
case *drpc.Request:
err = er.rpcServer.OnRPCRequest(er, data.(*drpc.Request))
case *drpc.Response:
err = er.rpcClient.OnRPCResponse(data.(*drpc.Response))
case *protocol.Message:
er.dispatchMsg(session, data.(*protocol.Message))
}
if err != nil {
log.Println(err)
}
})
}),
dnet.WithCloseCallback(func(session dnet.Session, reason error) {
er.Submit(func() {
er.session.SetContext(nil)
er.session = nil
log.Printf("session closed, reason: %s", reason)
er.dial()
})
}))
// login
if err := er.Go(&protocol.LoginReq{
Name: er.cfg.Name,
Net: er.cfg.Net,
Inet: er.cfg.Inet,
Token: er.cfg.Token,
}, func(i interface{}, err error) {
if err != nil {
er.session.Close(err)
panic(err)
}
resp := i.(*protocol.LoginResp)
if resp.GetCode() != "" {
err = errors.New(resp.GetCode())
er.session.Close(err)
panic(err)
}
}); err != nil {
er.session.Close(err)
panic(err)
}
})
}
func (er *Executor) tick() {
timer := time.NewTimer(time.Second)
heartbeatMsg := &protocol.Heartbeat{}
for {
select {
case <-er.die:
timer.Stop()
return
case now := <-timer.C:
nodeStateMsg := packCollector()
er.Submit(func() {
_ = er.SendMessage(nodeStateMsg)
if now.Unix() > er.heartbeatTimer {
_ = er.SendMessage(heartbeatMsg)
er.heartbeatTimer = now.Add(common.HeartbeatTimeout / 2).Unix()
}
timer.Reset(time.Second)
})
}
}
}
func (er *Executor) dispatchMsg(session dnet.Session, msg *protocol.Message) {}
var er *Executor
func Start(cfg Config) (err error) {
er = new(Executor)
er.cfg = &cfg
er.die = make(chan struct{})
er.taskPool = task.NewTaskPool(1, 1024)
er.rpcClient = drpc.NewClient()
er.rpcServer = drpc.NewServer()
er.rpcServer.Register(proto.MessageName(&protocol.CmdExecReq{}), er.onCmdExec)
er.rpcServer.Register(proto.MessageName(&protocol.ProcessExecReq{}), er.onProcExec)
er.rpcServer.Register(proto.MessageName(&protocol.ProcessSignalReq{}), er.onProcSignal)
er.rpcServer.Register(proto.MessageName(&protocol.ProcessStateReq{}), er.onProcState)
er.rpcServer.Register(proto.MessageName(&protocol.LogFileReq{}), er.onLogFile)
loadProcess(cfg.DataPath)
initCollector()
er.Submit(er.dial)
go er.tick()
return nil
}
func Stop() {
stopCh := make(chan struct{})
er.Submit(func() {
close(er.die)
er.session.Close(fmt.Errorf("stop"))
saveProcess()
stopCh <- struct{}{}
})
//go func() {
// ticker := time.NewTicker(time.Millisecond * 50)
// for {
// <-ticker.C
// if er.taskPool.NumTask() == 0 {
// ticker.Stop()
// stopCh <- struct{}{}
// }
// }
//}()
<-stopCh
}
|
def exec_maven(mvn_args=()):
run_cmd([os.path.join(SPARK_HOME, "build", "mvn")] + mvn_args) |
def distance_to(self, other, radius=EARTH_RADIUS):
return radius * math.acos(
math.sin(self.rad_lat) * math.sin(other.rad_lat) +
math.cos(self.rad_lat) *
math.cos(other.rad_lat) *
math.cos(self.rad_lon - other.rad_lon)
) |
<filename>src/app/cells/options-cell/options-cell.component.ts
import { Component, Input } from '@angular/core';
import { CellComponent, ColumnConfig } from 'material-dynamic-table';
import { Product } from '../../product';
@Component({
selector: 'ld-options-cell',
templateUrl: './options-cell.component.html'
})
export class OptionsCellComponent implements CellComponent {
@Input()
column: ColumnConfig;
@Input()
row: Product;
constructor() {}
showDetails() {
const productName = this.row.product;
alert(`Product name is ${productName}.`);
}
} |
<gh_stars>100-1000
# dockerpty: test_tty.py.
#
# Copyright 2014 <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from expects import expect, equal, be_none, be_true, be_false
import dockerpty.tty as tty
import tests.util as util
import os
import pty
import termios
import tempfile
def israw(fd):
__, __, __, flags, __, __, __ = termios.tcgetattr(fd)
return not flags & termios.ECHO
def test_size_returns_none_for_non_tty():
with tempfile.TemporaryFile() as t:
expect(tty.size(t)).to(be_none)
def test_size_returns_a_tuple_for_a_tty():
fd, __ = pty.openpty()
fd = os.fdopen(fd)
util.set_pty_size(fd, (43, 120))
expect(tty.size(fd)).to(equal((43, 120)))
class TestTerminal(object):
def test_start_when_raw(self):
fd, __ = pty.openpty()
terminal = tty.Terminal(os.fdopen(fd), raw=True)
expect(israw(fd)).to(be_false)
terminal.start()
expect(israw(fd)).to(be_true)
def test_start_when_not_raw(self):
fd, __ = pty.openpty()
terminal = tty.Terminal(os.fdopen(fd), raw=False)
expect(israw(fd)).to(be_false)
terminal.start()
expect(israw(fd)).to(be_false)
def test_stop_when_raw(self):
fd, __ = pty.openpty()
terminal = tty.Terminal(os.fdopen(fd), raw=True)
terminal.start()
terminal.stop()
expect(israw(fd)).to(be_false)
def test_raw_with_block(self):
fd, __ = pty.openpty()
fd = os.fdopen(fd)
with tty.Terminal(fd, raw=True):
expect(israw(fd)).to(be_true)
expect(israw(fd)).to(be_false)
def test_start_does_not_crash_when_fd_is_not_a_tty(self):
with tempfile.TemporaryFile() as f:
terminal = tty.Terminal(f, raw=True)
terminal.start()
terminal.stop()
def test_repr(self):
fd = 'some_fd'
terminal = tty.Terminal(fd, raw=True)
expect(repr(terminal)).to(equal("Terminal(some_fd, raw=True)"))
|
def edge_type(self, u, v):
if self.has_edge(u, v):
return EdgeType.arrow.value
elif self.has_bidirected_edge(u, v):
return EdgeType.bidirected.value
elif self.has_circle_edge(u, v):
return EdgeType.circle.value
else:
return None |
// createMetricNamespace returns metric namespace based on given `ns` which is used as a prefix; all dynamic elements
// in the `metricName` are defined based on content of map `dynamicElements`
func (creator *nsCreator) createMetricNamespace(ns snap.Namespace, metricName string) (snap.Namespace, error) {
metricName = strings.TrimSpace(metricName)
if len(metricName) == 0 {
return nil, fmt.Errorf("Cannot create metric namespace: empty metric name %s", metricName)
}
elements := strings.Split(metricName, "/")
if !strings.Contains(metricName, "*") {
ns = ns.AddStaticElements(elements...)
return ns, nil
}
for index, element := range elements {
if element == "*" {
dynamicElement, ok := creator.dynamicElements[elements[index-1]]
if !ok {
return nil, fmt.Errorf("Unknown dynamic element in metric `%s` under index %d", metricName, index)
}
ns = ns.AddDynamicElement(dynamicElement.name, dynamicElement.description)
if len(elements)-1 == index {
ns = ns.AddStaticElement("value")
}
} else {
ns = ns.AddStaticElement(element)
}
}
if len(ns) == 0 {
return nil, fmt.Errorf("Cannot create metric namespace for metric %s", metricName)
}
return ns, nil
} |
// Copyright (c) 2019,CAOHONGJU All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package cache
import (
"sync"
"github.com/cnotch/ipchub/av/codec/hevc"
"github.com/cnotch/ipchub/av/format/rtp"
"github.com/cnotch/queue"
)
// HevcCache 画面组缓存(Group of Pictures).
type HevcCache struct {
cacheGop bool
l sync.RWMutex
gop queue.Queue
vps *rtp.Packet // 视频参数集包
sps *rtp.Packet // 序列参数集包
pps *rtp.Packet // 图像参数集包
}
// NewHevcCache 创建 HEVC 缓存
func NewHevcCache(cacheGop bool) *HevcCache {
return &HevcCache{
cacheGop: cacheGop,
}
}
// CachePack 向HevcCache中缓存包
func (cache *HevcCache) CachePack(pack Pack) {
rtppack := pack.(*rtp.Packet)
if rtppack.Channel != rtp.ChannelVideo {
return
}
// 判断是否是参数和关键帧包
vps, sps, pps, islice := cache.getPalyloadType(rtppack.Payload())
cache.l.Lock()
defer cache.l.Unlock()
if vps { // 视频参数
cache.vps = rtppack
return
}
if sps { // 序列头参数
cache.sps = rtppack
return
}
if pps { // 图像参数
cache.pps = rtppack
return
}
if cache.cacheGop { // 需要缓存 GOP
if islice { // 关键帧
cache.gop.Reset()
cache.gop.Push(rtppack)
} else if cache.gop.Len() > 0 {
cache.gop.Push(rtppack)
}
}
}
// Reset 重置HevcCache缓存
func (cache *HevcCache) Reset() {
cache.l.Lock()
defer cache.l.Unlock()
cache.vps = nil
cache.sps = nil
cache.pps = nil
cache.gop.Reset()
}
// PushTo 入列到指定的队列
func (cache *HevcCache) PushTo(q *queue.SyncQueue) int {
bytes := 0
cache.l.RLock()
defer cache.l.RUnlock()
// 写参数包
if cache.vps != nil {
q.Queue().Push(cache.vps)
bytes += cache.vps.Size()
}
if cache.sps != nil {
q.Queue().Push(cache.sps)
bytes += cache.sps.Size()
}
if cache.pps != nil {
q.Queue().Push(cache.pps)
bytes += cache.pps.Size()
}
// 如果必要,写 GopCache
if cache.cacheGop {
packs := cache.gop.Elems()
q.Queue().PushN(packs) // 启动阶段调用,无需加锁
for _, p := range packs {
bytes += p.(Pack).Size()
}
}
return bytes
}
func (cache *HevcCache) getPalyloadType(payload []byte) (vps, sps, pps, islice bool) {
if len(payload) < 3 {
return
}
// +---------------+---------------+
// |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |F| Type | LayerId | TID |
// +-------------+-----------------+
naluType := (payload[0] >> 1) & 0x3f
// 在RTP中的扩展,分片(FU)
if naluType == hevc.NalFuInRtp {
// 0 1 2 3 4 5 6 7
// +-+-+-+-+-+-+-+-+
// |S|E| FuType |
// +---------------+
naluType = payload[2] & 0x3f
if (payload[2]>>7)&1 == 1 { // 第一个分片
cache.nalType(naluType, &vps, &sps, &pps, &islice)
}
return
}
// 如果是原生的 HEVC NAL
if naluType <= hevc.NalRsvNvcl47 {
cache.nalType(naluType, &vps, &sps, &pps, &islice)
return
}
return
}
func (cache *HevcCache) nalType(nalType byte, vps, sps, pps, islice *bool) {
if nalType >= hevc.NalBlaWLp && nalType <= hevc.NalCraNut {
*islice = true
return
}
switch nalType {
case hevc.NalVps:
*vps = true
case hevc.NalSps:
*sps = true
case hevc.NalPps:
*pps = true
}
return
}
|
year=int(input())+1
def setelement(arr):
arr=str(arr)
return len(list(set(arr)))
for i in range(1000):
if setelement(year)!=4:
year=year+1
continue
else:
print(year)
break
|
#include <stdio.h>
#include "zip.h"
int deanonymize;
int main(int argc, char **argv) {
if (argc < 2) {
printf("Usage: %s <path_to_DOCX>\n", argv[0]);
printf("Optionaly provide output file as second argument.\n");
printf("-d as second argument will deanonymize given file. You can optionaly provide output file as third argument.\n");
return 0;
}
char *infile = argv[1];
char *outfile = NULL;
if (argc > 2) {
if (strcmp(argv[2], "-d") == 0) {
deanonymize = 1;
if (argc > 3) outfile = argv[3];
} else {
outfile = argv[2];
}
}
process(infile, outfile);
}
|
import os
import argparse
import tqdm
from itertools import chain
from collections import OrderedDict
import torch
import torchvision
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
import math
import pdb
from PIL import Image
import numpy as np
from sklearn.metrics import confusion_matrix
def calc_coeff(iter_num, high=1.0, low=0.0, alpha=10.0, max_iter=10000.0):
return np.float(2.0 * (high - low) / (1.0 + np.exp(-alpha*iter_num / max_iter)) - (high - low) + low)
def init_weights(m):
classname = m.__class__.__name__
if classname.find('Conv2d') != -1 or classname.find('ConvTranspose2d') != -1:
nn.init.kaiming_uniform_(m.weight)
nn.init.zeros_(m.bias)
elif classname.find('BatchNorm') != -1:
nn.init.normal_(m.weight, 1.0, 0.02)
nn.init.zeros_(m.bias)
elif classname.find('Linear') != -1:
nn.init.xavier_normal_(m.weight)
nn.init.zeros_(m.bias)
def grl_hook(coeff):
def fun1(grad):
return -coeff*grad.clone()
return fun1
class VGG16Base(nn.Module):
def __init__(self):
super(VGG16Base, self).__init__()
model_vgg = torchvision.models.vgg16(pretrained=True)
self.features = model_vgg.features
self.classifier = nn.Sequential()
for i in range(6):
self.classifier.add_module("classifier"+str(i), model_vgg.classifier[i])
self.feature_layers = nn.Sequential(self.features, self.classifier)
self.in_features = 4096
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
class ResBase34(nn.Module):
def __init__(self):
super(ResBase34, self).__init__()
model_resnet = torchvision.models.resnet34(pretrained=True)
self.conv1 = model_resnet.conv1
self.bn1 = model_resnet.bn1
self.relu = model_resnet.relu
self.maxpool = model_resnet.maxpool
self.layer1 = model_resnet.layer1
self.layer2 = model_resnet.layer2
self.layer3 = model_resnet.layer3
self.layer4 = model_resnet.layer4
self.avgpool = model_resnet.avgpool
self.in_features = model_resnet.fc.in_features
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
return x
class ResBase50(nn.Module):
def __init__(self):
super(ResBase50, self).__init__()
model_resnet50 = torchvision.models.resnet50(pretrained=True)
self.conv1 = model_resnet50.conv1
self.bn1 = model_resnet50.bn1
self.relu = model_resnet50.relu
self.maxpool = model_resnet50.maxpool
self.layer1 = model_resnet50.layer1
self.layer2 = model_resnet50.layer2
self.layer3 = model_resnet50.layer3
self.layer4 = model_resnet50.layer4
self.avgpool = model_resnet50.avgpool
self.in_features = model_resnet50.fc.in_features
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
return x
class ResBase101(nn.Module):
def __init__(self):
super(ResBase101, self).__init__()
model_resnet101 = torchvision.models.resnet101(pretrained=True)
self.conv1 = model_resnet101.conv1
self.bn1 = model_resnet101.bn1
self.relu = model_resnet101.relu
self.maxpool = model_resnet101.maxpool
self.layer1 = model_resnet101.layer1
self.layer2 = model_resnet101.layer2
self.layer3 = model_resnet101.layer3
self.layer4 = model_resnet101.layer4
self.avgpool = model_resnet101.avgpool
self.in_features = model_resnet101.fc.in_features
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.view(x.size(0), -1)
return x
class ResClassifier(nn.Module):
def __init__(self, class_num, feature_dim, bottleneck_dim=256):
super(ResClassifier, self).__init__()
self.bottleneck = nn.Linear(feature_dim, bottleneck_dim)
self.fc = nn.Linear(bottleneck_dim, class_num)
self.bottleneck.apply(init_weights)
self.fc.apply(init_weights)
def forward(self, x):
x = self.bottleneck(x)
y = self.fc(x)
return x,y
class ResClassifier_bn(nn.Module):
def __init__(self, class_num, feature_dim, bottleneck_dim=256):
super(ResClassifier_bn, self).__init__()
self.bottleneck = nn.Linear(feature_dim, bottleneck_dim)
self.fc = nn.Linear(bottleneck_dim, class_num)
self.bottleneck.apply(init_weights)
self.fc.apply(init_weights)
self.bn = nn.BatchNorm1d(bottleneck_dim, affine=True)
def forward(self, x):
x = self.bottleneck(x)
x = self.bn(x)
y = self.fc(x)
return x,y
class AdversarialNetwork(nn.Module):
def __init__(self, in_feature, hidden_size, max_iter=10000):
super(AdversarialNetwork, self).__init__()
self.ad_layer1 = nn.Linear(in_feature, hidden_size)
self.ad_layer2 = nn.Linear(hidden_size, hidden_size)
self.ad_layer3 = nn.Linear(hidden_size, 1)
self.relu1 = nn.ReLU()
self.relu2 = nn.ReLU()
self.dropout1 = nn.Dropout(0.5)
self.dropout2 = nn.Dropout(0.5)
self.sigmoid = nn.Sigmoid()
self.apply(init_weights)
self.iter_num = 0
self.alpha = 10
self.low = 0.0
self.high = 1.0
self.max_iter = max_iter
def forward(self, x):
if self.training:
self.iter_num += 1
coeff = calc_coeff(self.iter_num, self.high, self.low, self.alpha, self.max_iter)
x = x * 1.0
x.register_hook(grl_hook(coeff))
x = self.ad_layer1(x)
x = self.relu1(x)
# x = self.dropout1(x)
# x = self.ad_layer2(x)
# x = self.relu2(x)
# x = self.dropout2(x)
y = self.ad_layer3(x)
y = self.sigmoid(y)
return y
def output_num(self):
return 1
def get_parameters(self):
return [{"params":self.parameters(), "lr_mult":10, 'decay_mult':2}]
IMG_EXTENSIONS = ['.jpg', '.JPG', '.jpeg', '.JPEG', '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',]
def is_image_file(filename):
return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
def default_loader(path):
return Image.open(path).convert('RGB')
def make_dataset(root, label):
images = []
labeltxt = open(label)
for line in labeltxt:
data = line.strip().split(' ')
if is_image_file(data[0]):
path = os.path.join(root, data[0])
gt = int(data[1])
item = (path, gt)
images.append(item)
return images
class ObjectImage(torch.utils.data.Dataset):
def __init__(self, root, label, transform=None, loader=default_loader):
imgs = make_dataset(root, label)
self.root = root
self.label = label
self.imgs = imgs
self.transform = transform
self.loader = loader
def __getitem__(self, index):
path, target = self.imgs[index]
img = self.loader(path)
if self.transform is not None:
img = self.transform(img)
return img, target
def __len__(self):
return len(self.imgs)
class ObjectImage_mul(torch.utils.data.Dataset):
def __init__(self, root, label, transform=None, loader=default_loader):
imgs = make_dataset(root, label)
self.root = root
self.label = label
self.imgs = imgs
self.transform = transform
self.loader = loader
def __getitem__(self, index):
path, target = self.imgs[index]
img = self.loader(path)
if self.transform is not None:
# print(type(self.transform).__name__)
if type(self.transform).__name__=='list':
img = [t(img) for t in self.transform]
else:
img = self.transform(img)
return img, target, index
def __len__(self):
return len(self.imgs)
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.01)
m.bias.data.normal_(0.0, 0.01)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.01)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
m.weight.data.normal_(0.0, 0.01)
m.bias.data.normal_(0.0, 0.01)
def print_args(args):
log_str = ("==========================================\n")
log_str += ("========== config =============\n")
log_str += ("==========================================\n")
for arg, content in args.__dict__.items():
log_str += ("{}:{}\n".format(arg, content))
log_str += ("\n==========================================\n")
print(log_str)
args.out_file.write(log_str+'\n')
args.out_file.flush()
def cal_fea(loader, model):
start_test = True
with torch.no_grad():
iter_test = iter(loader)
for i in range(len(loader)):
inputs, labels = iter_test.next()
inputs = inputs.cuda()
feas, outputs = model(inputs)
if start_test:
all_feas = feas.float().cpu()
all_label = labels.float()
start_test = False
else:
all_feas = torch.cat((all_feas, feas.float().cpu()), 0)
all_label = torch.cat((all_label, labels.float()), 0)
return all_feas, all_label
def cal_acc(loader, model, flag=True, fc=None):
start_test = True
with torch.no_grad():
iter_test = iter(loader)
for i in range(len(loader)):
data = iter_test.next()
inputs = data[0]
labels = data[1]
inputs = inputs.cuda()
if flag:
_, outputs = model(inputs)
else:
if fc is not None:
feas, outputs = model(inputs)
outputs = fc(feas)
else:
outputs = model(inputs)
if start_test:
all_output = outputs.float().cpu()
all_label = labels.float()
start_test = False
else:
all_output = torch.cat((all_output, outputs.float().cpu()), 0)
all_label = torch.cat((all_label, labels.float()), 0)
all_output = nn.Softmax(dim=1)(all_output)
_, predict = torch.max(all_output, 1)
accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])
return accuracy, predict, all_output, all_label
def cal_acc_visda(loader, model, flag=True, fc=None):
start_test = True
with torch.no_grad():
iter_test = iter(loader)
for i in range(len(loader)):
data = iter_test.next()
inputs = data[0]
labels = data[1]
inputs = inputs.cuda()
if flag:
_, outputs = model(inputs)
else:
if fc is not None:
feas, outputs = model(inputs)
outputs = fc(feas)
else:
outputs = model(inputs)
if start_test:
all_output = outputs.float().cpu()
all_label = labels.float()
start_test = False
else:
all_output = torch.cat((all_output, outputs.float().cpu()), 0)
all_label = torch.cat((all_label, labels.float()), 0)
all_output = nn.Softmax(dim=1)(all_output)
_, predict = torch.max(all_output, 1)
matrix = confusion_matrix(all_label, torch.squeeze(predict).float())
acc = matrix.diagonal()/matrix.sum(axis=1) * 100
aacc = acc.mean() / 100
aa = [str(np.round(i, 2)) for i in acc]
acc = ' '.join(aa)
print(acc)
# accuracy = torch.sum(torch.squeeze(predict).float() == all_label).item() / float(all_label.size()[0])
return aacc, predict, all_output, all_label, acc
def linear_rampup(current, rampup_length):
if rampup_length == 0:
return 1.0
else:
current = np.clip(current / rampup_length, 0.0, 1.0)
return float(current)
class SemiLoss(object):
def __call__(self, outputs_x, targets_x, outputs_u, targets_u, epoch, max_epochs=30, lambda_u=75):
probs_u = torch.softmax(outputs_u, dim=1)
Lx = -torch.mean(torch.sum(F.log_softmax(outputs_x, dim=1) * targets_x, dim=1))
Lu = torch.mean((probs_u - targets_u)**2)
return Lx, Lu, lambda_u * linear_rampup(epoch, max_epochs)
class WeightEMA(object):
def __init__(self, model, ema_model, alpha=0.999):
self.model = model
self.ema_model = ema_model
self.alpha = alpha
self.params = list(model.state_dict().values())
self.ema_params = list(ema_model.state_dict().values())
self.wd = 0.02 * args.lr
for param, ema_param in zip(self.params, self.ema_params):
param.data.copy_(ema_param.data)
def step(self):
one_minus_alpha = 1.0 - self.alpha
for param, ema_param in zip(self.params, self.ema_params):
ema_param.mul_(self.alpha)
ema_param.add_(param * one_minus_alpha)
# customized weight decay
param.mul_(1 - self.wd)
def interleave_offsets(batch, nu):
groups = [batch // (nu + 1)] * (nu + 1)
for x in range(batch - sum(groups)):
groups[-x - 1] += 1
offsets = [0]
for g in groups:
offsets.append(offsets[-1] + g)
assert offsets[-1] == batch
return offsets
def interleave(xy, batch):
nu = len(xy) - 1
offsets = interleave_offsets(batch, nu)
xy = [[v[offsets[p]:offsets[p + 1]] for p in range(nu + 1)] for v in xy]
for i in range(1, nu + 1):
xy[0][i], xy[i][i] = xy[i][i], xy[0][i]
return [torch.cat(v, dim=0) for v in xy] |
<gh_stars>10-100
/* Finish access to a mmap'd malloc managed region.
Copyright 1992 Free Software Foundation, Inc.
Contributed by <NAME> at Cygnus Support. <EMAIL>
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with the GNU C Library; see the file COPYING.LIB. If
not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA. */
#include <sys/types.h>
#include "mmprivate.h"
/* Terminate access to a mmalloc managed region by unmapping all memory pages
associated with the region, and closing the file descriptor if it is one
that we opened.
Returns NULL on success.
Returns the malloc descriptor on failure, which can subsequently be used
for further action, such as obtaining more information about the nature of
the failure by examining the preserved errno value.
Note that the malloc descriptor that we are using is currently located in
region we are about to unmap, so we first make a local copy of it on the
stack and use the copy. */
PTR
mmalloc_detach (md)
PTR md;
{
struct mdesc mtemp;
if (md != NULL)
{
mtemp = *(struct mdesc *) md;
/* Now unmap all the pages associated with this region by asking for a
negative increment equal to the current size of the region. */
if ((mtemp.morecore (&mtemp, mtemp.base - mtemp.breakval)) == NULL)
{
/* Deallocating failed. Update the original malloc descriptor
with any changes */
*(struct mdesc *) md = mtemp;
}
else
{
if (mtemp.flags & MMALLOC_DEVZERO)
{
close (mtemp.fd);
}
md = NULL;
}
}
return (md);
}
|
Children and careers
takes the best part of a year. This is little use to someone who needs to respond quickly to a domestic crisis, and if you become pregnant shortly after the closing date your application is unlikely to be sorted out before the baby is 10 months old and not within the limits of maternity leave. Secondly, the scheme is supposed to ensure that the .tandard of part timers is comparable with that of full timers. The ratio of full time to part time jobs is fixed and therefore works against those applying for jobs in specialties that attract a lot of women. It is ironic that obstetrics, paediatrics, and psychiatry are just the specialties in which Dr Begley and I believe the experience of being a mother can be so valuable. Thirdly, and perhaps most importantly, the scheme is specifically for doctors with domestic responsibilities, yet only a handful of men are using it. It is essential that the problem ofcombining family and work commitments is shared by both partners and is not seen as a women's problem. Many of us are married to doctors, and too often careers advice is aimed at how best to fit our careers around our husbands'. At present, childcare facilities are being cut rather than expanded and maternity leave arrangements are falling even further below the standards ofother north European countries. When I entered medical school in 1976 half of our year were women. Now, five years afer qualifying, many of us with domestic commitments are struggling. I urge all those who care about their children and their career to push for changes in the system. Can any of us be proud to belong to a profession where official advice for interviews is to conceal your pregnancy? PENELOPE CAMPLING |
/// Build a new Martian adapter with the given registry of Martian stages
/// Arguments:
/// - `stage_map`: names and implementations of the Martian stages that can be run by this binary.
pub fn new(stage_map: HashMap<String, Box<dyn RawMartianStage>, S>) -> MartianAdapter<S> {
MartianAdapter {
stage_map,
log_level: LevelFilter::Warn,
is_error_assert: Box::new(|_| false),
}
} |
def contactChangedNick(self, person, newnick):
oldname = person.name
if oldname in self.contacts:
del self.contacts[oldname]
person.name = newnick
self.contacts[newnick] = person
if oldname in self.onlineContacts:
del self.onlineContacts[oldname]
self.onlineContacts[newnick] = person |
# Argumentos posicionais
def describle_pet(animal_type, pet_name):
"""[Exibe informações sobre o animal de estimação]
Args:
animal_type ([string]): [Qual o tipo do animal]
pet_name ([string]): [Nome do animal]
"""
print(f"\nI have a {animal_type}.")
print(f"My {animal_type}'s name is {pet_name.capitalize()}.")
describle_pet('cat', 'barney')
describle_pet('turtle', 'margarida')
# Argumentos nomeados (Ao nomear o argumento, não importa a ordem)
describle_pet(pet_name='kit kat', animal_type='cat')
# Valor Default (deve sempre ser alocado após os argumentos posicionados)
def describle_pet(pet_name ,animal_type='cat'):
"""[Descreve o animal de estimação]
Args:
pet_name ([string]): [Nome do animal]
animal_type (str, optional): [Tipo do animal]. Defaults to 'cat'.
"""
print(f"\nI have a {animal_type}.")
print(f"My {animal_type}'s name is {pet_name.capitalize()}.")
describle_pet("Luke") |
Calling All Android Users: Open Alpha!
Alright, I’m super excited to be able to invite you guys to alpha test the official Android app! But before you get to excited let’s go over a few things:
The only features that currently work are:
Just and Similar searches. Tap the search icon in the top bar to open the modal.
The features that work on the playlist view are: play/pause, next/previous, seeking to a new place in a song (via clicking), and tapping a new song to play in the playlist.
The app still needs a lot of bugs squashed and featured completed. So let us know everything you find, but please be nice ;)
Ok, now that you know the above, let’s do this. Head here to download the app: http://www.mediafire.com/?xtz6gwd1jd61dze
Send all bugs, feature request, or whatever else to [email protected]. Feel free to make use of /r/tubalr as well!
Final note, the app is being developed by Iheanyi Ekechukwu, check him out here: website, @kwuchu, Facebook
This was posted 5 years ago. It has 7 notes. |
/**
* The implementation of {@link IncomingWebhook}.
*/
public class IncomingWebhookImpl extends WebhookImpl implements IncomingWebhook {
private final String token;
/**
* Creates a new incoming webhook.
*
* @param api The discord api instance.
* @param data The json data of the webhook.
*/
public IncomingWebhookImpl(DiscordApi api, JsonNode data) {
super(api, data);
token = data.get("token").asText();
}
@Override
public Optional<IncomingWebhook> asIncomingWebhook() {
return Optional.of(this);
}
@Override
public WebhookType getType() {
return WebhookType.INCOMING;
}
@Override
public boolean isIncomingWebhook() {
return true;
}
@Override
public boolean isChannelFollowerWebhook() {
return false;
}
@Override
public CompletableFuture<Void> delete(String reason) {
return new RestRequest<Void>(getApi(), RestMethod.DELETE, RestEndpoint.WEBHOOK)
.setUrlParameters(getIdAsString(), getToken())
.setAuditLogReason(reason)
.execute(result -> null);
}
/**
* Gets the secure token of the webhook.
*
* @return The secure token of the webhook.
*/
public String getToken() {
return token;
}
} |
<gh_stars>0
/* Import2: An RPG */
package studio.ignitionigloogames.twistedtrek.import2.maze.objects;
import studio.ignitionigloogames.randomrange.RandomRange;
import studio.ignitionigloogames.twistedtrek.import2.maze.Maze;
import studio.ignitionigloogames.twistedtrek.import2.maze.abc.AbstractGround;
import studio.ignitionigloogames.twistedtrek.import2.resourcemanagers.ObjectImageConstants;
import studio.ignitionigloogames.twistedtrek.import2.resourcemanagers.SoundConstants;
import studio.ignitionigloogames.twistedtrek.import2.resourcemanagers.SoundManager;
public class Ice extends AbstractGround {
public Ice() {
super(false);
}
@Override
public final int getBaseID() {
return ObjectImageConstants.OBJECT_IMAGE_ICE;
}
@Override
public String getName() {
return "Ice";
}
@Override
public String getPluralName() {
return "Squares of Ice";
}
@Override
public boolean overridesDefaultPostMove() {
return true;
}
@Override
public void postMoveAction(final boolean ie, final int dirX, final int dirY) {
SoundManager.playSound(SoundConstants.SOUND_WALK_ICE);
}
@Override
public String getDescription() {
return "Ice is one of the many types of ground - it is frictionless. Anything that crosses it will slide.";
}
@Override
public boolean shouldGenerateObject(final Maze maze, final int row, final int col, final int floor, final int level,
final int layer) {
// Generate Ice at 40% rate
final RandomRange reject = new RandomRange(1, 100);
return reject.generate() < 40;
}
}
|
/**
* Setup the Video preview.
* @param texture
*/
public void SurfaceTextureCreatedCallback(SurfaceTexture texture) {
try {
videoService.showPreview(texture);
if (this.videoService.canStart()) {
this.videoService.start();
} else {
if (this.videoService.getPaused() && this.videoService.canSetPaused()) {
this.videoService.setPaused(false);
}
}
} catch (SFBException e) {
e.printStackTrace();
}
} |
Laying the Foundation for Foundational Technologies
The authors of “Patenting Foundational Technologies: Lessons From CRISPR and Other Core Biotechnologies” (Feeney et al. 2018) propose significant guidelines to ensure both that emerging technologies profit the inventor(s), and that medical breakthroughs coming from such technologies quickly reach those in need as swiftly as is reasonable. Technologies like CRISPR hold the promise of new and exciting therapeutic interventions for a wide variety of conditions. To think that human suffering that might otherwise be prevented would be prolonged by things like exclusivity rights can easily fill one with alarm. But the same could be said for just about any new treatment. The reason why CRISPR and other technologies might be targeted for accelerated and widespread access is their potential classification as foundational technologies. The main worry expressed by Feeney and colleagues is that patents on foundational technologies would bottleneck and delay further advances that might come from the initial innovation. The reason for this is, applications of the foundational technology would require a right to use the foundational technology in the first place—a worry shared, not surprisingly, by many studying these issues (e.g., Rai and Boyle 2007). While I largely agree with Feeney and colleagues’ overall position, I believe their position would benefit from a more thorough examination and definition of what it means for a technology to be foundational. Without such an analysis, establishing whether a technology is truly foundational or not could delay the benefit of the technology from reaching people quickly enough. And this is exactly the problem with temporary exclusivity rights that the authors are trying to address in the first place. Discussion from Feeney and colleagues as to what qualifies as a foundational technology largely focus on the potential for wide use of said technology and its related benefit to the population. It is rightly pointed out that exclusivity rights can exacerbate the problems of some medical interventions only being accessible by the very rich. While the overall sentiment is attractive, the conditions under which a technology should be thought of as foundational needs to extend beyond the scope of its potential benefit. Indeed, even the promise of future innovation strikes me as missing the mark as to how we should understand foundational technologies in this context. For guidance on what constitutes a technology as being foundational, I recommend insight from other fields. Across multiple disciplines, technologies are understood as foundational insofar as they are “domain neutral” (e.g., Chameau et al. 2014; Endy 2005; Iansiti and Lakhani 2017). That is, the potential applications of the technology are not limited to a single set of related problems, or even to one field. Being a foundational technology in this sense is much more about the breadth of potential applications than anything else. Note that this can be independent from the promise of future development, which could be more of a utility evaluation. If the standards for a technology being foundational are more in line with future utility, domain neutrality may be forgotten. For example, an innovation in the treatment of a particular type of cancer may very well serve as the inspiration and basis for many new avenues of research into future treatment. The innovation may indeed be “foundational” to a whole family of new treatment technologies. But that future promise of innovation is relative to a particular kind of cancer, constrained to a specific domain. In that sense, the technology fails to be foundational under the standard just described. Promise of future development and wide-reaching benefit are no doubt laudable goals, and the desire to |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.