/**
 * Compares {@link DocFlavor}s by MIME type, then by media subtype.
 *
 * @author Gunnar Hillert
 * @since 1.0
 */
public class DocFlavorComparator implements Comparator<DocFlavor> {

    public int compare(DocFlavor docFlavor1, DocFlavor docFlavor2) {
        int comparison = docFlavor1.getMimeType().compareTo(docFlavor2.getMimeType());
        if (comparison == 0) {
            return docFlavor1.getMediaSubtype().compareTo(docFlavor2.getMediaSubtype());
        }
        return comparison;
    }
}
<filename>commons/src/main/java/com/vizerium/commons/indicators/MovingAverageCalculator.java<gh_stars>0
/*
 * Copyright 2019 Vizerium, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.vizerium.commons.indicators;

import java.util.Arrays;
import java.util.List;

import com.vizerium.commons.dao.UnitPrice;

public class MovingAverageCalculator implements IndicatorCalculator<MovingAverage> {

	@Override
	public MovingAverage calculate(List<? extends UnitPrice> unitPrices, MovingAverage movingAverage) {
		// I really tried to use lambda expressions here.
		// return calculate(unitPrices.stream().mapToFloat(UnitPrice::getClose).toArray(), ma);
		// but it does not have a mapToFloat. So, I am looping over it myself.
		// https://stackoverflow.com/questions/4837568/java-convert-arraylistfloat-to-float
		float[] closingPrices = new float[unitPrices.size()];
		int i = 0;
		for (UnitPrice unitPrice : unitPrices) {
			closingPrices[i++] = unitPrice.getClose();
		}
		float[] movingAverageValues = calculateArrayMA(movingAverage.getType(), closingPrices, movingAverage.getMA());
		movingAverage.setValues(movingAverageValues);
		return movingAverage;
	}

	public static float calculateSMA(float[] closingPrices, int numberOfPeriods) {
		if (closingPrices.length < numberOfPeriods) {
			return 0.0f;
		} else {
			float sma = 0.0f;
			for (int i = closingPrices.length - numberOfPeriods; i < closingPrices.length; i++) {
				sma += closingPrices[i];
			}
			return sma / numberOfPeriods;
		}
	}

	public static float calculateEMA(float[] closingPrices, int numberOfPeriods) {
		if (closingPrices.length < numberOfPeriods) {
			return 0.0f;
		} else {
			float weightingMultiplier = 2.0f / (numberOfPeriods + 1);
			// Seed the EMA with the SMA of the first numberOfPeriods values.
			float ema = calculateSMA(Arrays.copyOfRange(closingPrices, 0, numberOfPeriods), numberOfPeriods);
			for (int i = numberOfPeriods; i < closingPrices.length; i++) {
				ema = (closingPrices[i] - ema) * weightingMultiplier + ema;
			}
			return ema;
		}
	}

	public static float calculateWMA(float[] closingPrices, int numberOfPeriods) {
		if (closingPrices.length < numberOfPeriods) {
			return 0.0f;
		} else {
			// Welles Wilder smoothing: seed with the SMA, then smooth with weight 1/numberOfPeriods.
			float wma = calculateSMA(Arrays.copyOfRange(closingPrices, 0, numberOfPeriods), numberOfPeriods);
			for (int i = numberOfPeriods; i < closingPrices.length; i++) {
				wma = (wma * (numberOfPeriods - 1) + closingPrices[i]) / numberOfPeriods;
			}
			return wma;
		}
	}

	public static float calculateMA(MovingAverageType maType, float[] closingPrices, int numberOfPeriods) {
		if (MovingAverageType.SIMPLE.equals(maType)) {
			return calculateSMA(closingPrices, numberOfPeriods);
		} else if (MovingAverageType.EXPONENTIAL.equals(maType)) {
			return calculateEMA(closingPrices, numberOfPeriods);
		} else if (MovingAverageType.WELLESWILDER.equals(maType)) {
			return calculateWMA(closingPrices, numberOfPeriods);
		} else {
			throw new RuntimeException("Unable to identify type of Moving Average. " + maType);
		}
	}

	public static float[] calculateArraySMA(float[] closingPrices, int numberOfPeriods) {
		float[] smaArray = new float[closingPrices.length];
		if (smaArray.length < numberOfPeriods) {
			for (int j = 0; j < smaArray.length; j++) {
				smaArray[j] = 0.0f;
			}
		} else {
			for (int j = 0; j < numberOfPeriods - 1; j++) {
				smaArray[j] = 0.0f;
			}
			smaArray[numberOfPeriods - 1] = calculateSMA(Arrays.copyOfRange(closingPrices, 0, numberOfPeriods), numberOfPeriods);
			for (int i = numberOfPeriods; i < smaArray.length; i++) {
				smaArray[i] = ((smaArray[i - 1] * numberOfPeriods) - closingPrices[i - numberOfPeriods] + closingPrices[i]) / numberOfPeriods;
			}
		}
		return smaArray;
	}

	public static float[] calculateArrayEMA(float[] closingPrices, int numberOfPeriods) {
		float[] emaArray = new float[closingPrices.length];
		if (emaArray.length < numberOfPeriods) {
			for (int j = 0; j < emaArray.length; j++) {
				emaArray[j] = 0.0f;
			}
		} else {
			for (int j = 0; j < numberOfPeriods - 1; j++) {
				emaArray[j] = 0.0f;
			}
			float weightingMultiplier = 2.0f / (numberOfPeriods + 1);
			emaArray[numberOfPeriods - 1] = calculateSMA(Arrays.copyOfRange(closingPrices, 0, numberOfPeriods), numberOfPeriods);
			for (int i = numberOfPeriods; i < emaArray.length; i++) {
				emaArray[i] = (closingPrices[i] - emaArray[i - 1]) * weightingMultiplier + emaArray[i - 1];
			}
		}
		return emaArray;
	}

	public static float[] calculateArrayWMA(float[] closingPrices, int numberOfPeriods) {
		float[] wmaArray = new float[closingPrices.length];
		if (wmaArray.length < numberOfPeriods) {
			for (int j = 0; j < wmaArray.length; j++) {
				wmaArray[j] = 0.0f;
			}
		} else {
			for (int j = 0; j < numberOfPeriods - 1; j++) {
				wmaArray[j] = 0.0f;
			}
			wmaArray[numberOfPeriods - 1] = calculateSMA(Arrays.copyOfRange(closingPrices, 0, numberOfPeriods), numberOfPeriods);
			for (int i = numberOfPeriods; i < wmaArray.length; i++) {
				wmaArray[i] = (wmaArray[i - 1] * (numberOfPeriods - 1) + closingPrices[i]) / numberOfPeriods;
			}
		}
		return wmaArray;
	}

	public static float[] calculateArrayMA(MovingAverageType maType, float[] closingPrices, int numberOfPeriods) {
		if (MovingAverageType.SIMPLE.equals(maType)) {
			return calculateArraySMA(closingPrices, numberOfPeriods);
		} else if (MovingAverageType.EXPONENTIAL.equals(maType)) {
			return calculateArrayEMA(closingPrices, numberOfPeriods);
		} else if (MovingAverageType.WELLESWILDER.equals(maType)) {
			return calculateArrayWMA(closingPrices, numberOfPeriods);
		} else {
			throw new RuntimeException("Unable to identify type of Moving Average. " + maType);
		}
	}
}
import { ValidatorResult } from "../validation/validatorresult";
import { User } from "../models/user";
import { ValidationError } from "../validation/validationerror";
import { UserDeleteValidator } from "../validation/user/validators/userdeletevalidator";
import { Database } from "../common/database";
import { ServiceType } from "../common/servicetype";
import { ArgumentError } from "../../common/error/types/argumenterror";
import { StringUtils } from "../../util/stringutils";
import { UsernameValidatorRule } from "../validation/user/rules/usernamevalidatorrule";
import { NullArgumentError } from "../../common/error/types/nullargumenterror";
import { UsernameValidator } from "../validation/user/validators/usernamevalidator";
import { DatabaseService } from "../common/databaseservice";
import { UserRegistration } from "../common/userregistration";
import { UserRegistrationValidator } from "../validation/user/validators/userregistrationvalidator";
import { VerificationToken } from "../models/verificationtoken";
import { IEmailSender } from "../email/iemailsender";
import { ErrorHandler } from "../../common/error/errorhandler";
import { QueryFailedError } from "typeorm";
import { DuplicateError } from "../../common/error/types/duplicateerror";
import { injectable, inject } from "inversify";
import { IOC_TYPES } from "../../common/ioc/ioctypes";

/**
 * The user service for retrieving users from the system.
 */
@injectable()
export class UserService extends DatabaseService {
    /**
     * The type of service it is.
     */
    readonly serviceType: ServiceType = ServiceType.User;

    /**
     * The validator to validate a user being deleted.
     */
    private userDeleteValidator: UserDeleteValidator;

    /**
     * The validator to validate users being created.
     */
    private userRegistrationValidator: UserRegistrationValidator;

    /**
     * The service for sending emails.
     */
    private emailSender: IEmailSender;

    /**
     * Create a new user service.
     * @param database The current database.
     */
    constructor(@inject(IOC_TYPES.Database) database: Database) {
        super(database);
        this.userDeleteValidator = new UserDeleteValidator();
        this.userRegistrationValidator = new UserRegistrationValidator();
    }

    /**
     * Checks if a username is available for taking.
     * @param username The username to check for.
     * @returns True if the username is available.
     */
    public async isUsernameAvailable(username: string): Promise<boolean> {
        let usernameValRule: UsernameValidatorRule = new UsernameValidatorRule();

        if (username == null) {
            throw new NullArgumentError('username');
        }
        else {
            let validatorResult: ValidatorResult = new UsernameValidator().validate(username);

            if (validatorResult.isValid) {
                return this.database.userRepo.isUsernameAvailable(username);
            }
            else {
                throw new ValidationError('Username is not valid', validatorResult);
            }
        }
    }

    /**
     * Check if an email is already in use by a non-deleted
     * user.
     * @param email The email to check.
     * @returns True if the email is being used.
     */
    public async isEmailInUse(email: string): Promise<boolean> {
        if (email == null) {
            throw new NullArgumentError('email');
        }
        return this.database.userRepo.isEmailInUse(email);
    }

    public async registerNewUser(registration: UserRegistration): Promise<User> {
        if (registration == null) {
            throw new NullArgumentError('registration');
        }

        //Is the user even valid?
        let validatorResult: ValidatorResult = this.userRegistrationValidator.validate(registration);

        if (!validatorResult.isValid) {
            throw new ValidationError('Failed to register new user.', validatorResult);
        }

        //Generate the user
        let user: User = await User.fromRegistration(registration);
        let vToken: VerificationToken;

        try {
            await this.database.userRepo.add(user);
            return user;
        }
        catch (error) {
            if (this.database.isInTransaction()) {
                await this.database.rollbackTransaction();
            }

            new ErrorHandler(error)
                .catch(QueryFailedError, (error: QueryFailedError) => {
                    if (error.message.includes('ER_DUP_ENTRY')) {
                        throw new DuplicateError('Username or email is already in use.');
                    }
                })
                .otherwiseRaise();

            return null;
        }
    }

    /**
     * Search for a user by their username.
     * @param username The username to look for
     * @param includeDeleted If we should include deleted users in the results.
     * @returns The user if found.
     */
    public async findByUsername(username: string, includeDeleted?: boolean): Promise<User> {
        if (username == null) {
            throw new NullArgumentError('username');
        }
        return this.database.userRepo.findByUsername(username, includeDeleted);
    }

    /**
     * Search for a user by their unique id. This is primarily for
     * API calls.
     * @param id The numeric id of the user to look for.
     * @param includeDeleted If we should include deleted users in the results.
     * @returns The user if found.
     */
    public async findById(id: number, includeDeleted?: boolean): Promise<User> {
        if (isNaN(id)) {
            throw new ArgumentError('id');
        }
        return this.database.userRepo.findById(id, includeDeleted);
    }

    /**
     * Search for a user via their email.
     * @param email The email to look for.
     * @param includeDeleted If deleted users should be included in the result.
     */
    public async findByEmail(email: string, includeDeleted?: boolean): Promise<User> {
        if (StringUtils.isBlank(email)) {
            throw new ArgumentError('email');
        }
        return this.database.userRepo.findByEmail(email, includeDeleted);
    }

    /**
     * Delete a user from the database
     * @param user The user to delete
     */
    public async delete(user: User): Promise<void> {
        if (!user || isNaN(user.id)) {
            throw new ArgumentError('user');
        }

        let validatorResult: ValidatorResult = this.userDeleteValidator.validate(user);

        if (!validatorResult.isValid) {
            throw new ValidationError('Failed to delete user.', validatorResult);
        }

        await this.database.userRepo.delete(user);
    }
}
/* Test cli command to convert topology to providers and descriptors */
@Test
public void testConvertTopology() throws Exception {
    outContent.reset();
    Configuration config = new GatewayConfigImpl();
    URL topologyFileURL = ClassLoader.getSystemResource("token-test.xml");
    final File topologyFile = Paths.get(topologyFileURL.toURI()).toFile();
    final File outputDir = createDir();
    final String providerConfigFileName = "my-provider.json";
    final String descriptorConfigFileName = "my-descriptor.json";
    final String clusterName = "myCluster";
    final String discoveryUrl = "https://localhost:7183";
    final String discoveryUser = "discoveryUser";
    final String discoveryType = "ClouderaManager";
    final String discoveryPwdAlias = "discovery";
    final ObjectMapper mapper = new ObjectMapper();
    try {
        KnoxCLI cli = new KnoxCLI();
        cli.setConf(config);
        cli.run(new String[]{"convert-topology", "--master", "master",
                "--path", topologyFile.getAbsolutePath(),
                "--provider-name", providerConfigFileName,
                "--descriptor-name", descriptorConfigFileName,
                "--output-dir", outputDir.getAbsolutePath(),
                "--force",
                "--cluster", clusterName,
                "--discovery-url", discoveryUrl,
                "--discovery-user", discoveryUser,
                "--discovery-pwd-alias", discoveryPwdAlias,
                "--discovery-type", discoveryType});

        final File providerConfigFile = new File(outputDir + File.separator + providerConfigFileName);
        final File descriptorConfigFile = new File(outputDir + File.separator + descriptorConfigFileName);
        assertTrue("Provider config file not created", providerConfigFile.exists());
        assertTrue("Descriptor config file not created", descriptorConfigFile.exists());

        final ProviderConfiguration providerJson = mapper.readValue(providerConfigFile, ProviderConfiguration.class);
        final DescriptorConfiguration descriptorJson = mapper.readValue(descriptorConfigFile, DescriptorConfiguration.class);
        assertNotNull("Provider config could not be deserialized", providerJson);
        assertNotNull("Descriptor config could not be deserialized", descriptorJson);

        assertEquals(providerJson.getProviders().size(), 1);
        assertEquals(providerJson.getProviders().get(0).getParams().size(), 8);
        assertEquals(providerJson.getProviders().get(0).getName(), "ShiroProvider");
        assertEquals(providerJson.getProviders().get(0).getRole(), "authentication");
        assertEquals(providerJson.getProviders().get(0).isEnabled(), "true");

        /* test param order */
        assertEquals(providerJson.getProviders().get(0).getParams().get(0).getName(), "sessionTimeout");
        assertEquals(providerJson.getProviders().get(0).getParams().get(3).getName(), "main.ldapRealm.contextFactory");
        assertEquals(providerJson.getProviders().get(0).getParams().get(5).getValue(), "ldap://localhost:33389");
        assertEquals(providerJson.getProviders().get(0).getParams().get(7).getValue(), "authcBasic");

        assertEquals(descriptorJson.getDiscoveryType(), discoveryType);
        assertEquals(descriptorJson.getDiscoveryAddress(), discoveryUrl);
        assertEquals(descriptorJson.getDiscoveryPasswordAlias(), discoveryPwdAlias);
        assertEquals(descriptorJson.getDiscoveryUser(), discoveryUser);
        assertEquals(descriptorJson.getCluster(), clusterName);
        assertEquals(descriptorJson.getServices().size(), 1);
        assertEquals(descriptorJson.getServices().get(0).getRole(), "KNOXTOKEN");
        assertEquals(descriptorJson.getServices().get(0).getParams().size(), 5);
    } finally {
        FileUtils.deleteQuietly(outputDir);
    }
}
a = int(input())
s = input()
ans = []
score = 0
from collections import defaultdict

# Prefix "score": -1 for '0', +1 for '1'. Two indices with equal scores bound
# a substring containing the same number of '0's and '1's.
al = defaultdict(list)
for i in range(len(s)):
    if s[i] == '0':
        score -= 1
    else:
        score += 1
    ans.append(score)
    al[score].append(i)

ans = list(set(ans))
maxa = 0
for i in range(len(ans)):
    if len(al[ans[i]]) > 1:
        te = al[ans[i]][-1] - al[ans[i]][0]
        maxa = max(maxa, te)

# A prefix score of 0 means the balanced substring starts at index 0.
if len(al[0]) > 0:
    print(max(maxa, al[0][-1] + 1))
else:
    print(maxa)
<reponame>linerxliner/ValCAT<filename>taattack/config.py
import torch
from pathlib import Path

BASE_DIR = Path(__file__).parent

DEVICES = [f'cuda:{i}' for i in range(torch.cuda.device_count())]
if len(DEVICES) == 0:
    DEVICES = ['cpu'] * 2
One-dimensional ion-beam figuring solution from Brookhaven National Laboratory We demonstrate a novel One-Dimensional Ion-Beam Figuring (1D-IBF) solution from Brookhaven National Laboratory. Three improvements are introduced to the new 1D-IBF system. First, the misalignment of the coordinate systems between the metrology and the 1D-IBF hardware is minimized by integrating both the sample mirror and the Beam Removal Function (BRF) mirror into a single mirror holder. The measured BRF center is then used as a reference to calibrate the coordinate correspondence. Second, a Constrained Linear Least-Squares (CLLS) algorithm with a coarse-to-fine scheme is proposed to keep the dwell time non-negative while ensuring that it smoothly reproduces the required removal amount. Third, a dwell time slicing strategy is used to smooth the implementation of the dwell time in the real 1D-IBF fabrication process. Experimental results demonstrate that the proposed 1D-IBF solution reduces the residual profile errors to sub-nanometer Root Mean Square (RMS) for both flat and spherical mirrors.
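The non-negativity constraint on dwell time is the crux of the CLLS step: an unconstrained least-squares solve can return negative dwell times, which are physically impossible for an ion beam. Below is a minimal Python sketch of that constraint using SciPy's NNLS solver; the Gaussian beam removal function and the target removal profile are invented for illustration and are not the paper's actual algorithm or data.

# Minimal sketch of enforcing dwell-time non-negativity with NNLS.
# The BRF matrix A and removal profile r are made-up illustrations.
import numpy as np
from scipy.optimize import nnls

n = 50                                       # dwell positions along the 1D scan
x = np.arange(n)
# Hypothetical Gaussian beam removal function, one column per dwell point.
A = np.exp(-0.5 * ((x[:, None] - x[None, :]) / 3.0) ** 2)
r = 1.0 + 0.3 * np.sin(2 * np.pi * x / n)    # made-up required removal profile

t, residual = nnls(A, r)                     # dwell times constrained to t >= 0
print(f"min dwell time: {t.min():.3g}, residual: {residual:.3g}")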
<filename>openstack/identity/v2/tokens/testing/requests_test.go
package testing

import (
	"fmt" // needed for fmt.Sprintf in TestRequireUsername
	"testing"

	"github.com/huaweicloud/huaweicloud-sdk-go"
	"github.com/huaweicloud/huaweicloud-sdk-go/openstack/identity/v2/tokens"
	th "github.com/huaweicloud/huaweicloud-sdk-go/testhelper"
	"github.com/huaweicloud/huaweicloud-sdk-go/testhelper/client"
)

func tokenPost(t *testing.T, options gophercloud.AuthOptions, requestJSON string) tokens.CreateResult {
	th.SetupHTTP()
	defer th.TeardownHTTP()
	HandleTokenPost(t, requestJSON)

	return tokens.Create(client.ServiceClient(), options)
}

func tokenPostErr(t *testing.T, options gophercloud.AuthOptions, expectedErr error) {
	th.SetupHTTP()
	defer th.TeardownHTTP()
	HandleTokenPost(t, "")

	actualErr := tokens.Create(client.ServiceClient(), options).Err
	th.CheckDeepEquals(t, expectedErr, actualErr)
}

func TestCreateWithPassword(t *testing.T) {
	options := gophercloud.AuthOptions{
		Username: "me",
		Password: "<PASSWORD>",
	}

	IsSuccessful(t, tokenPost(t, options, `
    {
      "auth": {
        "passwordCredentials": {
          "username": "me",
          "password": "<PASSWORD>"
        }
      }
    }
  `))
}

func TestCreateTokenWithTenantID(t *testing.T) {
	options := gophercloud.AuthOptions{
		Username: "me",
		Password: "<PASSWORD>",
		TenantID: "fc394f2ab2df4114bde39905f800dc57",
	}

	IsSuccessful(t, tokenPost(t, options, `
    {
      "auth": {
        "tenantId": "fc394f2ab2df4114bde39905f800<PASSWORD>",
        "passwordCredentials": {
          "username": "me",
          "password": "<PASSWORD>"
        }
      }
    }
  `))
}

func TestCreateTokenWithTenantName(t *testing.T) {
	options := gophercloud.AuthOptions{
		Username:   "me",
		Password:   "<PASSWORD>",
		TenantName: "demo",
	}

	IsSuccessful(t, tokenPost(t, options, `
    {
      "auth": {
        "tenantName": "demo",
        "passwordCredentials": {
          "username": "me",
          "password": "<PASSWORD>"
        }
      }
    }
  `))
}

func TestRequireUsername(t *testing.T) {
	options := gophercloud.AuthOptions{
		Password: "<PASSWORD>",
	}

	message := fmt.Sprintf(gophercloud.CE_MissingInputMessage, "Username")
	err := gophercloud.NewSystemCommonError(gophercloud.CE_MissingInputCode, message)
	tokenPostErr(t, options, err)
}

func tokenGet(t *testing.T, tokenId string) tokens.GetResult {
	th.SetupHTTP()
	defer th.TeardownHTTP()
	HandleTokenGet(t, tokenId)
	return tokens.Get(client.ServiceClient(), tokenId)
}

func TestGetWithToken(t *testing.T) {
	GetIsSuccessful(t, tokenGet(t, "<PASSWORD>"))
}
An Actor-Network Perspective on Collections Documentation and Data Practices at Museums The improvement of digital technology over recent decades has advanced the ability of museums to manage records of their collections and share them online. However, despite the rise of research in the area of digital heritage, less attention has been given to a sociotechnical perspective on such technology. Drawing upon concepts from Actor-Network Theory, this paper presents actors associated with the V&A’s collections management system and its online catalogue. Digital design objects, the museum’s new type of collection, are seen as a driving force for change in collections documentation practices. This paper argues for models of documentation to change from closed to open and participatory in order to (re)present such objects’ materiality in collection records through the voices of multiple actors. This paper, highlighting the agency of data and technology, increases our awareness of the potential consequences of museums’ data practices where the integration of advanced technology (e.g., AI) will be implemented in the future.
<gh_stars>1-10
"""Tests for `pgsync` package."""
import psycopg2
import pytest

from pgsync.base import subtransactions

from .helpers.utils import assert_resync_empty


@pytest.mark.usefixtures("table_creator")
class TestUniqueBehaviour(object):
    """Unique behaviour tests."""

    @pytest.fixture(scope="function")
    def data(
        self,
        sync,
        book_cls,
        user_cls,
        contact_cls,
        contact_item_cls,
    ):
        session = sync.session

        contacts = [
            contact_cls(name="Contact 1"),
            contact_cls(name="Contact 2"),
        ]
        contact_items = [
            # contact_item_cls(name="Contact Item 1", contact=contacts[0]),
            # contact_item_cls(name="Contact Item 2", contact=contacts[1]),
        ]
        users = [
            user_cls(name="<NAME>", contact=contacts[0]),
            user_cls(name="<NAME>", contact=contacts[1]),
        ]
        books = [
            book_cls(
                isbn="abc",
                title="The Tiger Club",
                description="Tigers are fierce creatures",
                buyer=users[0],
                seller=users[1],
            ),
        ]

        with subtransactions(session):
            conn = session.connection().engine.connect().connection
            conn.set_isolation_level(
                psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
            )
            cursor = conn.cursor()
            channel = sync.database
            cursor.execute(f"UNLISTEN {channel}")

        with subtransactions(session):
            session.add_all(contacts)
            session.add_all(contact_items)
            session.add_all(users)
            session.add_all(books)

        sync.logical_slot_get_changes(
            f"{sync.database}_testdb",
            upto_nchanges=None,
        )

        yield (
            books,
            contacts,
            contact_items,
            users,
        )

        with subtransactions(session):
            conn = session.connection().engine.connect().connection
            conn.set_isolation_level(
                psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
            )
            cursor = conn.cursor()
            channel = session.connection().engine.url.database
            cursor.execute(f"UNLISTEN {channel}")

        with subtransactions(session):
            sync.truncate_tables(
                [
                    book_cls.__table__.name,
                    contact_item_cls.__table__.name,
                    contact_cls.__table__.name,
                    user_cls.__table__.name,
                ]
            )

        sync.logical_slot_get_changes(
            f"{sync.database}_testdb",
            upto_nchanges=None,
        )

        try:
            sync.es.teardown(index="testdb")
            sync.es.close()
        except Exception:
            raise

        sync.redis.delete()
        session.connection().engine.connect().close()
        session.connection().engine.dispose()
        sync.es.close()

    @pytest.fixture(scope="function")
    def nodes(self):
        return {
            "table": "book",
            "columns": ["isbn", "title", "description"],
            "children": [
                {
                    "table": "user",
                    "label": "seller",
                    "columns": ["id", "name"],
                    "relationship": {
                        "variant": "object",
                        "type": "one_to_one",
                        "foreign_key": {
                            "parent": ["seller_id"],
                            "child": ["id"],
                        },
                    },
                    "children": [
                        {
                            "table": "contact",
                            "label": "contacts",
                            "columns": ["id", "name"],
                            "relationship": {
                                "variant": "object",
                                "type": "one_to_many",
                                "foreign_key": {
                                    "parent": ["contact_id"],
                                    "child": ["id"],
                                },
                            },
                            "children": [
                                {
                                    "table": "contact_item",
                                    "label": "contact_items",
                                    "columns": ["id", "name"],
                                    "relationship": {
                                        "variant": "object",
                                        "type": "one_to_many",
                                        "foreign_key": {
                                            "parent": ["id"],
                                            "child": ["contact_id"],
                                        },
                                    },
                                }
                            ],
                        }
                    ],
                },
                {
                    "table": "user",
                    "label": "buyer",
                    "columns": ["id", "name"],
                    "relationship": {
                        "variant": "object",
                        "type": "one_to_one",
                        "foreign_key": {
                            "parent": ["buyer_id"],
                            "child": ["id"],
                        },
                    },
                    "children": [
                        {
                            "table": "contact",
                            "label": "contacts",
                            "columns": ["id", "name"],
                            "relationship": {
                                "variant": "object",
                                "type": "one_to_many",
                                "foreign_key": {
                                    "parent": ["contact_id"],
                                    "child": ["id"],
                                },
                            },
                            "children": [
                                {
                                    "table": "contact_item",
                                    "label": "contact_items",
                                    "columns": ["id", "name"],
                                    "relationship": {
                                        "variant": "object",
                                        "type": "one_to_many",
                                        "foreign_key": {
                                            "parent": ["id"],
                                            "child": ["contact_id"],
                                        },
                                    },
                                }
                            ],
                        }
                    ],
                },
            ],
        }

    def test_sync_multiple_children_empty_leaf(
        self,
        sync,
        data,
        nodes,
        book_cls,
        user_cls,
        contact_cls,
        contact_item_cls,
    ):
        """
        Book ----> User(buyer)  ----> Contact ----> ContactItem
            |
            -----> User(seller) ----> Contact ----> ContactItem

        Test regular sync produces the correct result
        """
        sync.nodes = nodes
        docs = [doc for doc in sync.sync()]
        docs = sorted(docs, key=lambda k: k["_id"])
        assert docs == [
            {
                "_id": "abc",
                "_index": "testdb",
                "_source": {
                    "_meta": {
                        "contact": {"id": [1, 2]},
                        "user": {"id": [1, 2]},
                    },
                    "buyer": {
                        "contacts": [
                            {
                                "contact_items": None,
                                "id": 1,
                                "name": "Contact 1",
                            }
                        ],
                        "id": 1,
                        "name": "<NAME>",
                    },
                    "description": "Tigers are fierce creatures",
                    "isbn": "abc",
                    "seller": {
                        "contacts": [
                            {
                                "contact_items": None,
                                "id": 2,
                                "name": "<NAME>",
                            }
                        ],
                        "id": 2,
                        "name": "<NAME>",
                    },
                    "title": "The Tiger Club",
                },
            }
        ]
        assert_resync_empty(sync, nodes)
/**
 * Formats the date as a string with date and time. It respects the device's locale settings.
 *
 * @param context the application context
 * @param date the date to format
 * @return the formatted string
 */
@NonNull
public static String formatDate(Context context, Date date) {
    return DateFormat.getLongDateFormat(context).format(date) + " " + DateFormat.getTimeFormat(context).format(date);
}
<filename>src/bulls_and_cows_lib/game_options.cpp<gh_stars>0
#include "game_options.hpp"
#include "input.hpp"

namespace bulls_and_cows {

    void display_game_options(std::ostream& output_stream, const GameOptions& game_options)
    {
        output_stream << "\nHere are the current game options:\n";
        output_stream << "Maximum number of attempts per game: " << game_options.max_number_of_attempts << "\n";
        output_stream << "Number of characters in a code: " << game_options.number_of_characters_per_code << "\n";
        output_stream << "Range of allowed characters: from '" << game_options.minimum_allowed_character << "' to '"
                      << game_options.maximum_allowed_character << "'\n";
    }

    void display_game_options_menu(std::ostream& output_stream)
    {
        output_stream << "\nConfigure Options\n"
                         "0 - Back to main menu\n"
                         "1 - Modify Maximum number of attempts per game\n"
                         "2 - Modify Number of characters in a code\n"
                         "3 - Modify Minimum allowed character\n"
                         "4 - Modify Maximum allowed character\n"
                         "5 - Save options\n"
                         "6 - Load options\n"
                         "What is your choice ? ";
    }

    GameOptionsMenuChoice ask_game_options_menu_choice(std::istream& input_stream)
    {
        const int choice_menu = ask_int_or_default(input_stream, -1);
        switch (choice_menu)
        {
        case 0:
            return GameOptionsMenuChoice::BackToMain;
        case 1:
            return GameOptionsMenuChoice::ModifyMaximumNumberOfAttempts;
        case 2:
            return GameOptionsMenuChoice::ModifyNumberOfCharactersPerCode;
        case 3:
            return GameOptionsMenuChoice::ModifyMinimumAllowedCharacter;
        case 4:
            return GameOptionsMenuChoice::ModifyMaximumAllowedCharacter;
        case 5:
            return GameOptionsMenuChoice::SaveOptions;
        case 6:
            return GameOptionsMenuChoice::LoadOptions;
        }
        return GameOptionsMenuChoice::Error;
    }

    void modify_maximum_number_of_attempts(GameOptions& game_options, std::ostream& output_stream,
                                           std::istream& input_stream)
    {
        unsigned int new_nb_of_attempts = 0;
        output_stream << "\nEnter the number of attempts (must be > 0): ";
        new_nb_of_attempts = ask_uint_or_default(input_stream, 0);
        while (new_nb_of_attempts == 0)
        {
            output_stream << "Incorrect number, try again!\n";
            output_stream << "Enter the number of attempts (must be > 0): ";
            new_nb_of_attempts = ask_uint_or_default(input_stream, 0);
        }
        game_options.max_number_of_attempts = new_nb_of_attempts;
        output_stream << "Number of attempts has been modified successfully\n";
    }

    void modify_number_of_characters_per_code(GameOptions& game_options, std::ostream& output_stream,
                                              std::istream& input_stream)
    {
        unsigned int new_nb_of_characters = 0;
        output_stream << "\nEnter the number of characters (must be > 0 and <= the numerical difference "
                         "between max_allowed_char and min_allowed_char): ";
        new_nb_of_characters = ask_uint_or_default(input_stream, 0);
        unsigned int diff = (game_options.maximum_allowed_character - game_options.minimum_allowed_character) + 1;
        while (new_nb_of_characters == 0 || new_nb_of_characters > diff)
        {
            output_stream << "Incorrect number, try again!\n";
            output_stream << "Enter the number of characters (must be > 0 and <= the numerical difference "
                             "between max_allowed_char and min_allowed_char): ";
            new_nb_of_characters = ask_uint_or_default(input_stream, 0);
            diff = (game_options.maximum_allowed_character - game_options.minimum_allowed_character) + 1;
        }
        game_options.number_of_characters_per_code = new_nb_of_characters;
        output_stream << "Number of characters per code has been modified successfully\n";
    }

    void modify_minimum_allowed_characters(GameOptions& game_options, std::ostream& output_stream,
                                           std::istream& input_stream)
    {
        char new_minimum{};
        auto max = game_options.maximum_allowed_character;
        auto nb_char = game_options.number_of_characters_per_code;
        output_stream << "\nEnter the minimum allowed character (must be < the maximum allowed character, and "
                         "max_allowed_char - min_allowed_char must be >= the number of characters per code): ";
        new_minimum = ask_char_or_default(input_stream, -1);
        unsigned int diff = max - new_minimum + 1;
        while (new_minimum == -1 || new_minimum >= max || diff < nb_char)
        {
            output_stream << "Incorrect input, try again!\n";
            output_stream << "Enter the minimum allowed character (must be < the maximum allowed character, and "
                             "max_allowed_char - min_allowed_char must be >= the number of characters per code): ";
            new_minimum = ask_char_or_default(input_stream, -1);
            diff = max - new_minimum + 1;
        }
        game_options.minimum_allowed_character = new_minimum;
        output_stream << "Minimum allowed character has been modified successfully\n";
    }

    void modify_maximum_allowed_characters(GameOptions& game_options, std::ostream& output_stream,
                                           std::istream& input_stream)
    {
        char new_maximum{};
        char min = game_options.minimum_allowed_character;
        auto nb_char = game_options.number_of_characters_per_code;
        output_stream << "\nEnter the maximum allowed character (must be > the minimum allowed character, and "
                         "max_allowed_char - min_allowed_char must be >= the number of characters per code): ";
        new_maximum = ask_char_or_default(input_stream, -1);
        unsigned int diff = new_maximum - min + 1;
        while (new_maximum == -1 || (new_maximum <= min) || (diff < nb_char))
        {
            output_stream << "Incorrect input, try again!\n";
            output_stream << "Enter the maximum allowed character (must be > the minimum allowed character, and "
                             "max_allowed_char - min_allowed_char must be >= the number of characters per code): ";
            new_maximum = ask_char_or_default(input_stream, -1);
            diff = new_maximum - min + 1;
        }
        game_options.maximum_allowed_character = new_maximum;
        output_stream << "Maximum allowed character has been modified successfully\n";
    }

    bool save_game_options(std::ostream& output_file_stream, const GameOptions& game_options)
    {
        if (output_file_stream)
        {
            output_file_stream << "max_number_of_attempts=" << game_options.max_number_of_attempts
                               << "\nnumber_of_characters_per_code=" << game_options.number_of_characters_per_code
                               << "\nminimum_allowed_character=" << game_options.minimum_allowed_character
                               << "\nmaximum_allowed_character=" << game_options.maximum_allowed_character << "\n";
            std::cout << "\nYour options have been saved successfully\n";
            return true;
        }
        return false;
    }

    bool load_game_options(std::istream& input_file_stream, GameOptions& game_options)
    {
        std::string line;
        while (std::getline(input_file_stream, line)) // read the file line by line
        {
            std::size_t delimiter = line.find("=");
            std::string token = line.substr(0, delimiter);
            std::string numb = line.substr(delimiter + 1);
            if (token == "max_number_of_attempts")
                game_options.max_number_of_attempts = std::atoi(numb.c_str());
            else if (token == "number_of_characters_per_code")
                game_options.number_of_characters_per_code = std::atoi(numb.c_str());
            else if (token == "minimum_allowed_character")
                game_options.minimum_allowed_character = numb[0];
            else if (token == "maximum_allowed_character")
                game_options.maximum_allowed_character = numb[0];
        }
        return true;
    }

} // namespace bulls_and_cows
<gh_stars>0 import { shallow } from "enzyme"; import "jest-styled-components"; import React from "react"; import { InputLabel } from "."; describe("<InputLabel />", () => { it("exists", () => { const wrapper = shallow( <InputLabel labelBackground={"#FFF"} active={false}> This is input - check knobs </InputLabel> ); expect(wrapper.exists()).toEqual(true); }); it("contains text", () => { const message = "This is message"; const wrapper = shallow( <InputLabel labelBackground={"#FFF"} active={false}> {message} </InputLabel> ); expect(wrapper.text()).toContain(message); }); });
/*
NAME
    BaseDaemon.hpp - Header file of the base daemon class.

DESCRIPTION
    Simplifies writing daemon programs.
*/
#ifndef __BASE_DAEMON_HPP__
#define __BASE_DAEMON_HPP__

#include "Configure.hpp"

#include <string>

class BaseDaemon
{
public:
    static bool running;
    static bool daemon;

    void run();

protected:
    static void skeleton_daemon();

    unsigned int _sleep_interval;
    Configure* _configure;
    std::string _daemon_name;

    BaseDaemon();
    ~BaseDaemon();

    virtual bool init() = 0;
    virtual bool prepare() = 0;
    virtual void process() = 0;
};

#endif
/** * A type-based points-to relation query */ public class TypeBasedPtsToQuery implements IPtsToQuery{ private final boolean _allReachable; public TypeBasedPtsToQuery(boolean allReachable){ TypeBasedPointsToAnalysis.v(allReachable); _allReachable = allReachable; } public void getPointTos(Type type, Set<InstanceObject> pt2Set){ Set<Type> set = TypeBasedPointsToAnalysis.v(_allReachable).reachingObjects(type); for(Type t: set){ InstanceObject o = InstanceObject.typeToObject(t); pt2Set.add(o); } } public Set<InstanceObject> getPointTos(SootMethod m, Unit stmt, Location ptr){ if(!ptr.isPointer()) return Collections.emptySet(); Set<InstanceObject> pt2Set = new HashSet<InstanceObject>(); getPointTos(ptr.getType(),pt2Set); return pt2Set; } }
import React from 'react'; import { IAnimal, Gender } from '../../../../api/animals'; import './index.scss'; import noPhotoImage from './../../../../img/nophoto.jpg'; import { TI18n } from '../../../../i18n'; import { Link } from 'react-router-dom'; import { ButtonLike } from '../../../../components/ButtonLike'; import { Age } from '../../../../components/Age'; import { useSelector } from 'react-redux'; import { selectApiUrl } from '../../../../store/selectors/config.selector'; import { store } from '../../../../store'; import { ICustomAppState } from '../../../../store/state'; interface IPropTypes { animal: IAnimal; } export const AnimalCard: React.FC<IPropTypes> = ({ animal }) => { const baseUrl: string = useSelector(() => selectApiUrl(store.getState())); const coverImageId = animal.coverImage ? animal.coverImage : 0; const coverImage = animal.imageIds[coverImageId]; const appLanguage: string = useSelector( (store: ICustomAppState) => store.appLanguage, ); let commonLang = ''; switch (appLanguage) { case 'ua': case 'ru': commonLang = 'ua'; break; case 'en': case 'de': commonLang = 'en'; break; } return ( <div className="animal-card"> <ButtonLike id={animal.id} /> <Link to={`/animals/${animal.id}`}> <div className="img-holder" style={{ backgroundImage: `url(${ coverImage ? `${baseUrl}documents/${coverImage}/type/medium` : `${noPhotoImage}` })`, }} ></div> <strong className="animal-name"> { !!animal.names.length && (animal.names.length > 1 ? animal.names.filter((name) => name.lang === commonLang)[0].value : animal.names[0].value) } </strong> <div className="description"> <TI18n keyStr={ !!animal.gender && (animal.gender.toLowerCase() === Gender.MALE || animal.gender.toLowerCase() === Gender.FEMALE) ? animal.gender.toLowerCase() : 'unknownGender' } default="Пол неизвестен" /> ,{' '} {(!!animal.birthday || animal.birthday === '') && ( <Age birthday={animal.birthday} /> )} </div> </Link> </div> ); };
<filename>src/rules/warrior-rules.ts
import * as yup from 'yup'
import * as bcrypt from 'bcrypt'

import Warrior from '@local/models/warrior-model'

export const signUpRules = yup.object().shape({
  name: yup
    .string()
    .trim()
    .required(),
  warriorname: yup
    .string()
    .trim()
    .required()
    .min(3, 'Warriorname is too short')
    .test(
      'uniqueWarrior',
      'This warrior already exists',
      async (warriorname) => {
        const warrior = await Warrior.findOne({ warriorname })
        return !warrior
      }
    ),
  password: yup
    .string()
    .trim()
    .required()
    .min(6, 'Password is too short')
    // Anchored so the pattern enforces "only contains" as the message states;
    // the unanchored /[a-zA-Z0-9@!#%]/ matched any string containing at least
    // one allowed character.
    .matches(
      /^[a-zA-Z0-9@!#%]+$/,
      'Password can only contain Latin letters, numbers and/or [@, !, #, %].'
    ),
})

export const loginRules = yup.object().shape({
  warriorname: yup
    .string()
    .trim()
    .required()
    .test('warriornameCheck', 'Invalid warriorname', async (warriorname) => {
      const warrior = await Warrior.findOne({ warriorname })
      return !!warrior
    }),
  password: yup
    .string()
    .trim()
    .required()
    .matches(
      /^[a-zA-Z0-9@!#%]+$/,
      'Password can only contain Latin letters, numbers and/or [@, !, #, %].'
    )
    .when('warriorname', (warriorname: string, schema: any) =>
      schema.test({
        test: async (password: string) => {
          const warrior = await Warrior.findOne({ warriorname })
          const valid = await bcrypt.compare(password, warrior!.password)
          return valid
        },
        message: 'Invalid password',
      })
    ),
})

export const tribeRules = yup.object().shape({
  tribe: yup
    .string()
    .required()
    .test(
      'uppercaseCheck',
      'Tribe can only accept upper case letters',
      (tribe) => {
        return tribe === tribe.toUpperCase()
      }
    ),
})
/* un-initialize simulator-specific state */ void sim_uninit(void) { }
// In the name of Allah the Most Merciful.
#include <bits/stdc++.h>
using namespace std;
typedef long long ll;

const int MAX = 1005;

int lv1[MAX + 9], lv2[MAX + 9];
int n, m, s, t;
vector<int> edges[MAX + 9];
bool vis[1005];
int mp[1005][1005];

// BFS from source s, writing distances into lv (used for both lv1 and lv2;
// the two original copies of this function differed only in the array used).
void bfs(int s, int lv[])
{
    queue<int> current;
    lv[s] = 0;
    vis[s] = 1;
    current.push(s);
    while (!current.empty()) {
        int u = current.front();
        current.pop();
        int si = edges[u].size();
        for (int i = 0; i < si; i++) {
            if (vis[edges[u][i]] == 0) {
                lv[edges[u][i]] = lv[u] + 1;
                vis[edges[u][i]] = 1;
                current.push(edges[u][i]);
            }
        }
    }
}

int main(void)
{
    memset(lv1, 0, sizeof(lv1));
    memset(lv2, 0, sizeof(lv2));
    memset(mp, 0, sizeof(mp));
    scanf("%d %d %d %d", &n, &m, &s, &t);
    for (int i = 0; i < m; i++) {
        int in1, in2;
        scanf("%d %d", &in1, &in2);
        edges[in1].push_back(in2);
        edges[in2].push_back(in1);
        mp[in1][in2] = 1;
        mp[in2][in1] = 1;
    }
    memset(vis, false, sizeof(vis));
    bfs(s, lv1); // distances from s
    memset(vis, false, sizeof(vis));
    bfs(t, lv2); // distances from t
    int temp = lv1[t];
    int counter = 0;
    for (int i = 1; i <= n; i++) {
        if (i == s || i == t) continue;
        if (lv1[i] == 0) lv1[i] = 123456789; // unreachable
        if (lv2[i] == 0) lv2[i] = 123456789;
    }
    // Count non-adjacent pairs (i, j) whose new connecting edge would not
    // shorten the shortest s-t distance.
    for (int i = 1; i <= n; i++) {
        for (int j = i + 1; j <= n; j++) {
            if (i == j) continue;
            int value = lv1[i] + lv2[j];
            value = min(value, lv1[j] + lv2[i]);
            if (mp[i][j] == 1) continue;
            if (value + 1 >= temp) counter++;
        }
    }
    printf("%d\n", counter);
    return 0;
}
The Click modular router Click is a new software architecture for building flexible and configurable routers. A Click router is assembled from packet processing modules called elements. Individual elements implement simple router functions like packet classification, queueing, scheduling, and interfacing with network devices. Complete configurations are built by connecting elements into a graph; packets flow along the graph's edges. Several features make individual elements more powerful and complex configurations easier to write, including pull processing, which models packet flow driven by transmitting interfaces, and flow-based router context, which helps an element locate other interesting elements.We demonstrate several working configurations, including an IP router and an Ethernet bridge. These configurations are modular---the IP router has 16 elements on the forwarding path---and easy to extend by adding additional elements, which we demonstrate with augmented configurations. On commodity PC hardware running Linux, the Click IP router can forward 64-byte packets at 73,000 packets per second, just 10% slower than Linux alone.
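Click's core idea, simple elements wired into a graph that packets flow through, is easy to sketch. The toy Python below illustrates push processing through a two-output classifier; it is an illustration of the architecture only, and none of these class names correspond to Click's actual C++ element API.

# Toy sketch of Click's element-graph idea: elements process packets and push
# them to downstream elements along the configuration graph. Illustration
# only; Click's real elements are C++ and also support pull processing.
class Element:
    def __init__(self):
        self.outputs = []

    def connect(self, element):
        self.outputs.append(element)
        return element

    def push(self, packet):
        for out in self.outputs:
            out.push(packet)

class Classifier(Element):
    """Route packets to an IP path or a fallback path by ethertype."""
    def __init__(self, ip_path, other_path):
        super().__init__()
        self.ip_path, self.other_path = ip_path, other_path

    def push(self, packet):
        target = self.ip_path if packet.get("ethertype") == 0x0800 else self.other_path
        target.push(packet)

class Counter(Element):
    def __init__(self):
        super().__init__()
        self.count = 0

    def push(self, packet):
        self.count += 1
        super().push(packet)  # forward downstream, if anything is connected

# Assemble a tiny configuration graph: classify, then count per class.
ip_counter, other_counter = Counter(), Counter()
source = Classifier(ip_counter, other_counter)
source.push({"ethertype": 0x0800})  # IPv4 frame
source.push({"ethertype": 0x0806})  # ARP frame
print(ip_counter.count, other_counter.count)  # 1 1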
import urllib.request
import urllib.parse
import json

import execjs

url = "http://fanyi.baidu.com/v2transapi"
headers = {
    "Accept": "*/*",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
    "Host": "fanyi.baidu.com",
    "Origin": "http://fanyi.baidu.com",
    "Pragma": "no-cache",
    "Referer": "http://fanyi.baidu.com/",
    "X-Requested-With": "XMLHttpRequest",
    'Cookie': 'BAIDUID=256C72EF575B148C1E5672FBEBB2B072:FG=1; PSTM=1532575058; BIDUPSID=25B2CBDECE1643A45F149D48681E7E7C; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; to_lang_often=%5B%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%2C%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%5D; REALTIME_TRANS_SWITCH=1; FANYI_WORD_SWITCH=1; HISTORY_SWITCH=1; SOUND_SPD_SWITCH=1; SOUND_PREFER_SWITCH=1; locale=zh; from_lang_often=%5B%7B%22value%22%3A%22zh%22%2C%22text%22%3A%22%u4E2D%u6587%22%7D%2C%7B%22value%22%3A%22it%22%2C%22text%22%3A%22%u610F%u5927%u5229%u8BED%22%7D%2C%7B%22value%22%3A%22en%22%2C%22text%22%3A%22%u82F1%u8BED%22%7D%5D; PSINO=1; H_PS_PSSID=26937_1428_21119_26350_26921_22072; Hm_lvt_64ecd82404c51e03dc91cb9e8c025574=1531330412,1532097434,1533591486,1533595389; Hm_lpvt_64ecd82404c51e03dc91cb9e8c025574=1533595389',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.84 Safari/537.36',
}

inputData = "python"
with open("bd.js") as f:
    jsData = f.read()
# Compute the request signature by evaluating Baidu's obfuscated JS with execjs.
p = execjs.compile(jsData).call("e", inputData)

formData = {
    "from": "en",
    "to": "zh",
    "query": inputData,
    "transtype": "realtime",
    "simple_means_flag": "3",
    "sign": p,
    "token": "c6494eba8aef403bf04ba17ff6114014",
}

data = urllib.parse.urlencode(formData).encode("utf-8")
request = urllib.request.Request(url=url, data=data, headers=headers)
print(json.loads(urllib.request.urlopen(request).read().decode("utf-8")))
/*
 * Method called by the LogConfig to indicate a change of properties.
 */
void configChange(final String propName, final Object oldValue, final Object newValue) {
    final String name = propName;
    if (name.equals(LogConfigImpl.MEM)) {
        resetMemorySize(((Integer) newValue).intValue(), ((Integer) oldValue).intValue());
    } else if (name.equals(LogConfigImpl.DIR)) {
        resetFile();
    } else if (name.equals(LogConfigImpl.FILE)) {
        resetFile((Boolean) newValue, (Boolean) oldValue);
    } else if (name.equals(LogConfigImpl.GEN) && fileLog != null) {
        synchronized (fileLog) {
            fileLog.resetGenerations(((Integer) newValue).intValue(), ((Integer) oldValue).intValue());
        }
    } else if (name.equals(LogConfigImpl.TIMESTAMP_PATTERN)) {
        LogEntryImpl.setTimestampPattern((String) newValue);
    }
}
<commit_msg>Remove urlparse import as not used and also renamed in Python 3 to urllib.parse
<commit_before>from django.contrib.sites.shortcuts import get_current_site
from urlparse import urljoin


def site_url(request):
    scheme = 'https' if request.is_secure() else 'http'
    site = get_current_site(request)
    #domain = "{}://{}".format(scheme, site.domain)
    return {
        'site_url': "{}://{}".format(scheme, site.domain),
        'site_name': site.name
    }
<commit_after>from django.contrib.sites.shortcuts import get_current_site


def site_url(request):
    scheme = 'https' if request.is_secure() else 'http'
    site = get_current_site(request)
    #domain = "{}://{}".format(scheme, site.domain)
    return {
        'site_url': "{}://{}".format(scheme, site.domain),
        'site_name': site.name
    }
#include <bits/stdc++.h>
using namespace std;
#define ll long long
#define F first
#define S second

int main()
{
    ll n, i, t, m;
    cin >> t;
    // arr[d] = sum of the last digits of the first ten multiples of a number
    // ending in digit d; last digits of multiples repeat with period 10.
    ll arr[10] = {0, 45, 40, 45, 40, 25, 40, 45, 40, 45};
    for (i = 0; i < t; i++) {
        cin >> m >> n;
        ll nbm = m / n;    // how many multiples of n are <= m
        ll mul = nbm / 10; // full periods of ten multiples
        ll rest = nbm % 10; // leftover multiples in the partial period
        n %= 10;
        ll ans = 0;
        ans += arr[n] * mul;
        for (ll j = 1; j <= rest; j++)
            ans += (n * j) % 10;
        cout << ans << "\n";
    }
}
Located at the tip of the Baja Peninsula, the two small colonial towns of Cabo San Lucas and San José del Cabo have become the hottest vacation destinations in Mexico in recent years. With wide, pristine beaches, lively nightclubs, glam resorts, and a farm-to-table food scene, the oasis of Los Cabos is drawing tourists in record numbers. As a result, the hotel scene is booming, with a clutch of new developments and renovations completed this year along the Tourist Corridor, including the sleek Chileno Bay, an Auberge Resort, and a stunning beachfront Solaz resort.

From a remote, idyllic island to the world's design capital, these destinations have never been better. Start planning your 2018 travel now.

Travelers today are more aware than ever of all the world has to offer. It's thrilling to be confronted with so much possibility — but daunting, too. Each year, we curate a list of the best places to travel in the months ahead. Our travel experts — from travel writers around the globe to T+L's A-List travel advisors to our own editors — offer their recommendations. Then, we take a look at what places are now at the forefront of the global conversation, whether for new hotels and museums or major international events. In any given year, the cities and countries we recommend as the best places to travel in the world have a lot going on. And of course, we think about those travel destinations that are perennial favorites to determine which ones are reinventing themselves, ensuring there's always something new to explore.

Whether you're after heart-stopping adventure, a close-up look at history, or the perfect meal, these are the 50 best destinations to discover in 2018.
#ifndef LINEAROPERATOR_HPP
#define LINEAROPERATOR_HPP
//!\file
//!\brief Base class for representing linear operators \f$R^{N} \to R^{M}\f$.

#include <cstddef>
#include <utility>

namespace mgard {

//! Linear operator with respect to some fixed bases.
class LinearOperator {
public:
  //! Constructor.
  //!
  //!\param N Dimension of domain.
  //!\param M Dimension of range.
  LinearOperator(const std::size_t N, const std::size_t M);

  //! Constructor.
  //!
  //!\overload
  //!
  //!\param N Dimension of domain and range.
  LinearOperator(const std::size_t N);

  //! Return the dimensions of the domain and range.
  std::pair<std::size_t, std::size_t> dimensions() const;

  //! Report whether the associated matrix is square.
  bool is_square() const;

  //! Apply the operator to a vector and store the results.
  //!
  //!\param [in] x Vector in the domain.
  //!\param [out] b Vector in the range, obtained by applying the operator
  //! to `x`.
  void operator()(double const *const x, double *const b) const;

protected:
  //! Dimension of the domain.
  std::size_t domain_dimension;

  //! Dimension of the range.
  std::size_t range_dimension;

private:
  virtual void do_operator_parentheses(double const *const x,
                                       double *const b) const = 0;
};

} // namespace mgard

#endif
// ValidateBattleUser validates a user's input for an Overwatch account.
func (b *Blizzard) ValidateBattleUser(payload *models.Overwatch) error {
	logrus.Debug("ValidateBattleUser()")
	if payload == nil {
		return errors.New("no payload to ValidateBattleUser")
	}

	var regions = []string{"us", "eu", "asia"}
	var platforms = []string{"pc", "switch", "xbox", "ps4"}

	if !models.Contains(regions, payload.Region) {
		return models.NewReqErrStr("invalid Overwatch region", "invalid region for Overwatch account")
	}
	if !models.Contains(platforms, payload.Platform) {
		return models.NewReqErrStr("invalid Overwatch platform", "invalid platform for Overwatch account")
	}

	url := fmt.Sprintf("https://ow-api.com/v1/stats/%s/%s/%s/heroes/complete", payload.Platform, payload.Region, payload.BattleTag)
	resp, err := b.Get(url)
	if err != nil {
		return models.NewAPIErr(err, "Blizzard")
	}
	defer resp.Body.Close()

	if err := models.AccValStatusCode(resp.StatusCode, "Blizzard", "invalid Blizzard battle tag, platform or region"); err != nil {
		return err
	}
	return nil
}
#include "stdafx.h" #include "Joycon.h" Joycon::Joycon(struct hid_device_info *dev) { if (dev->product_id == JOYCON_L_BT) { this->name = std::string("Joy-Con (L)"); this->left_right = 1; } else if (dev->product_id == JOYCON_R_BT) { this->name = std::string("Joy-Con (R)"); this->left_right = 2; } this->handle = hid_open_path(dev->path); if (this->handle == nullptr) { printf("Could not find JoyCon"); throw; } } void Joycon::hid_exchange(hid_device *handle, unsigned char *buf, int len) { if (!handle) return; int res; res = hid_write(handle, buf, len); res = hid_read(handle, buf, 0x40); } void Joycon::send_command(int command, uint8_t *data, int len) { unsigned char buf[0x40]; memset(buf, 0, 0x40); buf[bluetooth ? 0x0 : 0x8] = command; if (data != nullptr && len != 0) { memcpy(buf + (bluetooth ? 0x1 : 0x9), data, len); } hid_exchange(this->handle, buf, len + (bluetooth ? 0x1 : 0x9)); if (data) { memcpy(data, buf, 0x40); } } void Joycon::send_subcommand(int command, int subcommand, uint8_t *data, int len) { unsigned char buf[0x40]; memset(buf, 0, 0x40); uint8_t rumble_base[9] = { (global_count++) & 0xF,0x00,0x01,0x40,0x40,0x00,0x01,0x40,0x40 }; memcpy(buf, rumble_base, 9); if (global_count > 0xF) { global_count = 0x0; } buf[9] = subcommand; if (data && len != 0) { memcpy(buf + 10, data, len); } send_command(command, buf, 10 + len); if (data) { memcpy(data, buf, 0x40);//Original has a TODO here so I have no idea what that means, so rip } } void Joycon::rumble(int frequency, int intensity) { unsigned char buf[0x400]; memset(buf, 0, 0x40); //I think this is turning on the intensity for each controller. buf[1 + 0 + intensity] = 0x1; buf[1 + 4 + intensity] = 0x1; //set frequency to increase if (this->left_right == 1) { buf[1 + 0] = frequency;//(0, 255) } else { buf[1 + 4] = frequency;//(0, 255) } //set non-blocking: hid_set_nonblocking(this->handle, 1); send_command(0x10, (uint8_t*)buf, 0x9); } //void rumble2(uint16_t hf, uint8_t hfa, uint8_t lf, uint16_t lfa); //setting gyro offsets void Joycon::setGyroOffsets() { float threshold = 0.1; //if gyro position is close enough to 0 don't set offsets. if (abs(this->gyro.roll) > threshold || abs(this->gyro.pitch) > threshold || abs(this->gyro.yaw) > threshold) { return; } this->gyro.offset.n += 1; this->gyro.offset.roll = this->gyro.offset.roll + ((this->gyro.roll - this->gyro.offset.roll) / this->gyro.offset.n); this->gyro.offset.pitch = this->gyro.offset.pitch + ((this->gyro.pitch - this->gyro.offset.pitch) / this->gyro.offset.n); this->gyro.offset.yaw = this->gyro.offset.yaw + ((this->gyro.yaw - this->gyro.offset.yaw) / this->gyro.offset.n); } int Joycon::init_bt() { this->bluetooth = true; unsigned char buf[0x40]; memset(buf, 0, 0x40); //set blocking to nsure command is recieved: hid_set_nonblocking(this->handle, 0); //Enable vibration buf[0] = 0x01; //enabled; send_subcommand(0x1, 0x48, buf, 1); //enable IMU (gyro and accel) data buf[0] = 0x01; //Enabled send_subcommand(0x01, 0x40, buf, 1); // Set input report mode (to push at 60hz) // x00 Active polling mode for IR camera data. Answers with more than 300 bytes ID 31 packet // x01 Active polling mode // x02 Active polling mode for IR camera data.Special IR mode or before configuring it ? // x21 Unknown.An input report with this ID has pairing or mcu data or serial flash data or device info // x23 MCU update input report ? // 30 NPad standard mode. Pushes current state @60Hz. Default in SDK if arg is not in the list // 31 NFC mode. 
// Pushes large packets @60Hz
buf[0] = 0x30;
send_subcommand(0x01, 0x03, buf, 1);

// get calibration data:
memset(factory_stick_cal, 0, 0x12);
memset(user_stick_cal, 0, 0x16);
memset(sensor_model, 0, 0x12);
memset(stick_model, 0, 0x12);
memset(factory_sensor_cal, 0, 0x18);
memset(user_sensor_cal, 0, 0x1A);
memset(factory_sensor_cal_calm, 0, 0xC);
memset(user_sensor_cal_calm, 0, 0xC);
memset(sensor_cal, 0, sizeof(sensor_cal));
memset(stick_cal_x_l, 0, sizeof(stick_cal_x_l));
memset(stick_cal_y_l, 0, sizeof(stick_cal_y_l));
memset(stick_cal_x_r, 0, sizeof(stick_cal_x_r));
memset(stick_cal_y_r, 0, sizeof(stick_cal_y_r));

get_spi_data(0x6020, 0x18, factory_sensor_cal);
get_spi_data(0x603D, 0x12, factory_stick_cal);
get_spi_data(0x6080, 0x6, sensor_model);
get_spi_data(0x6086, 0x12, stick_model);
get_spi_data(0x6098, 0x12, &stick_model[0x12]);
get_spi_data(0x8010, 0x16, user_stick_cal);
get_spi_data(0x8026, 0x1A, user_sensor_cal);

// get stick calibration data:

// factory calibration:
if (this->left_right == 1 || this->left_right == 3) {
    stick_cal_x_l[1] = (factory_stick_cal[4] << 8) & 0xF00 | factory_stick_cal[3];
    stick_cal_y_l[1] = (factory_stick_cal[5] << 4) | (factory_stick_cal[4] >> 4);
    stick_cal_x_l[0] = stick_cal_x_l[1] - ((factory_stick_cal[7] << 8) & 0xF00 | factory_stick_cal[6]);
    stick_cal_y_l[0] = stick_cal_y_l[1] - ((factory_stick_cal[8] << 4) | (factory_stick_cal[7] >> 4));
    stick_cal_x_l[2] = stick_cal_x_l[1] + ((factory_stick_cal[1] << 8) & 0xF00 | factory_stick_cal[0]);
    stick_cal_y_l[2] = stick_cal_y_l[1] + ((factory_stick_cal[2] << 4) | (factory_stick_cal[1] >> 4));
}

if (this->left_right == 2 || this->left_right == 3) {
    stick_cal_x_r[1] = (factory_stick_cal[10] << 8) & 0xF00 | factory_stick_cal[9];
    stick_cal_y_r[1] = (factory_stick_cal[11] << 4) | (factory_stick_cal[10] >> 4);
    stick_cal_x_r[0] = stick_cal_x_r[1] - ((factory_stick_cal[13] << 8) & 0xF00 | factory_stick_cal[12]);
    stick_cal_y_r[0] = stick_cal_y_r[1] - ((factory_stick_cal[14] << 4) | (factory_stick_cal[13] >> 4));
    stick_cal_x_r[2] = stick_cal_x_r[1] + ((factory_stick_cal[16] << 8) & 0xF00 | factory_stick_cal[15]);
    stick_cal_y_r[2] = stick_cal_y_r[1] + ((factory_stick_cal[17] << 4) | (factory_stick_cal[16] >> 4));
}

// if there is user calibration data:
if ((user_stick_cal[0] | user_stick_cal[1] << 8) == 0xA1B2) {
    stick_cal_x_l[1] = (user_stick_cal[6] << 8) & 0xF00 | user_stick_cal[5];
    stick_cal_y_l[1] = (user_stick_cal[7] << 4) | (user_stick_cal[6] >> 4);
    stick_cal_x_l[0] = stick_cal_x_l[1] - ((user_stick_cal[9] << 8) & 0xF00 | user_stick_cal[8]);
    stick_cal_y_l[0] = stick_cal_y_l[1] - ((user_stick_cal[10] << 4) | (user_stick_cal[9] >> 4));
    stick_cal_x_l[2] = stick_cal_x_l[1] + ((user_stick_cal[3] << 8) & 0xF00 | user_stick_cal[2]);
    stick_cal_y_l[2] = stick_cal_y_l[1] + ((user_stick_cal[4] << 4) | (user_stick_cal[3] >> 4));
    //FormJoy::myform1->textBox_lstick_ucal->Text = String::Format(L"L Stick User:\r\nCenter X,Y: ({0:X3}, {1:X3})\r\nX: [{2:X3} - {4:X3}] Y: [{3:X3} - {5:X3}]",
    //    stick_cal_x_l[1], stick_cal_y_l[1], stick_cal_x_l[0], stick_cal_y_l[0], stick_cal_x_l[2], stick_cal_y_l[2]);
}
else {
    //FormJoy::myform1->textBox_lstick_ucal->Text = L"L Stick User:\r\nNo calibration";
    //printf("no user calibration data for left stick.\n");
}

if ((user_stick_cal[0xB] | user_stick_cal[0xC] << 8) == 0xA1B2) {
    stick_cal_x_r[1] = (user_stick_cal[14] << 8) & 0xF00 | user_stick_cal[13];
    stick_cal_y_r[1] = (user_stick_cal[15] << 4) | (user_stick_cal[14] >> 4);
    stick_cal_x_r[0] = stick_cal_x_r[1] - ((user_stick_cal[17] << 8) & 0xF00 | user_stick_cal[16]);
    stick_cal_y_r[0] = stick_cal_y_r[1] - ((user_stick_cal[18] << 4) | (user_stick_cal[17] >> 4));
    stick_cal_x_r[2] = stick_cal_x_r[1] + ((user_stick_cal[20] << 8) & 0xF00 | user_stick_cal[19]);
    stick_cal_y_r[2] = stick_cal_y_r[1] + ((user_stick_cal[21] << 4) | (user_stick_cal[20] >> 4));
    //FormJoy::myform1->textBox_rstick_ucal->Text = String::Format(L"R Stick User:\r\nCenter X,Y: ({0:X3}, {1:X3})\r\nX: [{2:X3} - {4:X3}] Y: [{3:X3} - {5:X3}]",
    //    stick_cal_x_r[1], stick_cal_y_r[1], stick_cal_x_r[0], stick_cal_y_r[0], stick_cal_x_r[2], stick_cal_y_r[2]);
}
else {
    //FormJoy::myform1->textBox_rstick_ucal->Text = L"R Stick User:\r\nNo calibration";
    //printf("no user calibration data for right stick.\n");
}

// get gyro / accelerometer calibration data:

// factory calibration:

// Acc cal origin position
sensor_cal[0][0] = uint16_to_int16(factory_sensor_cal[0] | factory_sensor_cal[1] << 8);
sensor_cal[0][1] = uint16_to_int16(factory_sensor_cal[2] | factory_sensor_cal[3] << 8);
sensor_cal[0][2] = uint16_to_int16(factory_sensor_cal[4] | factory_sensor_cal[5] << 8);

// Gyro cal origin position
sensor_cal[1][0] = uint16_to_int16(factory_sensor_cal[0xC] | factory_sensor_cal[0xD] << 8);
sensor_cal[1][1] = uint16_to_int16(factory_sensor_cal[0xE] | factory_sensor_cal[0xF] << 8);
sensor_cal[1][2] = uint16_to_int16(factory_sensor_cal[0x10] | factory_sensor_cal[0x11] << 8);

// user calibration:
if ((user_sensor_cal[0x0] | user_sensor_cal[0x1] << 8) == 0xA1B2) {
    //FormJoy::myform1->textBox_6axis_ucal->Text = L"6-Axis User (XYZ):\r\nAcc: ";
    //for (int i = 0; i < 0xC; i = i + 6) {
    //    FormJoy::myform1->textBox_6axis_ucal->Text += String::Format(L"{0:X4} {1:X4} {2:X4}\r\n ",
    //        user_sensor_cal[i + 2] | user_sensor_cal[i + 3] << 8,
    //        user_sensor_cal[i + 4] | user_sensor_cal[i + 5] << 8,
    //        user_sensor_cal[i + 6] | user_sensor_cal[i + 7] << 8);
    //}

    // Acc cal origin position
    sensor_cal[0][0] = uint16_to_int16(user_sensor_cal[2] | user_sensor_cal[3] << 8);
    sensor_cal[0][1] = uint16_to_int16(user_sensor_cal[4] | user_sensor_cal[5] << 8);
    sensor_cal[0][2] = uint16_to_int16(user_sensor_cal[6] | user_sensor_cal[7] << 8);

    //FormJoy::myform1->textBox_6axis_ucal->Text += L"\r\nGyro: ";
    //for (int i = 0xC; i < 0x18; i = i + 6) {
    //    FormJoy::myform1->textBox_6axis_ucal->Text += String::Format(L"{0:X4} {1:X4} {2:X4}\r\n ",
    //        user_sensor_cal[i + 2] | user_sensor_cal[i + 3] << 8,
    //        user_sensor_cal[i + 4] | user_sensor_cal[i + 5] << 8,
    //        user_sensor_cal[i + 6] | user_sensor_cal[i + 7] << 8);
    //}

    // Gyro cal origin position
    sensor_cal[1][0] = uint16_to_int16(user_sensor_cal[0xE] | user_sensor_cal[0xF] << 8);
    sensor_cal[1][1] = uint16_to_int16(user_sensor_cal[0x10] | user_sensor_cal[0x11] << 8);
    sensor_cal[1][2] = uint16_to_int16(user_sensor_cal[0x12] | user_sensor_cal[0x13] << 8);
}
else {
    //FormJoy::myform1->textBox_6axis_ucal->Text = L"\r\n\r\nUser:\r\nNo calibration";
}

// Use SPI calibration and convert them to SI acc unit
acc_cal_coeff[0] = (float)(1.0 / (float)(16384 - uint16_to_int16(sensor_cal[0][0]))) * 4.0f * 9.8f;
acc_cal_coeff[1] = (float)(1.0 / (float)(16384 - uint16_to_int16(sensor_cal[0][1]))) * 4.0f * 9.8f;
acc_cal_coeff[2] = (float)(1.0 / (float)(16384 - uint16_to_int16(sensor_cal[0][2]))) * 4.0f * 9.8f;

// Use SPI calibration and convert them to SI gyro unit
gyro_cal_coeff[0] = (float)(936.0 / (float)(13371 - uint16_to_int16(sensor_cal[1][0])) * 0.01745329251994);
gyro_cal_coeff[1] = (float)(936.0 / (float)(13371 - uint16_to_int16(sensor_cal[1][1])) * 0.01745329251994);
gyro_cal_coeff[2] = (float)(936.0 / (float)(13371 - uint16_to_int16(sensor_cal[1][2])) * 0.01745329251994);

return 0;
}

// do I need this? probably not
void init_usb();

void Joycon::CalcAnalogStick() {
    if (this->left_right == 1) {
        CalcAnalogStick(
            this->stick.CalX,
            this->stick.CalY,
            this->stick.x,
            this->stick.y,
            this->stick_cal_x_l,
            this->stick_cal_y_l);
    }
    else if (this->left_right == 2) {
        CalcAnalogStick(
            this->stick.CalX,
            this->stick.CalY,
            this->stick.x,
            this->stick.y,
            this->stick_cal_x_r,
            this->stick_cal_y_r);
    }
}

void Joycon::CalcAnalogStick(
    float &pOutX,        // out: resulting stick X value
    float &pOutY,        // out: resulting stick Y value
    uint16_t x,          // in: initial stick X value
    uint16_t y,          // in: initial stick Y value
    uint16_t x_calc[3],  // calc -X, CenterX, +X
    uint16_t y_calc[3]   // calc -Y, CenterY, +Y
) {
    float x_f, y_f;

    // Apply Joy-Con center deadzone. 0xAE translates to approx. 15%. The Pro Controller has an approx. 10% deadzone.
    float deadZoneCenter = 0.15f;
    // Add a small amount of outer deadzone to avoid edge cases or machine variety.
    float deadZoneOuter = 0.10f;

    // convert to float based on calibration and valid ranges per +/- axis
    x = clamp(x, x_calc[0], x_calc[2]);
    y = clamp(y, y_calc[0], y_calc[2]);
    if (x >= x_calc[1]) {
        x_f = (float)(x - x_calc[1]) / (float)(x_calc[2] - x_calc[1]);
    }
    else {
        x_f = -((float)(x - x_calc[1]) / (float)(x_calc[0] - x_calc[1]));
    }
    if (y >= y_calc[1]) {
        y_f = (float)(y - y_calc[1]) / (float)(y_calc[2] - y_calc[1]);
    }
    else {
        y_f = -((float)(y - y_calc[1]) / (float)(y_calc[0] - y_calc[1]));
    }

    // Interpolate zone between deadzones
    float mag = sqrtf(x_f * x_f + y_f * y_f);
    if (mag > deadZoneCenter) {
        // scale such that output magnitude is in the range [0.0f, 1.0f]
        float legalRange = 1.0f - deadZoneOuter - deadZoneCenter;
        float normalizedMag = min(1.0f, (mag - deadZoneCenter) / legalRange);
        float scale = normalizedMag / mag;
        pOutX = (x_f * scale);
        pOutY = (y_f * scale);
    }
    else {
        // stick is in the inner dead zone
        pOutX = 0.0f;
        pOutY = 0.0f;
    }
}

int Joycon::get_spi_data(uint32_t offset, const uint16_t read_len, uint8_t *test_buf) {
    int res;
    uint8_t buf[0x100];
    while (1) {
        memset(buf, 0, sizeof(buf));
        auto hdr = (brcm_hdr *)buf;
        auto pkt = (brcm_cmd_01 *)(hdr + 1);
        hdr->cmd = 1;
        hdr->rumble[0] = timing_byte;
        buf[1] = timing_byte;
        timing_byte++;
        if (timing_byte > 0xF) timing_byte = 0x0;
        pkt->subcmd = 0x10;
        pkt->offset = offset;
        pkt->size = read_len;

        for (int i = 11; i < 22; i++) {
            buf[i] = buf[i + 3];
        }

        res = hid_write(handle, buf, sizeof(*hdr) + sizeof(*pkt));
        res = hid_read(handle, buf, sizeof(buf));

        if ((*(uint16_t*)&buf[0xD] == 0x1090) && (*(uint16_t*)&buf[0xF] == offset)) {
            break;
        }
    }
    if (res >= 0x14 + read_len) {
        for (int i = 0; i < read_len; i++) {
            test_buf[i] = buf[0x14 + i];
        }
    }
    return 0;
}

int Joycon::write_api_data(uint32_t offset, const uint16_t write_len, uint8_t *test_buf) {
    int res;
    uint8_t buf[0x100];
    int error_writing = 0;
    while (1) {
        memset(buf, 0, sizeof(buf));
        auto hdr = (brcm_hdr *)buf;
        auto pkt = (brcm_cmd_01 *)(hdr + 1);
        hdr->cmd = 1;
        hdr->rumble[0] = timing_byte;
        timing_byte++;
        if (timing_byte > 0xF) timing_byte = 0x0;
        pkt->subcmd = 0x11;
        pkt->offset = offset;
        pkt->size = write_len;
        for (int i = 0; i < write_len; i++) {
            buf[0x10 + i] = test_buf[i];
        }
        res = hid_write(handle, buf, sizeof(*hdr) + sizeof(*pkt) + write_len);
        res = hid_read(handle, buf, sizeof(buf));
        if (*(uint16_t*)&buf[0xD] == 0x1180) break;
        error_writing++;
        if (error_writing == 125) {
            return 1;
        }
    }
    return 0;
}
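The radial dead-zone mapping in CalcAnalogStick above normalizes each axis against its min/center/max calibration and then rescales the vector magnitude so the output spans [0, 1] between the inner and outer dead zones. Here is the same mapping as a self-contained C++ sketch that can be tested in isolation; the calibration triples are made-up placeholder values, not real Joy-Con data.

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Map a raw 12-bit axis sample to [-1, 1] using min/center/max calibration.
static float normalize_axis(uint16_t raw, const uint16_t cal[3]) {
    raw = std::clamp(raw, cal[0], cal[2]);
    if (raw >= cal[1])
        return float(raw - cal[1]) / float(cal[2] - cal[1]);
    return -(float(raw - cal[1]) / float(cal[0] - cal[1]));
}

int main() {
    const uint16_t cal_x[3] = {700, 2048, 3400};  // assumed -X / center / +X
    const uint16_t cal_y[3] = {700, 2048, 3400};
    const float deadZoneCenter = 0.15f, deadZoneOuter = 0.10f;

    float x = normalize_axis(2600, cal_x);
    float y = normalize_axis(1800, cal_y);

    float mag = std::sqrt(x * x + y * y);
    float outX = 0.0f, outY = 0.0f;
    if (mag > deadZoneCenter) {
        // Rescale so the output magnitude spans [0, 1] between the two dead zones.
        float legalRange = 1.0f - deadZoneOuter - deadZoneCenter;
        float normalizedMag = std::min(1.0f, (mag - deadZoneCenter) / legalRange);
        float scale = normalizedMag / mag;
        outX = x * scale;
        outY = y * scale;
    }
    std::printf("stick: (%f, %f)\n", outX, outY);
}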
Bottas has been confirmed at Williams, Hulkenberg has re-signed with Force India and Monza is still under threat… but the biggest news in Formula One right now (as always) is that Lewis Hamilton has done a thing. Yes, the reigning champ has turned up to Monza with blonde hair. Blonde! Outrageous! And of course, like everything Hamilton does, the internet has gone totally nuts about it.

Hold the phone! Lewis Hamilton has turned up to the Italian GP with BLONDE hair. Oh yes! — Sky F1 Insider (@SkyF1Insider) September 3, 2015

Oh no he hasn't… oh yes he has… Lewis Hamilton sporting his new blonde locks! #F1 pic.twitter.com/XC001Rk4q5 — Planet F1 (@Planet_F1) September 3, 2015

OMG look at it! Blonde! Lewis Hamilton! Blonde! I can’t believe it. Unbelievable Jeff!
/* This is dvipdfmx, an eXtended version of dvipdfm by <NAME>. Copyright (C) 2002-2016 by <NAME> and <NAME>, the dvipdfmx project team. Copyright (C) 1998, 1999 by <NAME> <<EMAIL>> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. */ #![allow( non_camel_case_types, non_snake_case, unused_mut )] use crate::SkipBlank; use super::{spc_arg, spc_env}; use crate::dpx_dpxutil::{ParseCIdent, ParseFloatDecimal}; use crate::dpx_pdfcolor::PdfColor; use crate::dpx_pdfdev::{Rect, TMatrix, transform_info}; use crate::dpx_pdfparse::SkipWhite; use crate::spc_warn; use crate::DisplayExt; use libc::{atof}; use std::ffi::CString; /* tectonic/core-memory.h: basic dynamic memory helpers Copyright 2016-2018 the Tectonic Project Licensed under the MIT License. */ #[no_mangle] pub unsafe extern "C" fn spc_util_read_numbers( mut values: *mut f64, mut num_values: i32, mut args: *mut spc_arg, ) -> i32 { (*args).cur.skip_blank(); let mut count = 0; while count < num_values && !(*args).cur.is_empty() { if let Some(q) = (*args).cur.parse_float_decimal() { *values.offset(count as isize) = atof(q.as_ptr()); (*args).cur.skip_blank(); count += 1 } else { break; } } count } unsafe fn rgb_color_from_hsv(mut h: f64, mut s: f64, mut v: f64) -> PdfColor { let mut b = v; let mut g = b; let mut r = g; if s != 0.0f64 { let h6 = h * 6i32 as f64; let i = h6 as i32; let f = h6 - i as f64; let v1 = v * (1i32 as f64 - s); let v2 = v * (1i32 as f64 - s * f); let v3 = v * (1i32 as f64 - s * (1i32 as f64 - f)); match i { 0 => { r = v; g = v3; b = v1 } 1 => { r = v2; g = v; b = v1 } 2 => { r = v1; g = v; b = v3 } 3 => { r = v1; g = v2; b = v } 4 => { r = v3; g = v1; b = v } 5 => { r = v; g = v1; b = v2 } 6 => { r = v; g = v1; b = v2 } _ => {} } } PdfColor::from_rgb(r, g, b).unwrap() } unsafe fn spc_read_color_color( mut spe: *mut spc_env, mut ap: *mut spc_arg, ) -> Result<PdfColor, ()> { let mut cv: [f64; 4] = [0.; 4]; let mut result: Result<PdfColor, ()>; if let Some(q) = (*ap).cur.parse_c_ident() { (*ap).cur.skip_blank(); match q.to_bytes() { b"rgb" => { /* Handle rgb color */ let nc = spc_util_read_numbers(cv.as_mut_ptr(), 3i32, ap); if nc != 3i32 { spc_warn!(spe, "Invalid value for RGB color specification."); result = Err(()) } else { result = PdfColor::from_rgb(cv[0], cv[1], cv[2]).map_err(|err| err.warn()) } }, b"cmyk" => { /* Handle cmyk color */ let nc = spc_util_read_numbers(cv.as_mut_ptr(), 4i32, ap); if nc != 4i32 { spc_warn!(spe, "Invalid value for CMYK color specification."); result = Err(()) } else { result = PdfColor::from_cmyk(cv[0], cv[1], cv[2], cv[3]).map_err(|err| err.warn()) } }, b"gray" => { /* Handle gray */ let nc = spc_util_read_numbers(cv.as_mut_ptr(), 1i32, ap); if nc != 1i32 { spc_warn!(spe, "Invalid value for gray color specification."); result = Err(()) } else { result = PdfColor::from_gray(cv[0]).map_err(|err| err.warn()) } }, b"spot" => { /* Handle spot colors */ if let Some(color_name) = 
(*ap).cur.parse_c_ident() { /* Must be a "named" color */ (*ap).cur.skip_blank(); let nc = spc_util_read_numbers(cv.as_mut_ptr(), 1, ap); if nc != 1 { spc_warn!(spe, "Invalid value for spot color specification."); result = Err(()); } else { result = PdfColor::from_spot(color_name, cv[0]) .map_err(|err| err.warn()) } } else { spc_warn!(spe, "No valid spot color name specified?"); return Err(()); } }, b"hsb" => { let nc = spc_util_read_numbers(cv.as_mut_ptr(), 3i32, ap); if nc != 3i32 { spc_warn!(spe, "Invalid value for HSB color specification."); result = Err(()); } else { let color = rgb_color_from_hsv(cv[0], cv[1], cv[2]); if let &PdfColor::Rgb(r, g, b) = &color { spc_warn!( spe, "HSB color converted to RGB: hsb: <{}, {}, {}> ==> rgb: <{}, {}, {}>", cv[0], cv[1], cv[2], r, g, b ); } else { unreachable!(); } result = Ok(color); } }, _ => { result = if let Ok(name) = q.to_str() { if let Some(color) = pdf_color_namedcolor(name) { Ok(color) } else { Err(()) } } else { Err(()) }; if result.is_err() { spc_warn!( spe, "Unrecognized color name: {}", q.display(), ); } } } } else { spc_warn!(spe, "No valid color specified?"); return Err(()); } result } /* Argument for this is PDF_Number or PDF_Array. * But we ignore that since we don't want to add * dependency to pdfxxx and @foo can not be * allowed for color specification. "pdf" here * means pdf: special syntax. */ unsafe fn spc_read_color_pdf(mut spe: *mut spc_env, mut ap: *mut spc_arg) -> Result<PdfColor, ()> { let mut cv: [f64; 4] = [0.; 4]; /* at most four */ let mut isarry: bool = false; (*ap).cur.skip_blank(); if (*ap).cur[0] == b'[' { (*ap).cur = &(*ap).cur[1..]; (*ap).cur.skip_blank(); isarry = true } let nc = spc_util_read_numbers(cv.as_mut_ptr(), 4i32, ap); let mut result = match nc { 1 => PdfColor::from_gray(cv[0]).map_err(|err| err.warn()), 3 => PdfColor::from_rgb(cv[0], cv[1], cv[2]).map_err(|err| err.warn()), 4 => PdfColor::from_cmyk(cv[0], cv[1], cv[2], cv[3]).map_err(|err| err.warn()), _ => { /* Try to read the color names defined in dvipsname.def */ if let Some(q) = (*ap).cur.parse_c_ident() { let mut result = q .to_str() .ok() .and_then(|name| pdf_color_namedcolor(name)) .ok_or(()); if result.is_err() { spc_warn!( spe, "Unrecognized color name: {}, keep the current color", q.display(), ); } result } else { spc_warn!(spe, "No valid color specified?"); return Err(()); } } }; if isarry { (*ap).cur.skip_blank(); if (*ap).cur.is_empty() || (*ap).cur[0] != b']' { spc_warn!(spe, "Unbalanced \'[\' and \']\' in color specification."); result = Err(()) } else { (*ap).cur = &(*ap).cur[1..]; } } result } /* This is for reading *single* color specification. 
*/ #[no_mangle] pub unsafe extern "C" fn spc_util_read_colorspec( mut spe: *mut spc_env, mut ap: *mut spc_arg, mut syntax: bool, ) -> Result<PdfColor, ()> { assert!(!spe.is_null() && !ap.is_null()); (*ap).cur.skip_blank(); if (*ap).cur.is_empty() { Err(()) } else if syntax { spc_read_color_color(spe, ap) } else { spc_read_color_pdf(spe, ap) } } #[no_mangle] pub unsafe extern "C" fn spc_util_read_pdfcolor( mut spe: *mut spc_env, mut ap: *mut spc_arg, defaultcolor: Option<&PdfColor>, ) -> Result<PdfColor, ()> { assert!(!spe.is_null() && !ap.is_null()); (*ap).cur.skip_blank(); if (*ap).cur.is_empty() { Err(()) } else if let Some(c) = spc_read_color_pdf(spe, ap) .ok() .or_else(|| defaultcolor.cloned()) { Ok(c) } else { Err(()) } } pub trait ReadLengthSpc { fn read_length(&mut self, spe: &spc_env) -> Result<f64, ()>; } impl ReadLengthSpc for &[u8] { fn read_length(&mut self, spe: &spc_env) -> Result<f64, ()> { let mut p = *self; /* inverse magnify */ let mut u: f64 = 1.0f64; let mut error: i32 = 0i32; let q = p.parse_float_decimal(); if q.is_none() { *self = p; return Err(()); } let v = unsafe { atof(q.unwrap().as_ptr()) }; p.skip_white(); if let Some(q) = p.parse_c_ident() { let mut bytes = q.to_bytes(); if bytes.starts_with(b"true") { u /= if spe.mag != 0.0f64 { spe.mag } else { 1.0f64 }; bytes = &bytes[b"true".len()..]; } let q = if bytes.is_empty() { // TODO: check /* "true" was a separate word from the units */ p.skip_white(); p.parse_c_ident() } else { Some(CString::new(bytes).unwrap()) }; if let Some(ident) = q { match ident.to_bytes() { b"pt" => u *= 72. / 72.27, b"in" => u *= 72., b"cm" => u *= 72. / 2.54, b"mm" => u *= 72. / 25.4, b"bp" => u *= 1., b"pc" => u *= 12. * 72. / 72.27, b"dd" => u *= 1238. / 1157. * 72. / 72.27, b"cc" => u *= 12. * 1238. / 1157. * 72. / 72.27, b"sp" => u *= 72. / (72.27 * 65536.), _ => { spc_warn!(spe, "Unknown unit of measure: {}", ident.display(),); error = -1i32 } } } else { spc_warn!(spe, "Missing unit of measure after \"true\""); error = -1i32 } } *self = p; if error == 0 { Ok( v * u ) } else { Err(()) } } } /* * Compute a transformation matrix * transformations are applied in the following * order: scaling, rotate, displacement. 
*/ extern "C" fn make_transmatrix( M: &mut TMatrix, mut xoffset: f64, mut yoffset: f64, mut xscale: f64, mut yscale: f64, mut rotate: f64, ) { let (s, c) = rotate.sin_cos(); *M = TMatrix::row_major( xscale * c, xscale * s, -yscale * s, yscale * c, xoffset, yoffset, ); } unsafe fn spc_read_dimtrns_dvips( mut spe: *mut spc_env, t: &mut transform_info, mut ap: *mut spc_arg, ) -> i32 { const _DTKEYS: [&[u8]; 14] = [ b"hoffset", b"voffset", b"hsize", b"vsize", b"hscale", b"vscale", b"angle", b"clip", b"llx", b"lly", b"urx", b"ury", b"rwi", b"rhi", ]; let mut error: i32 = 0i32; let mut rotate = 0.0f64; let mut yoffset = rotate; let mut xoffset = yoffset; let mut yscale = 1.0f64; let mut xscale = yscale; (*ap).cur.skip_blank(); while error == 0 && !(*ap).cur.is_empty() { if let Some(kp) = (*ap).cur.parse_c_ident() { let mut k = 0; for &key in &_DTKEYS { if kp.to_bytes() == key { break; } k += 1; } if k == 14 { spc_warn!( spe, "Unrecognized dimension/transformation key: {}", kp.display(), ); error = -1i32; break; } else { (*ap).cur.skip_blank(); if k == 7 { t.flags |= 1i32 << 3i32; /* not key-value */ } else { if !(*ap).cur.is_empty() && (*ap).cur[0] == b'=' { (*ap).cur = &(*ap).cur[1..]; (*ap).cur.skip_blank(); } let vp = if (*ap).cur[0] == b'\'' || (*ap).cur[0] == b'\"' { let mut qchr = (*ap).cur[0]; (*ap).cur = &(*ap).cur[1..]; (*ap).cur.skip_blank(); let mut vp = (*ap).cur.parse_float_decimal(); (*ap).cur.skip_blank(); if vp.is_some() && qchr != (*ap).cur[0] { spc_warn!( spe, "Syntax error in dimension/transformation specification." ); error = -1i32; vp = None; } (*ap).cur = &(*ap).cur[1..]; vp } else { (*ap).cur.parse_float_decimal() }; if error == 0 && vp.is_none() { spc_warn!( spe, "Missing value for dimension/transformation: {}", kp.display(), ); error = -1i32 } if error != 0 { break; } if let Some(vp) = vp { let vp = vp.as_ptr(); match k { 0 => xoffset = atof(vp), 1 => yoffset = atof(vp), 2 => { t.width = atof(vp); t.flags |= 1i32 << 1i32 } 3 => { t.height = atof(vp); t.flags |= 1i32 << 2i32 } 4 => xscale = atof(vp) / 100.0f64, 5 => yscale = atof(vp) / 100.0f64, 6 => rotate = 3.14159265358979323846f64 * atof(vp) / 180.0f64, 8 => { t.bbox.ll.x = atof(vp); t.flags |= 1i32 << 0i32 } 9 => { t.bbox.ll.y = atof(vp); t.flags |= 1i32 << 0i32 } 10 => { t.bbox.ur.x = atof(vp); t.flags |= 1i32 << 0i32 } 11 => { t.bbox.ur.y = atof(vp); t.flags |= 1i32 << 0i32 } 12 => { t.width = atof(vp) / 10.0f64; t.flags |= 1i32 << 1i32 } 13 => { t.height = atof(vp) / 10.0f64; t.flags |= 1i32 << 2i32 } _ => {} } (*ap).cur.skip_blank(); } else { break; } } } } else { break; } } make_transmatrix(&mut t.matrix, xoffset, yoffset, xscale, yscale, rotate); error } /* "page" and "pagebox" are not dimension nor transformation nor * something acceptable to put into here. * PLEASE DONT ADD HERE! 
*/ unsafe fn spc_read_dimtrns_pdfm( mut spe: *mut spc_env, p: &mut transform_info, mut ap: *mut spc_arg, ) -> i32 { let mut error: i32 = 0i32; let mut has_matrix = 0i32; let mut has_rotate = has_matrix; let mut has_scale = has_rotate; /* default: do clipping */ let mut has_yscale = has_scale; let mut has_xscale = has_yscale; let mut yscale = 1.0f64; let mut xscale = yscale; let mut rotate = 0.0f64; p.flags |= 1i32 << 3i32; p.flags &= !(1i32 << 4i32); (*ap).cur.skip_blank(); while error == 0 && !(*ap).cur.is_empty() { if let Some(kp) = (*ap).cur.parse_c_ident() { (*ap).cur.skip_blank(); match kp.to_bytes() { b"width" => { if let Ok(width) = (*ap).cur.read_length(&*spe) { p.width = width; } else { error = -1; } p.flags |= 1i32 << 1i32 } b"height" => { if let Ok(height) = (*ap).cur.read_length(&*spe) { p.height = height; } else { error = -1; } p.flags |= 1i32 << 2i32 } b"depth" => { if let Ok(depth) = (*ap).cur.read_length(&*spe) { p.depth = depth; } else { error = -1; } p.flags |= 1i32 << 2i32 } b"scale" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { yscale = atof(vp.as_ptr()); xscale = yscale; has_scale = 1i32; } else { error = -1i32 } } b"xscale" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { xscale = atof(vp.as_ptr()); has_xscale = 1i32; } else { error = -1i32 } } b"yscale" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { yscale = atof(vp.as_ptr()); has_yscale = 1i32; } else { error = -1i32 } } b"rotate" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { rotate = 3.14159265358979323846f64 * atof(vp.as_ptr()) / 180.0f64; has_rotate = 1i32; } else { error = -1i32 } } b"bbox" => { let mut v: [f64; 4] = [0.; 4]; if spc_util_read_numbers(v.as_mut_ptr(), 4i32, ap) != 4i32 { error = -1i32 } else { p.bbox = Rect::new((v[0], v[1]), (v[2], v[3])); p.flags |= 1i32 << 0i32 } } b"matrix" => { let mut v_0: [f64; 6] = [0.; 6]; if spc_util_read_numbers(v_0.as_mut_ptr(), 6i32, ap) != 6i32 { error = -1i32 } else { p.matrix = TMatrix::from_row_major_array(v_0); has_matrix = 1i32 } } b"clip" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { if atof(vp.as_ptr()) != 0. { p.flags |= 1i32 << 3i32 } else { p.flags &= !(1i32 << 3i32) } } else { error = -1i32 } } b"hide" => p.flags |= 1i32 << 4i32, _ => error = -1i32, } if error != 0 { spc_warn!( spe, "Unrecognized key or invalid value for dimension/transformation: {}", kp.display(), ); } else { (*ap).cur.skip_blank(); } } else { break; } } if error == 0 { /* Check consistency */ if has_xscale != 0 && p.flags & 1i32 << 1i32 != 0 { spc_warn!(spe, "Can\'t supply both width and xscale. Ignore xscale."); xscale = 1.0f64 } else if has_yscale != 0 && p.flags & 1i32 << 2i32 != 0 { spc_warn!( spe, "Can\'t supply both height/depth and yscale. Ignore yscale." ); yscale = 1.0f64 } else if has_scale != 0 && (has_xscale != 0 || has_yscale != 0) { spc_warn!(spe, "Can\'t supply overall scale along with axis scales."); error = -1i32 } else if has_matrix != 0 && (has_scale != 0 || has_xscale != 0 || has_yscale != 0 || has_rotate != 0) { spc_warn!(spe, "Can\'t supply transform matrix along with scales or rotate. 
Ignore scales and rotate."); } } if has_matrix == 0 { make_transmatrix(&mut p.matrix, 0.0f64, 0.0f64, xscale, yscale, rotate); } if p.flags & 1i32 << 0i32 == 0 { p.flags &= !(1i32 << 3i32) /* no clipping needed */ } error } #[no_mangle] pub unsafe extern "C" fn spc_util_read_dimtrns( mut spe: *mut spc_env, ti: &mut transform_info, mut args: *mut spc_arg, mut syntax: i32, ) -> i32 { if spe.is_null() || args.is_null() { return -1i32; } if syntax != 0 { return spc_read_dimtrns_dvips(spe, ti, args); } else { return spc_read_dimtrns_pdfm(spe, ti, args); }; } /* syntax 1: ((rgb|cmyk|hsb|gray) colorvalues)|colorname * syntax 0: pdf_number|pdf_array * * This is for reading *single* color specification. */ #[no_mangle] pub unsafe extern "C" fn spc_util_read_blahblah( mut spe: *mut spc_env, p: &mut transform_info, mut page_no: *mut i32, mut bbox_type: *mut i32, mut ap: *mut spc_arg, ) -> i32 { let mut error: i32 = 0i32; let mut has_matrix = 0i32; /* default: do clipping */ let mut has_rotate = has_matrix; let mut has_scale = has_rotate; let mut has_yscale = has_scale; let mut has_xscale = has_yscale; let mut yscale = 1.0f64; let mut xscale = yscale; let mut rotate = 0.0f64; p.flags |= 1i32 << 3i32; p.flags &= !(1i32 << 4i32); (*ap).cur.skip_blank(); while error == 0 && !(*ap).cur.is_empty() { if let Some(kp) = (*ap).cur.parse_c_ident() { (*ap).cur.skip_blank(); match kp.to_bytes() { b"width" => { if let Ok(width) = (*ap).cur.read_length(&*spe) { p.width = width; } else { error = -1; } p.flags |= 1i32 << 1i32 } b"height" => { if let Ok(height) = (*ap).cur.read_length(&*spe) { p.height = height; } else { error = -1; } p.flags |= 1i32 << 2i32 } b"depth" => { if let Ok(depth) = (*ap).cur.read_length(&*spe) { p.depth = depth; } else { error = -1; } p.flags |= 1i32 << 2i32 } b"scale" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { yscale = atof(vp.as_ptr()); xscale = yscale; has_scale = 1i32; } else { error = -1i32 } } b"xscale" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { xscale = atof(vp.as_ptr()); has_xscale = 1i32; } else { error = -1i32 } } b"yscale" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { yscale = atof(vp.as_ptr()); has_yscale = 1i32; } else { error = -1i32 } } b"rotate" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { rotate = 3.14159265358979323846f64 * atof(vp.as_ptr()) / 180.0f64; has_rotate = 1i32; } else { error = -1i32 } } b"bbox" => { let mut v: [f64; 4] = [0.; 4]; if spc_util_read_numbers(v.as_mut_ptr(), 4i32, ap) != 4i32 { error = -1i32 } else { p.bbox = Rect::new((v[0], v[1]), (v[2], v[3])); p.flags |= 1i32 << 0i32 } } b"matrix" => { let mut v_0: [f64; 6] = [0.; 6]; if spc_util_read_numbers(v_0.as_mut_ptr(), 6i32, ap) != 6i32 { error = -1i32 } else { p.matrix = TMatrix::from_row_major_array(v_0); has_matrix = 1i32 } } b"clip" => { if let Some(vp) = (*ap).cur.parse_float_decimal() { if atof(vp.as_ptr()) != 0. 
{ p.flags |= 1 << 3 } else { p.flags &= !(1 << 3) } } else { error = -1i32 } } b"page" => { let mut page: f64 = 0.; if !page_no.is_null() && spc_util_read_numbers(&mut page, 1i32, ap) == 1i32 { *page_no = page as i32 } else { error = -1i32 } } b"hide" => p.flags |= 1i32 << 4i32, b"pagebox" => { if let Some(q) = (*ap).cur.parse_c_ident() { if !bbox_type.is_null() { match q.to_bytes().to_ascii_lowercase().as_slice() { b"cropbox" => *bbox_type = 1, b"mediabox" => *bbox_type = 2, b"artbox" => *bbox_type = 3, b"trimbox" => *bbox_type = 4, b"bleedbox" => *bbox_type = 5, _ => {}, } } } else if !bbox_type.is_null() { *bbox_type = 0i32 } } _ => error = -1i32, } if error != 0 { spc_warn!( spe, "Unrecognized key or invalid value for dimension/transformation: {}", kp.display(), ); } else { (*ap).cur.skip_blank(); } } else { break; } } if error == 0 { /* Check consistency */ if has_xscale != 0 && p.flags & 1i32 << 1i32 != 0 { spc_warn!(spe, "Can\'t supply both width and xscale. Ignore xscale."); xscale = 1.0f64 } else if has_yscale != 0 && p.flags & 1i32 << 2i32 != 0 { spc_warn!( spe, "Can\'t supply both height/depth and yscale. Ignore yscale." ); yscale = 1.0f64 } else if has_scale != 0 && (has_xscale != 0 || has_yscale != 0) { spc_warn!(spe, "Can\'t supply overall scale along with axis scales."); error = -1i32 } else if has_matrix != 0 && (has_scale != 0 || has_xscale != 0 || has_yscale != 0 || has_rotate != 0) { spc_warn!(spe, "Can\'t supply transform matrix along with scales or rotate. Ignore scales and rotate."); } } if has_matrix == 0 { make_transmatrix(&mut p.matrix, 0.0f64, 0.0f64, xscale, yscale, rotate); } if p.flags & 1i32 << 0i32 == 0 { p.flags &= !(1i32 << 3i32) /* no clipping needed */ } error } /* Color names */ struct Colordef { key: &'static str, color: PdfColor, } impl Colordef { const fn new(key: &'static str, color: PdfColor) -> Self { Colordef { key, color } } } const COLORDEFS: [Colordef; 68] = [ Colordef::new("GreenYellow", PdfColor::Cmyk(0.15, 0.0, 0.69, 0.0)), Colordef::new("Yellow", PdfColor::Cmyk(0.0, 0.0, 1.0, 0.0)), Colordef::new("Goldenrod", PdfColor::Cmyk(0.0, 0.1, 0.84, 0.0)), Colordef::new("Dandelion", PdfColor::Cmyk(0.0, 0.29, 0.84, 0.0)), Colordef::new("Apricot", PdfColor::Cmyk(0.0, 0.32, 0.52, 0.0)), Colordef::new("Peach", PdfColor::Cmyk(0.0, 0.5, 0.7, 0.0)), Colordef::new("Melon", PdfColor::Cmyk(0.0, 0.46, 0.5, 0.0)), Colordef::new("YellowOrange", PdfColor::Cmyk(0.0, 0.42, 1.0, 0.0)), Colordef::new("Orange", PdfColor::Cmyk(0.0, 0.61, 0.87, 0.0)), Colordef::new("BurntOrange", PdfColor::Cmyk(0.0, 0.51, 1.0, 0.0)), Colordef::new("Bittersweet", PdfColor::Cmyk(0.0, 0.75, 1.0, 0.24)), Colordef::new("RedOrange", PdfColor::Cmyk(0.0, 0.77, 0.87, 0.0)), Colordef::new("Mahogany", PdfColor::Cmyk(0.0, 0.85, 0.87, 0.35)), Colordef::new("Maroon", PdfColor::Cmyk(0.0, 0.87, 0.68, 0.32)), Colordef::new("BrickRed", PdfColor::Cmyk(0.0, 0.89, 0.94, 0.28)), Colordef::new("Red", PdfColor::Cmyk(0.0, 1.0, 1.0, 0.0)), Colordef::new("OrangeRed", PdfColor::Cmyk(0.0, 1.0, 0.5, 0.0)), Colordef::new("RubineRed", PdfColor::Cmyk(0.0, 1.0, 0.13, 0.0)), Colordef::new("WildStrawberry", PdfColor::Cmyk(0.0, 0.96, 0.39, 0.0)), Colordef::new("Salmon", PdfColor::Cmyk(0.0, 0.53, 0.38, 0.0)), Colordef::new("CarnationPink", PdfColor::Cmyk(0.0, 0.63, 0.0, 0.0)), Colordef::new("Magenta", PdfColor::Cmyk(0.0, 1.0, 0.0, 0.0)), Colordef::new("VioletRed", PdfColor::Cmyk(0.0, 0.81, 0.0, 0.0)), Colordef::new("Rhodamine", PdfColor::Cmyk(0.0, 0.82, 0.0, 0.0)), Colordef::new("Mulberry", PdfColor::Cmyk(0.34, 0.90, 0.0, 
0.02)), Colordef::new("RedViolet", PdfColor::Cmyk(0.07, 0.9, 0.0, 0.34)), Colordef::new("Fuchsia", PdfColor::Cmyk(0.47, 0.91, 0.0, 0.08)), Colordef::new("Lavender", PdfColor::Cmyk(0.0, 0.48, 0.0, 0.0)), Colordef::new("Thistle", PdfColor::Cmyk(0.12, 0.59, 0.0, 0.0)), Colordef::new("Orchid", PdfColor::Cmyk(0.32, 0.64, 0.0, 0.0)), Colordef::new("DarkOrchid", PdfColor::Cmyk(0.4, 0.8, 0.2, 0.0)), Colordef::new("Purple", PdfColor::Cmyk(0.45, 0.86, 0.0, 0.0)), Colordef::new("Plum", PdfColor::Cmyk(0.50, 1.0, 0.0, 0.0)), Colordef::new("Violet", PdfColor::Cmyk(0.79, 0.88, 0.0, 0.0)), Colordef::new("RoyalPurple", PdfColor::Cmyk(0.75, 0.9, 0.0, 0.0)), Colordef::new("BlueViolet", PdfColor::Cmyk(0.86, 0.91, 0.0, 0.04)), Colordef::new("Periwinkle", PdfColor::Cmyk(0.57, 0.55, 0.0, 0.0)), Colordef::new("CadetBlue", PdfColor::Cmyk(0.62, 0.57, 0.23, 0.0)), Colordef::new("CornflowerBlue", PdfColor::Cmyk(0.65, 0.13, 0.0, 0.0)), Colordef::new("MidnightBlue", PdfColor::Cmyk(0.98, 0.13, 0.0, 0.43)), Colordef::new("NavyBlue", PdfColor::Cmyk(0.94, 0.54, 0.0, 0.0)), Colordef::new("RoyalBlue", PdfColor::Cmyk(1.0, 0.5, 0.0, 0.0)), Colordef::new("Blue", PdfColor::Cmyk(1.0, 1.0, 0.0, 0.0)), Colordef::new("Cerulean", PdfColor::Cmyk(0.94, 0.11, 0.0, 0.0)), Colordef::new("Cyan", PdfColor::Cmyk(1.0, 0.0, 0.0, 0.0)), Colordef::new("ProcessBlue", PdfColor::Cmyk(0.96, 0.0, 0.0, 0.0)), Colordef::new("SkyBlue", PdfColor::Cmyk(0.62, 0.0, 0.12, 0.0)), Colordef::new("Turquoise", PdfColor::Cmyk(0.85, 0.0, 0.20, 0.0)), Colordef::new("TealBlue", PdfColor::Cmyk(0.86, 0.0, 0.34, 0.02)), Colordef::new("Aquamarine", PdfColor::Cmyk(0.82, 0.0, 0.3, 0.0)), Colordef::new("BlueGreen", PdfColor::Cmyk(0.85, 0.0, 0.33, 0.0)), Colordef::new("Emerald", PdfColor::Cmyk(1.0, 0.0, 0.5, 0.0)), Colordef::new("JungleGreen", PdfColor::Cmyk(0.99, 0.0, 0.52, 0.0)), Colordef::new("SeaGreen", PdfColor::Cmyk(0.69, 0.0, 0.5, 0.0)), Colordef::new("Green", PdfColor::Cmyk(1.0, 0.0, 1.0, 0.00f64)), Colordef::new("ForestGreen", PdfColor::Cmyk(0.91, 0.0, 0.88, 0.12)), Colordef::new("PineGreen", PdfColor::Cmyk(0.92, 0.0, 0.59, 0.25)), Colordef::new("LimeGreen", PdfColor::Cmyk(0.5, 0.0, 1.0, 0.0)), Colordef::new("YellowGreen", PdfColor::Cmyk(0.44, 0.0, 0.74, 0.0)), Colordef::new("SpringGreen", PdfColor::Cmyk(0.26, 0.0, 0.76, 0.0)), Colordef::new("OliveGreen", PdfColor::Cmyk(0.64, 0.0, 0.95, 0.40)), Colordef::new("RawSienna", PdfColor::Cmyk(0.0, 0.72, 1.0, 0.45)), Colordef::new("Sepia", PdfColor::Cmyk(0.0, 0.83, 1.0, 0.7)), Colordef::new("Brown", PdfColor::Cmyk(0.0, 0.81, 1.0, 0.6)), Colordef::new("Tan", PdfColor::Cmyk(0.14, 0.42, 0.56, 0.0)), Colordef::new("Gray", PdfColor::Gray(0.5)), Colordef::new("Black", PdfColor::Gray(0.0)), Colordef::new("White", PdfColor::Gray(1.0)), ]; /* From pdfcolor.c */ unsafe fn pdf_color_namedcolor(name: &str) -> Option<PdfColor> { COLORDEFS .as_ref() .iter() .find(|&colordef| colordef.key == name) .map(|colordef| colordef.color.clone()) }
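The rgb_color_from_hsv function above implements the standard hexcone HSV-to-RGB algorithm. For reference, here is the same math as a compact, self-contained C++ sketch (an illustration only, not part of the crate; the h == 1.0 boundary is wrapped back to the red sextant):

#include <cstdio>

struct Rgb { double r, g, b; };

// Standard hexcone HSV -> RGB; h, s, v are all in [0, 1].
Rgb rgb_from_hsv(double h, double s, double v) {
    if (s == 0.0) return {v, v, v};   // achromatic (grey)
    double h6 = h * 6.0;
    int i = int(h6) % 6;              // sextant index; wraps h == 1.0 to red
    double f = h6 - int(h6);          // position within the sextant
    double v1 = v * (1.0 - s);
    double v2 = v * (1.0 - s * f);
    double v3 = v * (1.0 - s * (1.0 - f));
    switch (i) {
        case 0:  return {v, v3, v1};
        case 1:  return {v2, v, v1};
        case 2:  return {v1, v, v3};
        case 3:  return {v1, v2, v};
        case 4:  return {v3, v1, v};
        default: return {v, v1, v2};  // case 5
    }
}

int main() {
    Rgb c = rgb_from_hsv(0.5, 1.0, 1.0);  // expect cyan: 0 1 1
    std::printf("rgb: %.3f %.3f %.3f\n", c.r, c.g, c.b);
}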
// PrepareOccurrences prepares sample occurrences for testing, selected by test number.
func PrepareOccurrences(num int) occ.Occurrences {
	occs := occ.Occurrences{}
	switch num {
	case 1:
		occs = append(occs,
			occ.Occurrence{Symb: 'д', Occurrences: 5},
			occ.Occurrence{Symb: 'г', Occurrences: 6},
			occ.Occurrence{Symb: 'в', Occurrences: 6},
			occ.Occurrence{Symb: 'б', Occurrences: 7},
			occ.Occurrence{Symb: 'а', Occurrences: 15})
	case 2:
		occs = append(occs,
			occ.Occurrence{Symb: 'c', Occurrences: 3},
			occ.Occurrence{Symb: 'b', Occurrences: 2},
			occ.Occurrence{Symb: 'a', Occurrences: 1})
	}
	return occs
}
/* * write debug info buffer on stderr */ void dbg_flush(void) { if (dbg_used == 0) return; write(STDERR_FILENO, dbg_buf, dbg_used); dbg_used = 0; }
// GetPort returns the port to run the server on func (c *Config) GetPort() int { c.mu.RLock() defer c.mu.RUnlock() return c.Port }
import logging
from datetime import datetime

# NB: ':' in the file name is not portable to Windows.
now = datetime.now()
file_out = now.strftime("%Y-%m-%d %H:%M:%S") + ".log"

logging.basicConfig(
    level=logging.INFO,
    filename=file_out,
    filemode='w',
    format='%(asctime)s - %(name)s - %(process)d - %(levelname)s - %(message)s'
)

logging.warning('This will get logged into the file')
logging.info('This is an info msg')

# filemode defaults to 'a' (append); level defaults to WARNING
<filename>tests/test_optimize_model.py
import unittest

import torch

from src.dqn.optimize_model import ComputeLoss
from src.dqn.dqn import DQN
from src.util_types import Transition

class TestComputeLoss(unittest.TestCase):
    def setUp(self):
        self.policy_net = DQN()
        self.target_net = DQN()
        gamma = 0.95
        batch_size = 1
        self.cl = ComputeLoss(batch_size, gamma)

    def test_same_loss(self):
        state = ((1, 1), (2, 2))
        next_state = ((1, 1), (2, 2))
        reward = 1
        action = torch.Tensor([2])[0]
        memory = [Transition(state, action, next_state, reward)]

        loss1 = self.cl(memory, self.policy_net, self.target_net)
        loss2 = self.cl(memory, self.policy_net, self.target_net)
        self.assertEqual(loss1, loss2)

if __name__ == '__main__':
    unittest.main()
""" =================================================== Ammonia inversion transition: Hyperfine-only fitter =================================================== .. moduleauthor:: Adam Ginsburg <[email protected]> Module API ^^^^^^^^^^ """ import numpy as np import matplotlib.cbook as mpcb import copy import collections from ...mpfit import mpfit from . import fitter from . import hyperfine from . import radex_modelgrid from . import model from .ammonia_constants import (line_names, freq_dict, aval_dict, ortho_dict, voff_lines_dict, tau_wts_dict, line_labels) from astropy import constants from astropy import units as u ckms = constants.c.to(u.km/u.s).value # sanity check: for linename in line_names: assert len(voff_lines_dict[linename]) == len(tau_wts_dict[linename]) # For each individual inversion line, create a Hyperfine model nh3_vtau = {linename: hyperfine.hyperfinemodel({lineid:lineid for lineid,name in enumerate(voff_lines_dict[linename])}, {lineid:voff for lineid,voff in enumerate(voff_lines_dict[linename])}, {lineid:freq_dict[linename]*(1-voff/ckms) for lineid,voff in enumerate(voff_lines_dict[linename])}, {lineid:tauwt for lineid,tauwt in enumerate(tau_wts_dict[linename])}, {lineid:sum(tau_wts_dict[linename]) for lineid,voff in enumerate(voff_lines_dict[linename])}, ) for linename in line_names} def nh3_vtau_multimodel_generator(linenames): """ If you want to use multiple hyperfines for the same spectrum, use this generator. It is useful if you want N independent tau/tex values but the same velocity and linewidth Parameters ---------- linenames : list A list of line names from the set ('oneone', ..., 'eighteight') Returns ------- model : `model.SpectralModel` A SpectralModel class build from N different metastable inversion hyperfine models """ nlines = len(linenames) def nh3_vtau_multimodel(xarr, velocity, width, *args): assert len(args) == nlines*2 models = [nh3_vtau[linename].hyperfine(xarr, Tex=tex, tau=tau, xoff_v=velocity, width=width) for linename,tex,tau in zip(linenames, args[::2], args[1::2])] return np.array(models).sum(axis=0) mod = model.SpectralModel(nh3_vtau_multimodel, 2+nlines*2, parnames=['center','width'] + [x for ln in linenames for x in ('tex{0}'.format(ln), 'tau{0}'.format(ln)) ], parlimited=[(False,False), (True,False),] + [(True, False),]*2*nlines, parlimits=[(0,0), ]*(2+2*nlines), shortvarnames=["v","\\sigma",] + [x for ln in linenames for x in ('T_{{ex}}({0})'.format(line_labels[ln]), '\\tau({0})'.format(line_labels[ln])) ], fitunit='Hz') return mod
<filename>LeftNavBarExample/LeftNavBarLibrary/src/main/java/com/example/google/tv/leftnavbar/TitleBarView.java<gh_stars>1000+ /* * Copyright (C) 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.example.google.tv.leftnavbar; import android.content.Context; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.text.TextUtils; import android.util.AttributeSet; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.view.ViewParent; import android.view.Window; import android.widget.ImageView; import android.widget.ProgressBar; import android.widget.RelativeLayout; import android.widget.TextView; /** * Holds the various widgets of the title bar. */ public class TitleBarView extends RelativeLayout { private final VisibilityController mVisibilityController; private boolean mIsLegacy; private boolean mAnimationsEnabled; private TextView mTitle; private TextView mSubtitle; private ImageView mLeftIcon; private ImageView mRightIcon; private ProgressBar mCircularProgress; private ProgressBar mHorizontalProgress; private int mTitleResource; private int mSubtitleResource; Context mContext; public TitleBarView(Context context, AttributeSet attrs) { super(context, attrs, 0); mContext=context; mVisibilityController = new VisibilityController(this); TypedArray a = context.obtainStyledAttributes(attrs, new int[] { android.R.attr.windowTitleStyle, android.R.attr.defaultValue }); mIsLegacy = a.getBoolean(a.getIndex(1 /* defaultValue */), false); if (mIsLegacy) { mTitleResource = a.getResourceId(a.getIndex(0 /* windowTitleStyle */), 0); } else { a.recycle(); a = context.obtainStyledAttributes(null, new int[] { android.R.attr.titleTextStyle, android.R.attr.subtitleTextStyle }, android.R.attr.actionBarStyle, 0); mTitleResource = a.getResourceId(a.getIndex(0 /* titleTextStyle */), 0); mSubtitleResource = a.getResourceId(a.getIndex(1 /* subtitleTextStyle */), 0); } a.recycle(); } @Override protected void onFinishInflate() { super.onFinishInflate(); if (getChildCount() == 0) { // Set up the default content. LayoutInflater.from(mContext).inflate(R.layout.lib_title_bar, this, true); } mTitle = (TextView) findViewById(R.id.title); mSubtitle = (TextView) findViewById(R.id.subtitle); mLeftIcon = (ImageView) findViewById(R.id.left_icon); mRightIcon = (ImageView) findViewById(R.id.right_icon); mCircularProgress = (ProgressBar) findViewById(R.id.progress_circular); if (mCircularProgress != null) { mCircularProgress.setIndeterminate(true); // Cannot be done in XML... 
} mHorizontalProgress = (ProgressBar) findViewById(R.id.progress_horizontal); if (mIsLegacy) { setTextStyle(mTitle, mTitleResource); disableSubtitle(); } else { setTextStyle(mTitle, mTitleResource); setTextStyle(mSubtitle, mSubtitleResource); disableLeftIcon(); disableRightIcon(); } } private void setTextStyle(TextView view, int style) { if (style != 0) { view.setTextAppearance(getContext(), style); } } public void setTitle(CharSequence text) { mTitle.setText(text); } public void setTitleColor(int color) { mTitle.setTextColor(color); } public void setLeftIcon(Drawable drawable, int alpha) { setIcon(mLeftIcon, drawable, alpha); } public void setRightIcon(Drawable drawable, int alpha) { setIcon(mRightIcon, drawable, alpha); } private void setIcon(ImageView view, Drawable drawable, int alpha) { if (view == null) { return; } if (drawable != null) { drawable.setAlpha(alpha); view.setImageDrawable(drawable); view.setVisibility(View.VISIBLE); } else { view.setVisibility(View.GONE); } } public void setHorizontalProgress(int value) { if (mHorizontalProgress == null) { return; } switch (value) { case Window.PROGRESS_VISIBILITY_ON: mHorizontalProgress.setVisibility(View.VISIBLE); break; case Window.PROGRESS_VISIBILITY_OFF: mHorizontalProgress.setVisibility(View.GONE); break; case Window.PROGRESS_INDETERMINATE_ON: mHorizontalProgress.setIndeterminate(true); break; case Window.PROGRESS_INDETERMINATE_OFF: mHorizontalProgress.setIndeterminate(false); break; default: if (Window.PROGRESS_START <= value && value <= Window.PROGRESS_END) { mHorizontalProgress.setProgress(value - Window.PROGRESS_START); } else if (Window.PROGRESS_SECONDARY_START <= value && value <= Window.PROGRESS_SECONDARY_END) { mHorizontalProgress.setSecondaryProgress( value - Window.PROGRESS_SECONDARY_START); } break; } } public boolean isHorizontalProgressVisible() { return mHorizontalProgress != null && mHorizontalProgress.getVisibility() == VISIBLE; } public void setCircularProgress(int value) { if (mCircularProgress == null) { return; } switch (value) { case Window.PROGRESS_VISIBILITY_ON: mCircularProgress.setVisibility(View.VISIBLE); break; case Window.PROGRESS_VISIBILITY_OFF: mCircularProgress.setVisibility(View.GONE); break; default: break; } } public void disableLeftIcon() { removeFromParent(mLeftIcon); mLeftIcon = null; } public void disableRightIcon() { removeFromParent(mRightIcon); mRightIcon = null; } public void disableHorizontalProgress() { removeFromParent(mHorizontalProgress); mHorizontalProgress = null; } public void disableCircularProgress() { removeFromParent(mCircularProgress); mCircularProgress = null; } private void disableSubtitle() { removeFromParent(mSubtitle); mSubtitle = null; } private static void removeFromParent(View view) { if (view == null) { return; } ViewParent parent = view.getParent(); if (parent != null) { ((ViewGroup) parent).removeView(view); } } public CharSequence getTitle() { return mTitle.getText(); } public void setSubtitle(CharSequence text) { mSubtitle.setText(text); mSubtitle.setVisibility(TextUtils.isEmpty(text) ? GONE : VISIBLE); } public CharSequence getSubtitle() { return mSubtitle.getText(); } public void setAnimationsEnabled(boolean enabled) { mAnimationsEnabled = enabled; } public void setVisible(boolean visible, boolean animated) { mVisibilityController.setVisible(visible, animated && mAnimationsEnabled); } public boolean isVisible() { return mVisibilityController.isVisible(); } public int getApparentHeight() { return isVisible() ? 
getContext().getResources().getDimensionPixelSize(R.dimen.title_bar_apparent_height) : 0; } public void setProgressVisible(boolean visible) { setCircularProgress(visible ? Window.PROGRESS_VISIBILITY_ON : Window.PROGRESS_VISIBILITY_OFF); } }
<gh_stars>0 #ifndef LOCAL_FILES_H_ #define LOCAL_FILES_H_ #include <iostream> #include <fstream> #include <regex> #include <vector> #include <string> #include <opencv2/core.hpp> #include <opencv2/opencv.hpp> #include "slarray.h" class LocalFileDealer { public: LocalFileDealer() : length(0), width(0), height(0) {}; void popMatrix(Matrix *mat); void pushMatrix(const Matrix &mat); void clear(); void read(const std::string &path); void write(const std::string &path); private: static const std::string identifier; static const int headerSize; LocalFileDealer(const LocalFileDealer &obj); void writeHeader(); void readHeader(); void writeData(const Matrix &mat); void readData(); int length; int width; int height; std::vector<char> header; std::vector<char> data; }; class ImageBuilder { public: explicit ImageBuilder(const std::string &path); void write( const std::string &path, const Matrix &cell, const Matrix &rgrid, const Matrix &cgrid, const Matrix &vertex) const; private: void drawCell(int row, int col, int status, cv::Mat* image) const; void drawRow(int row, int col, int status, cv::Mat* image) const; void drawCol(int row, int col, int status, cv::Mat* image) const; void drawVertex(int row, int col, int status, cv::Mat* image) const; void setImagePixel( int rbegin, int cbegin, const Matrix &mat, cv::Mat* Image) const; static const cv::Vec3b foreColor; static const cv::Vec3b backColor; // 0 - 4 : cell // 5 - 7 : row // 8 - 10 : col // 11 - 17 : vertex Matrix data[18]; }; #endif
# Remove every "WUB" and collapse the resulting gaps to single spaces
# (the classic "Dubstep" string-decoding task).
print(' '.join(input().replace('WUB', ' ').split()))
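For comparison, the same transformation as a C++ sketch (illustrative only; e.g. "WUBAWUBWUBBWUB" decodes to "A B"):

#include <iostream>
#include <sstream>
#include <string>

int main() {
    std::string s;
    std::cin >> s;
    // Replace every "WUB" with a space...
    for (size_t pos = 0; (pos = s.find("WUB", pos)) != std::string::npos; )
        s.replace(pos, 3, " ");
    // ...then let stream extraction collapse runs of whitespace.
    std::istringstream in(s);
    std::string word, out;
    while (in >> word) {
        if (!out.empty()) out += ' ';
        out += word;
    }
    std::cout << out << '\n';
}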
// Load Log Configuration And Build Logger // Notice that we need to load the configuration into a new struct // and test the creation of the logger, before we apply the updates // to the existing zap.Config. It is very important that the changes // are applied to the existing zap.Config rather than using the new // one so that child loggers get the configuration applied as well. func initLogger(configFilePath string) (*zap.Logger, error) { jsonFile, err := os.Open(configFilePath) if err != nil { fmt.Println("Unable To Load Logging Configuration File", err) return nil, err } defer jsonFile.Close() var newCfg zap.Config if err := json.NewDecoder(jsonFile).Decode(&newCfg); err != nil { fmt.Println("Unable To Parse Logging Configuration File", err) return nil, err } newLogger, err := newCfg.Build() if err != nil { fmt.Println("Unable To Build Logger From Configuration", err) return nil, err } jsonFile.Seek(0, 0) if err := json.NewDecoder(jsonFile).Decode(&cfg); err != nil { fmt.Println("Unable To Parse Logging Configuration File", err) return nil, err } newLogger, err = cfg.Build() return newLogger, err }
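The two-phase decode in initLogger — validate on a scratch config, then re-decode into the long-lived one so existing child loggers see the update — is a general validate-then-commit pattern. A hedged C++ sketch of the same idea (all names here are illustrative stand-ins, not a real library):

#include <stdexcept>
#include <string>

struct LogConfig {
    std::string level = "info";
    // ... fields shared with already-created child loggers ...
};

// Hypothetical stand-in for real JSON decoding + validation; throws on bad input.
LogConfig parse(const std::string& text) {
    if (text.empty()) throw std::runtime_error("empty config");
    LogConfig c;
    c.level = text;
    return c;
}

// Validate on a scratch copy first; mutate the long-lived object only on
// success, so anything holding a reference to `live` observes the update in place.
bool reload(LogConfig& live, const std::string& text) {
    try {
        LogConfig candidate = parse(text);
        live = candidate;
        return true;
    } catch (const std::exception&) {
        return false;  // keep the previous config on any error
    }
}

int main() {
    LogConfig cfg;
    return reload(cfg, "debug") ? 0 : 1;
}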
/* Return TRUE or FALSE depending on whether the binary operator meets the appropriate constraints. */ int ix86_binary_operator_ok (enum rtx_code code, enum machine_mode mode, rtx operands[3]) { rtx dst = operands[0]; rtx src1 = operands[1]; rtx src2 = operands[2]; if (MEM_P (src1) && MEM_P (src2)) return 0; if (ix86_swap_binary_operands_p (code, mode, operands)) { rtx temp = src1; src1 = src2; src2 = temp; } if (MEM_P (dst) && !rtx_equal_p (dst, src1)) return 0; if (CONSTANT_P (src1)) return 0; if (MEM_P (src1) && !rtx_equal_p (dst, src1)) return 0; return 1; }
export interface Authentication { auth: (params: Authentication.Params) => Promise<Authentication.Result> } export namespace Authentication { export type Params = { accountId: string userId: string role: string } export type Result = { accessToken: string } }
Democrat Elizabeth Warren ousted Republican incumbent Sen. Scott Brown in the Massachusetts Senate election Tuesday night, Fox News projects, ending one of the most dramatic races of the 2012 cycle.

“This victory belongs to you. You did this,” Warren told her supporters Tuesday night in a speech echoing familiar stump speeches from her campaign.

The win in Massachusetts added to a string of Senate victories for the Democrats, including Rep. Chris Murphy winning an open seat in Connecticut over Republican wrestling magnate Linda McMahon; Democrat Joe Donnelly beating Republican Richard Mourdock in Indiana; and Rep. Tammy Baldwin beating Republican Tommy Thompson for the open Senate seat in Wisconsin. Democrats had a 53-47 Senate majority going into the election.

"Tonight is a great night for the people of Massachusetts and for the middle class across the country," Sen. Patty Murray of Washington, chairwoman of the Democratic Senatorial Campaign Committee, said in a written statement.

The Massachusetts campaign was marked by many bitter moments and constant attacks, not the least of which were Brown’s accusations that Warren had erroneously claimed she was part Cherokee to take advantage of racial preferences during her academic career. A professor at Harvard Law School, Warren had been touted as part of the prestigious school’s diversity hiring during the 1990s.

Throughout the last several weeks, Warren was forced to answer calls to prove this Native American heritage, which had even been questioned by the Cherokee tribes at one point. In the end, the affair had proven to be a big distraction to both campaigns, and not the major boost Brown had hoped for.

The mudslinging had prompted both candidates at one point to renounce the negative advertising launched by outside groups on their behalf. They asked the groups to stop, and pledged that if any ran attack ads anyway the candidate benefiting would dip into campaign funds to make a donation to a charity of their opponents’ choice. This was tested, and Brown ended up donating some $37,000 as a result. But even a major push from national Republican and conservative groups could not push Brown over the finish line.

Defying the odds and campaigning around the state in a pickup truck as the “everyman,” Brown drew national attention as he captured the seat left open by the late Democratic Sen. Ted Kennedy in 2010. He quickly became an icon in the party, seen as a rising star with broad appeal.

But since his election, Brown has settled into the Senate as a more moderate figure, much like his New England counterpart, Sen. Olympia Snowe, R-Maine, who retired this year out of disgust with the partisanship she said became untenable on Capitol Hill. Brown's move to the center did not help enough, it seems, as Harvard professor turned consumer advocate Warren was able to lock him into a tight, high-pitched contest, considered a toss-up for months.

For her part, Warren was already used to controversy and partisan politics: After helping to design the new Consumer Financial Protection Bureau under the Obama administration, she was blocked from heading it permanently by Republicans who believed she was too biased against business.
<gh_stars>0 package org.swtk.eng.preprocess.patterns; import java.util.regex.Pattern; public class FigureAttributionsPatterns { /* Figure 16 * (Fig. 12A) * (Fig. 10F) * (Figs. 2B and 3) * Fig. 16 * Figs. 6 */ public static Pattern FIG_01 = Pattern.compile("\\(figure [0-9]+[a-z]*\\)", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); public static Pattern[] getPatterns() { return new Pattern[] { FIG_01 }; } }
Denver, Colorado (CNN) -- When Shay Kelley lost her marketing job she got worried. When she lost her home and her car she got mad.

"I went off into the woods and I started yelling at God," she says. "I didn't know why God would lead me up to this point in my life just to have me left with nothing."

"I was like, 'Just tell me what my purpose is, tell me why I'm here and if you'll just tell me I'll work harder than for anything I have ever worked for anything else in my entire life.' "

Within weeks she had her answer: Travel to all 50 states in 50 weeks. Collect canned goods for charities along the way and take a ton of pictures. She has dubbed it Project 50/50.

She stayed with friends while she waited tables and got together enough money to buy "Bubba," her 1984 Ford pickup truck. She packed her camera, which she calls "Roxy," and her dog, Zu Zu, and hit the road.

She began on New Year's Day in South Carolina, randomly going door to door to collect canned goods. "I set a goal of 200 cans a week, which doesn't sound like a lot, but the premise is [that] doing a little bit adds up to a lot," Kelley says. "After a year, [that's] 10,000 canned food items."

She began to meet homeless people as she dropped off the canned goods, and she says they have surprised her with their generosity. She met Donald, a retired Navy sailor, at a library in South Carolina. "He invited me to go to lunch to buy me a hot meal because I had been eating PowerBars for three days," Kelley says. "I found out after he left -- after he paid the tab and paid my meter -- that Donald was homeless, that he was actually living in the shelter."

"That was the first week when I learned the people with the least tend to give the most."

Donald was one of the first people she photographed. She posts her pictures on her website and Facebook page as she goes. She has more than 1,000 Facebook fans following her travels.

One of those Facebook followers is Laurie Holleman Sherrod, who contacted Kelley with an unusual request: She asked Kelley if she could find her son, Trey. The last time she heard, he was living on the streets in Santa Cruz, California.

"I thought that's crazy, how do you find one homeless person in an entire city?" Kelley recalled. But she agreed to try and sure enough a few weeks later she happened upon a nice young man on the streets of Santa Cruz. "And then here I am sitting around the table with Trey shooting a video for his mother who lives in South Carolina."

As with everything that has happened to her so far, she credits her faith with guiding her. "It is so important to me that God remains in the forefront of my life," she says. "He leads me. He tells me to go right or go left. I can't really explain that to people, but I don't do anything, God does it all. I'm just standing here."

Through her photos she captures people down on their luck, but not ready to give up. She says it has made her own uncertain future easier to deal with. "I just hope that people who are in really rough situations will realize that God didn't forget about them. God is just trying to prepare them for something even bigger, even greater and even more blessed than they can even imagine."
//! Simple timer that adds the elapsed time to the given `double` upon
//! destruction.
class Timer {
    using Clock = std::chrono::high_resolution_clock;
    using Time = std::chrono::time_point<Clock>;
    using Duration = std::chrono::duration<double>;

public:
    Timer(double &target)
        : target_{target}
        , start_{Clock::now()}
    {
    }

    ~Timer() { target_ += Duration{Clock::now() - start_}.count(); }

    Timer(Timer const &) = delete;
    Timer(Timer &&) = delete;
    Timer &operator=(Timer const &) = delete;
    Timer &operator=(Timer &&) = delete;

private:
    double &target_;
    Time start_;
};
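Typical usage of the Timer above (assuming the class is in scope): time every pass through a scope and accumulate the total into one counter.

#include <cstdio>

double parse_seconds = 0.0;  // accumulates across all calls

void parse_chunk() {
    Timer t{parse_seconds};  // timing starts here...
    // ... the work being measured ...
}                            // ...and the elapsed time is added on scope exit

int main() {
    for (int i = 0; i < 3; ++i) parse_chunk();
    std::printf("total parse time: %f s\n", parse_seconds);
}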
/* Move to the previous word in the prompt text. */ void do_statusbar_prev_word(void) { bool seen_a_word = FALSE, step_forward = FALSE; assert(answer != NULL); while (statusbar_x != 0) { statusbar_x = move_mbleft(answer, statusbar_x); if (is_word_mbchar(answer + statusbar_x, FALSE)) seen_a_word = TRUE; else if (seen_a_word) { step_forward = TRUE; break; } } if (step_forward) statusbar_x = move_mbright(answer, statusbar_x); update_bar_if_needed(); }
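The loop in do_statusbar_prev_word is a two-flag scan: walk left, set seen_a_word once a word character appears, stop on the first separator after that, then step right one position back onto the word's first character. The same logic over a plain ASCII string, as a standalone C++ sketch (isalnum stands in for is_word_mbchar, and single-byte moves stand in for the multibyte helpers):

#include <cctype>
#include <cstdio>
#include <cstring>

// Return the index of the start of the word preceding position x in s.
size_t prev_word_start(const char* s, size_t x) {
    bool seen_a_word = false, step_forward = false;
    while (x != 0) {
        --x;  // single-byte analogue of move_mbleft()
        if (std::isalnum(static_cast<unsigned char>(s[x])))
            seen_a_word = true;
        else if (seen_a_word) {
            step_forward = true;  // overshot onto the separator before the word
            break;
        }
    }
    if (step_forward)
        ++x;  // analogue of move_mbright(): back onto the word's first char
    return x;
}

int main() {
    const char* s = "hello brave world";
    std::printf("%zu\n", prev_word_start(s, std::strlen(s)));  // prints 12
}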
use dfn_candid::{candid, candid_one}; use dfn_protobuf::protobuf; use ed25519_dalek::Keypair; use ic_canister_client::Sender; use ic_nervous_system_common_test_keys::{ TEST_NEURON_1_OWNER_KEYPAIR, TEST_NEURON_1_OWNER_PRINCIPAL, TEST_NEURON_2_OWNER_KEYPAIR, TEST_NEURON_2_OWNER_PRINCIPAL, }; use ic_nns_common::pb::v1::NeuronId; use ic_nns_constants::GOVERNANCE_CANISTER_ID; use ic_nns_governance::pb::v1::{GovernanceError, Neuron, NeuronInfo}; use ic_nns_gtc::der_encode; use ic_nns_gtc::pb::v1::AccountState; use ic_nns_gtc::test_constants::{ TestIdentity, TEST_IDENTITY_1, TEST_IDENTITY_2, TEST_IDENTITY_3, TEST_IDENTITY_4, }; use ic_nns_test_utils::itest_helpers::{ local_test_on_nns_subnet, NnsCanisters, NnsInitPayloadsBuilder, }; use ledger_canister::{ AccountBalanceArgs, AccountIdentifier, Subaccount, Tokens, DEFAULT_TRANSFER_FEE, }; use std::collections::HashSet; use std::convert::TryFrom; use std::sync::Arc; use std::time::SystemTime; /// Seed Round (SR) neurons are released over 48 months in the following tests pub const SR_MONTHS_TO_RELEASE: u8 = 48; /// Early Contributor Tokenholder (ECT) neurons are released over 12 months in /// the following tests pub const ECT_MONTHS_TO_RELEASE: u8 = 12; const TEST_SR_ACCOUNTS: &[(&str, u32); 2] = &[ (TEST_IDENTITY_1.gtc_address, 1200), (TEST_IDENTITY_3.gtc_address, 14500), ]; const TEST_ECT_ACCOUNTS: &[(&str, u32); 2] = &[ (TEST_IDENTITY_2.gtc_address, 8544), (TEST_IDENTITY_4.gtc_address, 3789), ]; /// Test the GTC's `claim_neurons` method (and associated methods /// `account_has_claimed_neurons` and `permanently_lock_account`) #[test] pub fn test_claim_neurons() { local_test_on_nns_subnet(|runtime| async move { let mut nns_init_payload_builder = NnsInitPayloadsBuilder::new(); add_test_gtc_neurons(&mut nns_init_payload_builder); let donate_account_recipient_neuron_id = get_donate_account_recipient_neuron_id(&nns_init_payload_builder); nns_init_payload_builder .genesis_token .donate_account_recipient_neuron_id = Some(donate_account_recipient_neuron_id.clone()); let forward_all_unclaimed_accounts_recipient_neuron_id = get_forward_whitelisted_unclaimed_accounts_recipient_neuron_id( &nns_init_payload_builder, ); nns_init_payload_builder .genesis_token .forward_whitelisted_unclaimed_accounts_recipient_neuron_id = Some(forward_all_unclaimed_accounts_recipient_neuron_id.clone()); let nns_init_payload = nns_init_payload_builder.build(); let identity_1_neuron_ids = nns_init_payload .genesis_token .accounts .get(TEST_IDENTITY_1.gtc_address) .unwrap() .neuron_ids .clone(); assert_eq!(identity_1_neuron_ids.len(), SR_MONTHS_TO_RELEASE as usize); let identity_2_neuron_ids = nns_init_payload .genesis_token .accounts .get(TEST_IDENTITY_2.gtc_address) .unwrap() .neuron_ids .clone(); assert_eq!(identity_2_neuron_ids.len(), ECT_MONTHS_TO_RELEASE as usize); let nns_canisters = NnsCanisters::set_up(&runtime, nns_init_payload).await; assert_neurons_can_only_be_claimed_by_account_owner(&nns_canisters).await; assert_neurons_can_only_be_donated_by_account_owner(&nns_canisters).await; assert_neurons_can_be_donated( &nns_canisters, donate_account_recipient_neuron_id, &*TEST_NEURON_1_OWNER_KEYPAIR, &TEST_IDENTITY_3, ) .await; // Assert that a Seed Round (SR) investor can claim their tokens assert_neurons_can_be_claimed(&nns_canisters, identity_1_neuron_ids, &TEST_IDENTITY_1) .await; // Try to forward the whitelisted account. Note that this should only forward // the whitelisted account so a non-whitelisted account should still be // able to claim afterwards. 
assert_unclaimed_neurons_can_be_forwarded( &nns_canisters, forward_all_unclaimed_accounts_recipient_neuron_id, &*TEST_NEURON_2_OWNER_KEYPAIR, ) .await; // Assert that an Early Contributor Tokenholder (ECT) investor can claim their // tokens assert_neurons_can_be_claimed(&nns_canisters, identity_2_neuron_ids, &TEST_IDENTITY_2) .await; Ok(()) }) } /// At Genesis, calls to `claim_neurons` and `forward_all_unclaimed_accounts` /// should fail, as they both depend on a certain amount of time passing before /// they are able to be called. #[test] pub fn test_gtc_at_genesis() { local_test_on_nns_subnet(|runtime| async move { let mut nns_init_payload_builder = NnsInitPayloadsBuilder::new(); add_test_gtc_neurons(&mut nns_init_payload_builder); // Set the Genesis Moratorium to start now nns_init_payload_builder .genesis_token .genesis_timestamp_seconds = SystemTime::now().elapsed().unwrap().as_secs(); let nns_init_payload = nns_init_payload_builder.build(); let nns_canisters = NnsCanisters::set_up(&runtime, nns_init_payload).await; let gtc = nns_canisters.genesis_token; let sign_cmd = move |msg: &[u8]| Ok(TEST_IDENTITY_1.sign(msg)); let sender = Sender::ExternalHsm { pub_key: der_encode(&TEST_IDENTITY_1.public_key()), sign: Arc::new(sign_cmd), }; // Assert `claim_neurons` fails during the moratorium let claim_neurons_response: Result<Result<Vec<NeuronId>, String>, String> = gtc .update_from_sender( "claim_neurons", candid, (TEST_IDENTITY_1.public_key_hex,), &sender, ) .await; assert!(claim_neurons_response.unwrap().is_err()); // Assert that `TEST_IDENTITY_1` did not claim their neurons let account_has_claimed_neurons_response: Result<Result<AccountState, String>, String> = gtc.update_from_sender( "get_account", candid_one, TEST_IDENTITY_1.gtc_address.to_string(), &sender, ) .await; assert!( !account_has_claimed_neurons_response .unwrap() .unwrap() .has_claimed ); // Assert that `forward_all_unclaimed_accounts` fails let forward_all_unclaimed_accounts_response: Result<Result<(), String>, String> = gtc .update_from_sender( "forward_whitelisted_unclaimed_accounts", candid_one, (), &sender, ) .await; assert!(forward_all_unclaimed_accounts_response.unwrap().is_err()); Ok(()) }) } /// Assert that users can't claim other users' neurons /// /// Identity 3 tries to claim Identity 1's neurons, but fails to do so async fn assert_neurons_can_only_be_claimed_by_account_owner(nns_canisters: &NnsCanisters<'_>) { let gtc = &nns_canisters.genesis_token; let sign_cmd = move |msg: &[u8]| Ok(TEST_IDENTITY_3.sign(msg)); let sender = Sender::ExternalHsm { pub_key: der_encode(&TEST_IDENTITY_3.public_key()), sign: Arc::new(sign_cmd), }; // Assert that one user can't claim another user's neurons let claim_neurons_response: Result<Result<Vec<NeuronId>, String>, String> = gtc .update_from_sender( "claim_neurons", candid, (TEST_IDENTITY_1.public_key_hex,), &sender, ) .await; assert!(claim_neurons_response.unwrap().is_err()); } /// Assert that users can't donate other users' neurons /// /// Identity 3 tries to donate Identity 1's neurons, but fails to do so async fn assert_neurons_can_only_be_donated_by_account_owner(nns_canisters: &NnsCanisters<'_>) { let gtc = &nns_canisters.genesis_token; let sign_cmd = move |msg: &[u8]| Ok(TEST_IDENTITY_3.sign(msg)); let sender = Sender::ExternalHsm { pub_key: der_encode(&TEST_IDENTITY_3.public_key()), sign: Arc::new(sign_cmd), }; // Assert that one user can't claim another user's neurons let donate_account_response: Result<Result<(), String>, String> = gtc .update_from_sender( 
"donate_account", candid, (TEST_IDENTITY_1.public_key_hex,), &sender, ) .await; assert!(donate_account_response.unwrap().is_err()); } /// Assert that any user can forward an unclaimed GTC account. /// /// This assumes the window after Genesis, during which the forwarding of /// unclaimed accounts is forbidden, has expired. async fn assert_unclaimed_neurons_can_be_forwarded( nns_canisters: &NnsCanisters<'_>, custodian_neuron_id: NeuronId, custodian_key_pair: &Keypair, ) { let gtc = &nns_canisters.genesis_token; let governance = &nns_canisters.governance; let ledger = &nns_canisters.ledger; let sign_cmd = move |msg: &[u8]| Ok(TEST_IDENTITY_1.sign(msg)); let sender = Sender::ExternalHsm { pub_key: der_encode(&TEST_IDENTITY_1.public_key()), sign: Arc::new(sign_cmd), }; // Assert that `TEST_IDENTITY_4` has not yet claimed or donated their neurons let get_account_response: Result<Result<AccountState, String>, String> = gtc .update_from_sender( "get_account", candid_one, TEST_IDENTITY_4.gtc_address.to_string(), &sender, ) .await; let account_before_forward = get_account_response.unwrap().unwrap(); assert!(!account_before_forward.has_claimed); assert!(!account_before_forward.has_donated); assert!(!account_before_forward.has_forwarded); // Calculate how much ICP is expected to be forwarded to the custodian // neuron. let expected_custodian_account_balance_increase: Tokens = Tokens::from_e8s( Tokens::from_tokens(account_before_forward.icpts as u64) .unwrap() .get_e8s() - (DEFAULT_TRANSFER_FEE.get_e8s() * account_before_forward.neuron_ids.len() as u64), ); // Get the custodian neuron and its ledger account, so that we can later // assert that the account value has increased (as the result of // forwarding). let get_full_neuron_response: Result<Result<Neuron, GovernanceError>, String> = governance .update_from_sender( "get_full_neuron", candid_one, custodian_neuron_id.id, &Sender::from_keypair(custodian_key_pair), ) .await; let custodian_neuron = get_full_neuron_response.unwrap().unwrap(); let custodian_subaccount = Subaccount::try_from(&custodian_neuron.account[..]).unwrap(); let custodian_account = AccountIdentifier::new(GOVERNANCE_CANISTER_ID.get(), Some(custodian_subaccount)); let account_balance_response: Result<Tokens, String> = ledger .query_from_sender( "account_balance_pb", protobuf, AccountBalanceArgs { account: custodian_account, }, &Sender::from_keypair(custodian_key_pair), ) .await; let custodian_account_balance = account_balance_response.unwrap(); let expected_custodian_account_balance_after_forward = (custodian_account_balance + expected_custodian_account_balance_increase).unwrap(); // Have `TEST_IDENTITY_1` forward `TEST_IDENTITY_2`'s and `TEST_IDENTITY_4`'s // neurons let forward_whitelisted_unclaimed_accounts_response: Result<Result<(), String>, String> = gtc .update_from_sender( "forward_whitelisted_unclaimed_accounts", candid_one, (), &sender, ) .await; assert!(forward_whitelisted_unclaimed_accounts_response .unwrap() .is_ok()); // Assert that the forward updated the account state as expected let get_account_response: Result<Result<AccountState, String>, String> = gtc .update_from_sender( "get_account", candid_one, TEST_IDENTITY_4.gtc_address.to_string(), &sender, ) .await; let account_after_forward = get_account_response.unwrap().unwrap(); assert!(!account_after_forward.has_claimed); assert!(!account_after_forward.has_donated); assert!(account_after_forward.has_forwarded); assert_eq!(account_after_forward.authenticated_principal_id, None); assert_eq!( 
account_after_forward.successfully_transferred_neurons.len(),
        account_before_forward.neuron_ids.len(),
    );
    // Note: non-whitelisted accounts have not been forwarded.

    // Assert that the custodian neuron's ledger account has received the
    // forwarded funds
    let account_balance_response: Result<Tokens, String> = ledger
        .query_from_sender(
            "account_balance_pb",
            protobuf,
            AccountBalanceArgs {
                account: custodian_account,
            },
            &Sender::from_keypair(custodian_key_pair),
        )
        .await;
    let actual_custodian_account_balance_after_forward = account_balance_response.unwrap();
    assert_eq!(
        expected_custodian_account_balance_after_forward,
        actual_custodian_account_balance_after_forward
    );

    // Assert that the custodian neuron's stake matches its ledger account
    // balance
    let get_full_neuron_response: Result<Result<Neuron, GovernanceError>, String> = governance
        .update_from_sender(
            "get_full_neuron",
            candid_one,
            custodian_neuron_id.id,
            &Sender::from_keypair(custodian_key_pair),
        )
        .await;
    let custodian_neuron = get_full_neuron_response.unwrap().unwrap();
    let custodian_neuron_stake = Tokens::from_e8s(custodian_neuron.cached_neuron_stake_e8s);
    assert_eq!(
        custodian_neuron_stake,
        actual_custodian_account_balance_after_forward
    );
}

/// Assert that GTC neurons can be donated by the owner of the GTC account
async fn assert_neurons_can_be_donated(
    nns_canisters: &NnsCanisters<'_>,
    custodian_neuron_id: NeuronId,
    custodian_key_pair: &'static Keypair,
    test_identity: &'static TestIdentity,
) {
    let gtc = &nns_canisters.genesis_token;
    let governance = &nns_canisters.governance;
    let ledger = &nns_canisters.ledger;

    let sign_cmd = move |msg: &[u8]| Ok(test_identity.sign(msg));
    let sender = Sender::ExternalHsm {
        pub_key: der_encode(&test_identity.public_key()),
        sign: Arc::new(sign_cmd),
    };

    // Assert that `test_identity` has not yet claimed or donated their neurons
    let get_account_response: Result<Result<AccountState, String>, String> = gtc
        .update_from_sender(
            "get_account",
            candid_one,
            test_identity.gtc_address.to_string(),
            &sender,
        )
        .await;
    let account_before_donation = get_account_response.unwrap().unwrap();
    assert!(!account_before_donation.has_claimed);
    assert!(!account_before_donation.has_donated);
    assert!(!account_before_donation.has_forwarded);

    // Calculate how much ICP is expected to be donated to the custodian
    // neuron.
    let expected_custodian_account_balance_increase: Tokens = Tokens::from_e8s(
        Tokens::from_tokens(account_before_donation.icpts as u64)
            .unwrap()
            .get_e8s()
            - (DEFAULT_TRANSFER_FEE.get_e8s() * account_before_donation.neuron_ids.len() as u64),
    );

    // Get the custodian neuron and its ledger account, so that we can later
    // assert that the account value has increased (as the result of a
    // donation).
let get_full_neuron_response: Result<Result<Neuron, GovernanceError>, String> = governance .update_from_sender( "get_full_neuron", candid_one, custodian_neuron_id.id, &Sender::from_keypair(custodian_key_pair), ) .await; let custodian_neuron = get_full_neuron_response.unwrap().unwrap(); let custodian_subaccount = Subaccount::try_from(&custodian_neuron.account[..]).unwrap(); let custodian_account = AccountIdentifier::new(GOVERNANCE_CANISTER_ID.get(), Some(custodian_subaccount)); let account_balance_response: Result<Tokens, String> = ledger .query_from_sender( "account_balance_pb", protobuf, AccountBalanceArgs { account: custodian_account, }, &Sender::from_keypair(custodian_key_pair), ) .await; let custodian_account_balance = account_balance_response.unwrap(); let expected_custodian_account_balance_after_donation = (custodian_account_balance + expected_custodian_account_balance_increase).unwrap(); // Have `test_identity` donate their neurons let donate_account_response: Result<Result<(), String>, String> = gtc .update_from_sender( "donate_account", candid_one, test_identity.public_key_hex.to_string(), &sender, ) .await; assert!(donate_account_response.unwrap().is_ok()); // Assert that `test_identity` has donated their neurons let get_account_response: Result<Result<AccountState, String>, String> = gtc .update_from_sender( "get_account", candid_one, test_identity.gtc_address.to_string(), &sender, ) .await; let account_after_donation = get_account_response.unwrap().unwrap(); assert!(account_after_donation.has_donated); assert_eq!( account_after_donation.authenticated_principal_id, Some(test_identity.principal_id()) ); assert_eq!( account_after_donation .successfully_transferred_neurons .len(), account_before_donation.neuron_ids.len(), ); // Assert that donated neurons can't be claimed let claim_neurons_response: Result<Result<Vec<NeuronId>, String>, String> = gtc .update_from_sender( "claim_neurons", candid, (test_identity.public_key_hex,), &sender, ) .await; assert!(claim_neurons_response.unwrap().is_err()); // Assert calling donate a second time fails let donate_account_response: Result<Result<(), String>, String> = gtc .update_from_sender( "donate_account", candid_one, test_identity.public_key_hex.to_string(), &sender, ) .await; assert!(donate_account_response.unwrap().is_err()); // Assert that the custodian neuron's ledger account has received the // donated funds let account_balance_response: Result<Tokens, String> = ledger .query_from_sender( "account_balance_pb", protobuf, AccountBalanceArgs { account: custodian_account, }, &Sender::from_keypair(custodian_key_pair), ) .await; let actual_custodian_account_balance_after_donation = account_balance_response.unwrap(); assert_eq!( expected_custodian_account_balance_after_donation, actual_custodian_account_balance_after_donation ); // Assert that the custodian neuron's stake matches its ledger account // balance let get_full_neuron_response: Result<Result<Neuron, GovernanceError>, String> = governance .update_from_sender( "get_full_neuron", candid_one, custodian_neuron_id.id, &Sender::from_keypair(custodian_key_pair), ) .await; let custodian_neuron = get_full_neuron_response.unwrap().unwrap(); let custodian_neuron_stake = Tokens::from_e8s(custodian_neuron.cached_neuron_stake_e8s); assert_eq!( custodian_neuron_stake, actual_custodian_account_balance_after_donation ); } /// Test that the given `test_identity` can claim their neurons, expected to /// be `expected_neuron_ids`. 
async fn assert_neurons_can_be_claimed(
    nns_canisters: &NnsCanisters<'_>,
    expected_neuron_ids: Vec<NeuronId>,
    test_identity: &'static TestIdentity,
) {
    let gtc = &nns_canisters.genesis_token;
    let governance = &nns_canisters.governance;

    let sign_cmd = move |msg: &[u8]| Ok(test_identity.sign(msg));
    let sender = Sender::ExternalHsm {
        pub_key: der_encode(&test_identity.public_key()),
        sign: Arc::new(sign_cmd),
    };

    // Assert that `test_identity` has not yet claimed their neurons
    let get_account_response: Result<Result<AccountState, String>, String> = gtc
        .update_from_sender(
            "get_account",
            candid_one,
            test_identity.gtc_address.to_string(),
            &sender,
        )
        .await;
    assert!(!get_account_response.unwrap().unwrap().has_claimed);

    // Assert that `test_identity` does not control any neurons in the Governance
    // canister
    let get_neuron_ids_response: Result<Vec<u64>, String> = governance
        .update_from_sender("get_neuron_ids", candid, (), &sender)
        .await;
    assert!(get_neuron_ids_response.unwrap().is_empty());

    // Given a sample neuron ID from `expected_neuron_ids`, assert that we can
    // get this neuron's info via the `get_neuron_info` Governance method,
    // but `get_full_neuron` returns an error (as `test_identity` does not
    // control the neuron yet)
    let sample_neuron_id = expected_neuron_ids.get(0).unwrap().id;
    let get_neuron_info_response: Result<Result<NeuronInfo, GovernanceError>, String> =
        governance
            .update_from_sender("get_neuron_info", candid_one, sample_neuron_id, &sender)
            .await;
    assert!(get_neuron_info_response.unwrap().is_ok());
    let get_full_neuron_response: Result<Result<Neuron, GovernanceError>, String> = governance
        .update_from_sender("get_full_neuron", candid_one, sample_neuron_id, &sender)
        .await;
    assert!(get_full_neuron_response.unwrap().is_err());

    // Call the GTC to claim neurons for `test_identity`
    let gtc_response: Result<Result<Vec<NeuronId>, String>, String> = gtc
        .update_from_sender(
            "claim_neurons",
            candid,
            (test_identity.public_key_hex,),
            &sender,
        )
        .await;
    let returned_neuron_ids = gtc_response.unwrap().unwrap();

    let get_neuron_ids_response: Result<Vec<u64>, String> = governance
        .update_from_sender("get_neuron_ids", candid, (), &sender)
        .await;
    let controlled_neuron_ids: Vec<NeuronId> = get_neuron_ids_response
        .unwrap()
        .into_iter()
        .map(|id| NeuronId { id })
        .collect();

    // Assert that the neuron IDs:
    //   * returned by the GTC's `claim_neurons` method
    //   * returned by the Governance's `get_neuron_ids` method
    //   * given by `expected_neuron_ids`
    // all contain the exact same set of neuron IDs
    let returned_neuron_ids_set: HashSet<NeuronId> =
        returned_neuron_ids.iter().cloned().collect();
    let expected_neuron_ids_set: HashSet<NeuronId> =
        expected_neuron_ids.iter().cloned().collect();
    let controlled_neuron_ids_set: HashSet<NeuronId> =
        controlled_neuron_ids.iter().cloned().collect();
    assert_eq!(returned_neuron_ids_set, expected_neuron_ids_set);
    assert_eq!(controlled_neuron_ids_set, expected_neuron_ids_set);

    // Assert that `test_identity` has now claimed their neurons
    let get_account_response: Result<Result<AccountState, String>, String> = gtc
        .update_from_sender(
            "get_account",
            candid_one,
            test_identity.gtc_address.to_string(),
            &sender,
        )
        .await;
    assert!(get_account_response.unwrap().unwrap().has_claimed);

    // Assert that calling `get_full_neuron` with `sample_neuron_id` now
    // returns successfully, as `test_identity` now controls this neuron
    let governance_response: Result<Result<Neuron, GovernanceError>, String> = governance
        .update_from_sender("get_full_neuron", candid_one,
sample_neuron_id, &sender) .await; let neuron = governance_response.unwrap().unwrap(); assert_eq!(neuron.controller, Some(test_identity.principal_id())); // Assert that calling `claim_neurons` a second time returns the same set // of neuron IDs let gtc_response_2: Result<Result<Vec<NeuronId>, String>, String> = gtc .update_from_sender( "claim_neurons", candid, (test_identity.public_key_hex,), &sender, ) .await; let returned_neuron_ids_2 = gtc_response_2.unwrap().unwrap(); let returned_neuron_ids_2_set: HashSet<NeuronId> = returned_neuron_ids_2.iter().cloned().collect(); assert_eq!(returned_neuron_ids_2_set, expected_neuron_ids_set); // Assert that `test_identity`'s principal has been set in their GTC account let get_account_response: Result<Result<AccountState, String>, String> = gtc .update_from_sender( "get_account", candid_one, test_identity.gtc_address.to_string(), &sender, ) .await; assert_eq!( get_account_response .unwrap() .unwrap() .authenticated_principal_id, Some(test_identity.principal_id()) ); // Assert that a claimed neuron is pre-aged let get_neuron_info_response: Result<Result<NeuronInfo, GovernanceError>, String> = governance .update_from_sender("get_neuron_info", candid_one, sample_neuron_id, &sender) .await; let neuron_info = get_neuron_info_response.unwrap().unwrap(); assert!(neuron_info.age_seconds >= 86400 * 18 * 30); } pub fn add_test_gtc_neurons(payload_builder: &mut NnsInitPayloadsBuilder) { payload_builder.genesis_token.genesis_timestamp_seconds = 1; payload_builder.genesis_token.sr_months_to_release = Some(SR_MONTHS_TO_RELEASE); payload_builder.genesis_token.ect_months_to_release = Some(ECT_MONTHS_TO_RELEASE); payload_builder .genesis_token .add_sr_neurons(TEST_SR_ACCOUNTS); payload_builder .genesis_token .add_ect_neurons(TEST_ECT_ACCOUNTS); payload_builder .governance .add_gtc_neurons(payload_builder.genesis_token.get_gtc_neurons()); payload_builder .genesis_token .add_forward_whitelist(&[TEST_IDENTITY_4.gtc_address]); payload_builder.governance.with_test_neurons(); } /// Return the neuron ID of the neuron that the GTC method `donate_account` /// should donate to. fn get_donate_account_recipient_neuron_id(payload_builder: &NnsInitPayloadsBuilder) -> NeuronId { let id = *payload_builder .governance .proto .neurons .iter() .find(|(_, neuron)| neuron.controller == Some(*TEST_NEURON_1_OWNER_PRINCIPAL)) .unwrap() .0; NeuronId { id } } /// Return the neuron ID of the neuron that the GTC method /// `forward_whitelisted_unclaimed_accounts` should donate to. fn get_forward_whitelisted_unclaimed_accounts_recipient_neuron_id( payload_builder: &NnsInitPayloadsBuilder, ) -> NeuronId { let id = *payload_builder .governance .proto .neurons .iter() .find(|(_, neuron)| neuron.controller == Some(*TEST_NEURON_2_OWNER_PRINCIPAL)) .unwrap() .0; NeuronId { id } }
# coding=utf-8
from flask import jsonify, g

from . import api
from .errors import bad_request, forbidden
from .. import db
from ..models import Comment
from ..emails import send_comment_notification


# noinspection PyShadowingBuiltins
@api.route('/comments/<int:id>', methods=['PUT'])
def approve_comment(id):
    comment = Comment.query.get_or_404(id)
    # Only the talk's author or an admin may approve a comment.
    if comment.talk.author != g.current_user and not g.current_user.is_admin:
        return forbidden('You cannot modify this comment.')
    if comment.approved:
        return bad_request('Comment is already approved.')
    comment.approved = True
    db.session.add(comment)
    db.session.commit()
    send_comment_notification(comment)
    return jsonify(dict(status='ok'))


# noinspection PyShadowingBuiltins
@api.route('/comments/<int:id>', methods=['DELETE'])
def delete_comment(id):
    comment = Comment.query.get_or_404(id)
    if comment.talk.author != g.current_user and not g.current_user.is_admin:
        return forbidden('You cannot modify this comment.')
    if comment.approved:
        return bad_request('Approved comments cannot be deleted.')
    db.session.delete(comment)
    db.session.commit()
    return jsonify(dict(status='ok'))
//
//  TaxBracketFormInfoCreator.h
//  Retirement Simulator
//
//  Created by <NAME> on 10/9/11.
//  Copyright 2011 __MyCompanyName__. All rights reserved.
//

#import <Foundation/Foundation.h>
#import "FormInfoCreator.h"

@class TaxBracket;

@interface TaxBracketFormInfoCreator : NSObject <FormInfoCreator> {
@private
    TaxBracket *taxBracket;
    BOOL isForNewObject;
}

@property(nonatomic,retain) TaxBracket *taxBracket;

-(id)initWithTaxBracket:(TaxBracket *)theTaxBracket andIsForNewObject:(BOOL)bracketIsForNewObject;

@end
import {
  Column,
  TableInstance,
  TableState,
  Row,
  TableOptions,
  TableHeaderProps,
  TableRowProps,
  TableCellProps,
  Cell,
  ColumnInstance,
  HeaderGroup,
} from 'react-table';

export interface TableProps {
  data: object[];
  columns: Column[];
  pendingRequest?: boolean;
  emptyMessage?: string;
  showPagination?: boolean;
  totalItems: number;
  totalPages?: number;
  tableHash?: string;
  pageSize?: number;
  pageIndex?: number;
  pagesPerView?: number;
  autoResetPage?: boolean;
  autoResetExpanded?: boolean;
  onPaginationChanged?: (pageSize: number, pageIndex: number) => void;
  children?: (rows: Row[], table: TableInstance) => React.ReactNode;
  renderExpandedRow?: (row: Row<any>) => JSX.Element;
  getHeaderProps?: (column: HeaderGroup) => TableHeaderProps;
  getRowProps?: (row: Row<any>) => TableRowProps;
  getColumnProps?: (column: ColumnInstance) => TableCellProps;
  getCellProps?: (cell: Cell<any, any>) => TableCellProps;
}

export interface PaginatedTableState extends TableState {
  pageIndex: number;
  pageSize: number;
}

export interface PaginatedTableInstance extends TableInstance {
  page: Row[];
  canPreviousPage: boolean;
  canNextPage: boolean;
  gotoPage: (page: number) => void;
  previousPage: () => void;
  nextPage: () => void;
  pageCount: number;
  setPageSize: (size: number) => void;
  state: PaginatedTableState;
}

export interface PaginatedTableOptions extends TableOptions<object> {
  manualPagination?: boolean;
  pageCount?: number;
  autoResetPage?: boolean;
  autoResetExpanded?: boolean;
}
package io.iron.ironworker.client.builders; import io.iron.ironworker.client.APIException; import java.util.HashMap; import java.util.Map; public class Params { public static Map<String, Object> create(Object... os) throws APIException { if (os.length % 2 != 0) { throw new APIException("Odd params number", null); } Map<String, Object> params = new HashMap<String, Object>(); for (int i = 0; i < os.length; i += 2) { params.put(os[i].toString(), os[i + 1]); } return params; } public static ParamsObject add(String key, Object value) { return (new ParamsObject()).add(key, value); } protected Params() { } }
Research of loss detection of optic path for laser ignition application
We present several loss-detection technologies for the optical path in laser ignition applications, such as single-wavelength and dual-wavelength schemes, motivated by the reliability and safety requirements of laser ignition systems. The factors affecting loss detection are discussed, and the difficulties and development trends of laser ignition systems are pointed out. Future research will focus on the reliability of optical components, environmental applicability, and special fibers.
import React from 'react' import ReactDOM from 'react-dom' import { Spin } from 'antd' import { SpinProps } from 'antd/es/spin' import { isHidden } from '~/utils' import { Content } from './styles' export const Loading: React.FC<SpinProps> = props => ( <Content> <Spin size="large" tip="数据加载中..." {...props} /> </Content> ) let dom: HTMLElement | null const GlobalLoading = { open(props: React.ComponentProps<typeof Spin> = {}): void { if (!dom) { dom = document.createElement('div') ReactDOM.render(<Loading {...props} />, dom) document.body.appendChild(dom) } if (isHidden(dom)) { dom.style.display = '' } }, close(): void { dom!.style.display = 'none' }, remove(): void { ReactDOM.unmountComponentAtNode(dom!) document.body.removeChild(dom!) dom = null } } export default GlobalLoading
Brief Reports
Study on Early Retirement Decision*
Early retirement1 has become an increasingly important phenomenon in America. . . . And, there have been substantial improvements in retirement-income-maintenance programs in recent years, improvements which have not been available to the great majority of previous early retirees. Increases in OASDHI benefits have been significant, if not spectacular; perhaps more important has been the spread of private pension plans, many of which contain more or less comprehensive early retirement provisions. In particular, substantially liberalized early retirement benefits were negotiated during the fall of 1964 by the International Union-UAW and various companies in the automobile and agricultural implement industries; under the new agreement an auto worker could retire as early as age 60 with a monthly pension of up to $400. With this development as a major impetus, and with the belief that "a study of . . . the circumstances that favor or oppose early retirement is greatly needed in order to predict future trends and to assess their impact on the economy and the well-being of millions of people,"2 the
/** * Asynchronous rollback of the transaction. * * @param callback async completion callback */ public void rollback(AsyncCompletionCallback callback) { try { verifyTransactionState(); } catch (QueueTransactionClosedException e) { callback.setException(e); callback.done(false); return; } setClosed(true); abstractQueue.rollback(transactionId, false, callback); }
/** * A reference which will always retrieve the latest group from the appropriate group store. * * <p>Does not hold a reference to the group. */ public class DefaultGroupReference extends AbstractGroupReference { private final Item item; private final String groupId; /** New reference either from another reference or manually created. */ public DefaultGroupReference(Item item, String groupId) { this.item = item; this.groupId = groupId; } /** Create a reference from a group instance. */ public static DefaultGroupReference to(Item item, Group group) { return new DefaultGroupReference(item, group.getId()); } @Override public String getGroupId() { return groupId; } @Override public Optional<Group> toGroup() { return item.getGroups().getById(groupId); } }
/* This function must be called after an update to server <srv>'s effective * weight. It may be called after a state change too. */ static void fas_update_server_weight(struct server *srv) { int old_state, new_state; struct proxy *p = srv->proxy; if (srv->state == srv->prev_state && srv->eweight == srv->prev_eweight) return; old_state = srv_is_usable(srv->prev_state, srv->prev_eweight); new_state = srv_is_usable(srv->state, srv->eweight); if (!old_state && !new_state) { srv->prev_state = srv->state; srv->prev_eweight = srv->eweight; return; } else if (!old_state && new_state) { fas_set_server_status_up(srv); return; } else if (old_state && !new_state) { fas_set_server_status_down(srv); return; } if (srv->lb_tree) fas_dequeue_srv(srv); if (srv->state & SRV_BACKUP) { p->lbprm.tot_wbck += srv->eweight - srv->prev_eweight; srv->lb_tree = &p->lbprm.fas.bck; } else { p->lbprm.tot_wact += srv->eweight - srv->prev_eweight; srv->lb_tree = &p->lbprm.fas.act; } fas_queue_srv(srv); update_backend_weight(p); srv->prev_state = srv->state; srv->prev_eweight = srv->eweight; }
/** * Static methods to manipulate the f-x-y descriptors * * @author caron * @since Oct 25, 2008 */ public class Descriptor { public static String makeString(short fxy) { int f = (fxy & 0xC000) >> 14; int x = (fxy & 0x3F00) >> 8; int y = fxy & 0xFF; return makeString(f, x, y); } public static String makeString(int f, int x, int y) { return String.format("%d-%d-%d", f, x, y); } public static boolean isWmoRange(short fxy) { int x = (fxy & 0x3F00) >> 8; int y = fxy & 0xFF; return (x < 48 && y < 192); } public static short getFxy(String name) { String[] tok = name.split("-"); int f = (tok.length > 0) ? Integer.parseInt(tok[0]) : 0; int x = (tok.length > 1) ? Integer.parseInt(tok[1]) : 0; int y = (tok.length > 2) ? Integer.parseInt(tok[2]) : 0; return (short) ((f << 14) + (x << 8) + (y)); } public static short getFxy2(String fxxyyy) { int fxy = Integer.parseInt(fxxyyy.trim()); int y = fxy % 1000; fxy /= 1000; int x = fxy % 100; int f1 = fxy / 100; return (short) ((f1 << 14) + (x << 8) + (y)); } // contains a BUFR table entry public static boolean isBufrTable(short fxy) { int f = (fxy & 0xC000) >> 14; int x = (fxy & 0x3F00) >> 8; int y = (fxy & 0xFF); return (f == 0) && (x == 0) && (y < 13); } public static short getFxy(short f, short x, short y) { return (short) ((f << 14) + (x << 8) + (y)); } private static final String[] descType = {"tableB", "replication", "tableC-operators", "tableD"}; public static void show(Formatter out, short fxy, BufrTableLookup lookup) { int f = (fxy & 0xC000) >> 14; if (f == 0) { TableB.Descriptor b = lookup.getDescriptorTableB(fxy); if (b == null) out.format("%-8s: NOT FOUND!!", makeString(fxy)); else out.format("%-8s: %s", b.getFxy(), b.getName()); } else if (f == 1) { out.format("%-8s: %s", makeString(fxy), descType[1]); } else if (f == 2) { int x = (fxy & 0x3F00) >> 8; out.format("%-8s: Operator= %s", makeString(fxy), TableC.getOperatorName(x)); } else if (f == 3) { TableD.Descriptor d = lookup.getDescriptorTableD(fxy); if (d == null) out.format("%-8s: NOT FOUND!!", makeString(fxy)); else out.format("%-8s: %s", d.getFxy(), d.getName()); } } public static String getName(short fxy, BufrTableLookup lookup) { int f = (fxy & 0xC000) >> 14; if (f == 0) { TableB.Descriptor b = lookup.getDescriptorTableB(fxy); if (b == null) return ("**NOT FOUND!!"); else return b.getName(); } else if (f == 1) { return descType[1]; } else if (f == 2) { int x = (fxy & 0x3F00) >> 8; return TableC.getOperatorName(x); } else if (f == 3) { TableD.Descriptor d = lookup.getDescriptorTableD(fxy); if (d == null) return "**NOT FOUND!!"; else return d.getName(); } return "illegal F=" + f; } }
from pathlib import Path

import pytest

from repo_health import get_file_content
from repo_health.check_setup_py import check_pypi_name, module_dict_key

FAKE_REPO_ROOT = Path(__file__).parent / "fake_repos"

@pytest.mark.parametrize("fake_repo, pypi_name", [
    ("kodegail", "kodegail"),
    ("just_setup_py", "some_other_pypi_name"),
    ("just_setup_cfg", "setup_cfg_package"),
    ("docs_repo", None),
])
def test_check_pypi_name(fake_repo, pypi_name):
    setup_py = get_file_content(FAKE_REPO_ROOT / fake_repo / "setup.py")
    setup_cfg = get_file_content(FAKE_REPO_ROOT / fake_repo / "setup.cfg")
    all_results = {module_dict_key: {}}
    check_pypi_name(setup_py, setup_cfg, all_results)
    if pypi_name is not None:
        assert all_results[module_dict_key]["pypi_name"] == pypi_name
    else:
        assert "pypi_name" not in all_results[module_dict_key]
export { ProductAttributes, ProductAttributesProps } from './base/ProductAttributes/ProductAttributes'; export { Breadcrumbs, BreadcrumbsProps } from './base/Breadcrumbs/Breadcrumbs'; export { ProductReviews, ProductReviewsProps } from './base/ProductReviews/ProductReviews'; export { ProductActions, ProductActionsProps } from './base/ProductActions/ProductActions'; export { ProductGallery, ProductGalleryProps } from './base/ProductGallery/ProductGallery'; export { ProductCard, ProductCardProps } from './base/ProductCard/ProductCard'; export { CategoryList, CategoryListProps } from './base/CategoryList/CategoryList'; export { CategorySort, CategorySortProps } from './base/CategorySort/CategorySort'; export { CategoryFilter, CategoryFilterProps } from './base/CategoryFilter/CategoryFilter'; export { ProductSearch, ProductSearchProps } from './base/ProductSearch/ProductSearch'; export { Wishlist, WishlistProps } from './base/Wishlist/Wishlist'; export { ViewedItems, ViewedItemsProps } from './base/ViewedItems/ViewedItems'; export { CurrencySwitch, CurrencySwitchProps } from './base/CurrencySwitch/CurrencySwitch'; export { CartList, CartListProps } from './base/CartList/CartList'; export { Checkout, CheckoutProps, CheckoutFieldConfig } from './base/Checkout/Checkout'; export { DefaultCheckoutFields, CheckoutFieldProps } from './base/Checkout/DefaultElements'; export { AccountInfo, AccountInfoProps, AccountFieldConfig, AccountFieldProps } from './base/AccountInfo/AccountInfo'; export { DefaultAccountFields } from './base/AccountInfo/DefaultElements'; export { AccountOrders } from './base/AccountOrders/AccountOrders'; export { MuiPagination, MuiProductReviews, MuiProductAttributes, MuiBreadcrumbs, MuiProductActions, MuiProductCard, MuiCategoryList, MuiCategorySort, MuiProductSearch, MuiViewedItems, MuiWishlist, MuiCurrencySwitch, MuiCartList, MuiCheckout, MuiAccountInfo, MuiAccountOrders, } from './mui/index'; export { moduleState, useModuleState } from './helpers/state'; export { useProductVariants } from './helpers/useProductVariants'; export { notifier } from './helpers/notifier'; export { muiNotifier } from './mui/Notifier/Notifier';
package conll;

import static org.junit.Assert.*;

import org.junit.*;

public class TokenTest {

    private static final String INPUT_LINE = "10\ttror\t_\tV\tVA\tmood=indic|tense=present|voice=active\t_\t_\t_\t_";

    private Token token = null;

    @Before
    public void setUp() {
        token = createTestToken();
    }

    static Token createTestToken() {
        return new Token(INPUT_LINE);
    }

    @Test
    public void stringRepresentation() {
        assertEquals("Wrong String representation.", INPUT_LINE, token.toString());
    }

    @Test
    public void createTokenFromTooFewFields() {
        String argumentLine = "10\ttror\t_\tV\tVA\tmood=indic|tense=present|voice=active";
        String correctLine = argumentLine + "\t_\t_\t_\t_";
        token = new Token(argumentLine);
        assertEquals("Wrong String representation.", correctLine, token.toString());
    }

    @Test (expected=IllegalArgumentException.class)
    public void createTokenFromEmptyArray() {
        token = new Token("");
    }

    @Test (expected=NullPointerException.class)
    public void createTokenFromNullArray() {
        token = new Token(null);
    }

    @Test
    public void emptyToken() {
        Token empty = Token.createEmptyToken();
        assertEquals("Empty token has wrong contents.", "0\t_\t_\t_\t_\t_\t_\t_\t_\t_", empty.toString());
    }

    @Test
    public void idAccess() {
        int newId = token.getId() + 1;
        token.setId(newId);
        assertEquals("Got wrong value after change.", newId, token.getId());
    }

    @Test
    public void featureAdding() {
        token = Token.createEmptyToken();
        String firstFeature = "word=apple";
        token.addFeature(firstFeature);
        assertEquals("Feature was not added.", "0\t_\t_\t_\t_\t" + firstFeature + "\t_\t_\t_\t_", token.toString());
        String secondFeature = "cat=NN";
        token.addFeature(secondFeature);
        assertEquals("Feature was not added.", "0\t_\t_\t_\t_\t" + firstFeature + "|" + secondFeature + "\t_\t_\t_\t_", token.toString());
    }

    @Test
    public void copyToken() {
        Token copy = new Token(token.toString());
        assertEquals("Copying gone wrong.", token.toString(), copy.toString());
    }
}
""" Copyright 2019 BBC. Licensed under the terms of the Apache License 2.0. """ from unittest.mock import Mock import pytest from google.cloud.bigquery import Client from foxglove.connectors.bigquery import BigQueryConnector @pytest.fixture def fake_bq_client(): return Mock(spec=Client(project='test_project')) @pytest.mark.integration def test_valid_bigquery_connector_init(): connector = BigQueryConnector( 'test_dataset_id', 'test_table_id', 'test_role' ) assert connector.bq_dataset_id assert connector.bq_table_id assert connector.bq_client @pytest.mark.integration def test_write_truncate_ndjson_file(fake_bq_client): connector = BigQueryConnector( 'test_dataset_id', 'test_table_id', 'test_role' ) connector.bq_client = fake_bq_client connector.write_truncate_ndjson_file('test_ndjson_fh') fake_bq_client.load_table_from_file.assert_called_with( file_obj='test_ndjson_fh', destination=connector._bq_table, job_config=connector._job_config ) @pytest.mark.integration def test_bq_table(fake_bq_client): connector = BigQueryConnector( 'test_dataset_id', 'test_table_id', 'test_role' ) connector.bq_client = fake_bq_client _ = connector._bq_table() connector._bq_dataset.table.assert_called_once() @pytest.mark.integration def test_bq_dataset(fake_bq_client): connector = BigQueryConnector( 'test_dataset_id', 'test_table_id', 'test_role' ) connector.bq_client = fake_bq_client _ = connector._bq_dataset() fake_bq_client.create_dataset.assert_called_once() def test_bigquery_engine_url_decode(): engine_url='bigquery://projectId=my_project;datasetId=nice_food;tableId=cakes;' connector = BigQueryConnector(engine_url=engine_url) project, dataset, table = connector._decode_engine_url() assert project == 'my_project' assert dataset == 'nice_food' assert table == 'cakes' @pytest.mark.integration def test_sql_query_with_params(): engine_url='bigquery://projectId=bbc-datalab;datasetId=foxglove_test;tableId=rms_titles;' connector = BigQueryConnector(engine_url=engine_url) # check known value in sample data sql = "SELECT id FROM `bbc-datalab.foxglove_test.rms_titles` WHERE pid=@my_pid" for row in connector.query(sql=sql, sql_params=[("my_pid", "STRING", "b01qw8tz")]): assert row.id == 1
#ifndef __SCHEME_PRE
#define __SCHEME_PRE

#include <string>
#include <vector>
#include <iosfwd>

class SchemeUnit {
private:
    bool inComment;
    enum MultilineCommentStatus { Neutral, CommentStart, CommentEnd };
    void stripSemiColon(std::string& line);
    MultilineCommentStatus processMultilineComment(std::string& line);

public:
    SchemeUnit();
    SchemeUnit(std::istream& schemeStream);
    std::vector<std::string> lines;
    void preprocess(std::istream& schemeStream);
};

#endif
Scientainment for Sustainability: The Eco-Confessional as a New Approach for Life Cycle Thinking
To educate a wide audience on the environmental impact of their daily-life decisions, the Eco-Confessional has been developed as an interactive exhibit and a serious game. With it, the effectiveness of promoting sustainable lifestyles through life cycle thinking was explored. Based upon life cycle assessments, the Eco-Confessional reveals, in a playful way, the environmental impact of eco-sins and good deeds performed in everyday life. While the Eco-Confessional was being exhibited, it was evaluated to measure the impact of the new communication format. The goal of the project was to increase sustainable behavior by influencing both the desire and the ability to make sustainable decisions. The evaluation revealed that these goals were mostly achieved. By combining life cycle data with gamification elements, the Eco-Confessional succeeded in implementing a new scientainment approach to environmental education, one that promotes life cycle thinking among the public.
Feature Articles: A Relativistic Symmetry in Nuclei
More than thirty years ago it was observed that certain quantum energy levels in atomic nuclei were almost degenerate in energy. The states that are almost degenerate (quasi-degenerate) have different radial quantum numbers and different orbital angular momenta, features that made the reason for their degeneracy difficult to penetrate.
Police say investigation shows child did not have cancer

Atlantic police said a mother has been charged after claiming her 5-year-old daughter had cancer, which she did not, and using the claim to raise money for the family.

Leatha Kaye Slauson, 30, of Atlantic, was charged with child endangerment and a drug charge for giving the girl cannabis oil.

Police said they started investigating the case after a report of possible child abuse was received by the department about 3 p.m. Thursday. Authorities said school officials looking into what special care the girl would need in part uncovered questions about medical contacts that didn't seem to pan out.

The Atlantic Police Department, Cass County Sheriff's Office, Cass County Attorney and Iowa Department of Human Services are involved in the investigation.

Police said the preliminary investigation showed Slauson was claiming her daughter suffered from cancer, and in recent months that the child was terminal. Police said the child was found to not be suffering from any life-threatening illness. They said Slauson did give her daughter cannabis oils.

Slauson was arrested, charged and taken to the Cass County Jail. Slauson was the only person involved in the matter, police said.

Police said in the past year she raised money from individuals and organizations that included a trip to Walt Disney World for the Super Riley Fund.

Slauson told KETV-TV in Omaha last spring that doctors diagnosed 5-year-old Riley with colorectal cancer in September 2013. "(The cancer) is just spreading too fast," said Leatha Slauson, during an April 26 interview with KETV. "It spread to her liver and the last MRI (showed) it spread to her lymph nodes and they stopped chemotherapy."

The story involved Riley, who wanted to be a firefighter, getting to ride in an Atlantic firetruck, which was on her bucket list.

Authorities are asking the community at this time to stop donations to the fund. Cass County Attorney Dan Feistner said so far they don't know the total amount that was donated to the family.

"I don't think the hearts of this community will be hardened by this," said Police Chief Steve Green. He said the community has always helped its members and he doesn't expect this case will change that.

Feistner said the family has four other children that are now being cared for by the father. Feistner said the father was very upset after learning about this and they believe he did not know anything about it. He was "struggling with how to proceed in his life" after learning the news.
// FindByPodName returns a map of DanmEps which belong to the same Pod in a given namespace // If no Pod name is provided, function returns all DanmEps func FindByPodName(client danmclientset.Interface, podName, ns string) ([]danmtypes.DanmEp, error) { result, err := client.DanmV1().DanmEps(ns).List(meta_v1.ListOptions{}) if err != nil { return nil, errors.New("cannot list DanmEps because:" + err.Error()) } ret := make([]danmtypes.DanmEp, 0) if result == nil { return ret, nil } eplist := result.Items for _, ep := range eplist { if podName != "" && ep.Spec.Pod != podName { continue } ret = append(ret, ep) } return ret, nil }
/* * A class that builds loadouts and Items. */ package engine.entities.items; /** * * @author Christopher */ public class InventoryBuilder { //For creating loadouts, I will likely reference http://en.wikipedia.org/wiki/Equipment_of_the_United_States_Army public static Inventory buildAH64ApacheLoadout(){ Inventory inv = new Inventory(null); //Insert Rocket Launcher implementation here //Gun - M230 inv.addItem(new Weapon(55.9, "M230", new Resource("30x113mm",1200),805,0.4,1,0.096,8.1)); return inv; } public static Inventory buildInfantryLoadout(){ Inventory inv = new Inventory(null); //Primary Weapon - M4A1 inv.addItem(new Weapon(2.88, "M4A1", new Resource("5.56x45mm NATO",300),880,0.004,0.1,0.075,0.6)); //Sidearm - M9 inv.addItem(new Weapon(0.952, "M9", new Resource("9x19mm Parabellum",120),381,0.0075,0.1,0.5,0.6)); return inv; } public static Inventory buildSniperLoadout(){ Inventory inv = new Inventory(null); //Primary Weapon - M4A1 inv.addItem(new Weapon(13.5, "M107", new Resource(".50 BMG",50),853,0.045,.5,1,0.6)); //Sidearm - M9 inv.addItem(new Weapon(0.952, "M9", new Resource("9x19mm Parabellum",120),381,0.0075,0.1,0.5,0.6)); return inv; } public static Inventory buildDemolitionsLoadout(){ Inventory inv = new Inventory(null); //Primary Weapon - AT4 inv.addItem(new RocketLauncher(6.7 , "AT4", new Resource("HEDP 502", 5), 290, 2.5, 200, 5, 0.015, 250)); //Sidearm - M9 inv.addItem(new Weapon(0.952, "M9", new Resource("9x19mm Parabellum",120),381,0.0075,0.1,0.5,0.6)); return inv; } public static Inventory buildHumveeLoadout(){ Inventory inv = new Inventory(null); //Weapon - M134 Minigun inv.addItem(new Weapon(38.5, "M134", new Resource("7.62x51mm NATO",5000),853,0.01,0.1,0.015,2.1)); return inv; } public static Inventory buildM1AbramsLoadout(){ Inventory inv = new Inventory(null); //Main Cannon - 120mm Gun inv.addItem(new Weapon(1190, "Rheinmetall 120mm Gun", new Resource("120mm Shell",40),1700,5,500,10,3.6)); //Secondary - M2 inv.addItem(new Weapon(17, "M2", new Resource(".50 BMG",50),890,0.045,0.1,0.11,3.6)); return inv; } public static Inventory buildStrykerLoadout(){ Inventory inv = new Inventory(null); //Main Cannon - M68A2 inv.addItem(new Weapon(1282, "M68A2", new Resource("105mm Shell",40),1250,4,50,6,3.6)); //Secondary - M240 inv.addItem(new Weapon(11.5, "M240", new Resource("7.62x51mm NATO",1000),890,0.01,0.1,0.071,3.6)); return inv; } public static Inventory buildUH60BlackHawkLoadout(){ Inventory inv = new Inventory(null); //Weapon - M134 Minigun (It is supposed to have two, but I added one to reduce data usage) inv.addItem(new Weapon(38.5, "M134", new Resource("7.62x51mm NATO",6000),853,0.01,0.1,0.015,2.1)); return inv; } public static Inventory buildEmptyLoadout(){ Inventory inv = new Inventory(null); return inv; } }
/** * Creates a form for specified identifier filled by data present in entity * @param entity Entity that contains form data * @param identifier Form identifier * @return A FormResponse of created form */ public static SimpleResponse createFormResponse(Entity entity, String identifier) { FormResponse response = new FormResponse(); try { Form form = FormBuilder.instance().build(identifier); form.setEditMode(true); form.setData(toMap(identifier, entity)); response.setContent(form.writeToString()); response.setError(false); } catch (FormCreationException e) { e.printStackTrace(); response.setError(true); response.setMessage("Error creating form: " + e.getMessage()); } catch (CrudConfigurationException e) { e.printStackTrace(); response.setError(true); response.setMessage("Crud configuration error: " + e.getMessage()); } catch (FormProcessException e) { e.printStackTrace(); response.setError(true); response.setMessage("Error processing form: " + e.getMessage()); } return response; }
Indian cities are not particularly known for their cleanliness, and it is not the first time that someone has posted a picture of the filth on social media. Yet when Tavleen Singh posted a picture of garbage on her Twitter timeline, all hell broke loose. The picture didn't present any unusual sight, but in the eyes of Twitter trolls, Tavleen Singh had committed nothing short of sacrilege. This was not an ordinary picture of random roadside garbage. The picture was from BJP-ruled Uttar Pradesh. And of all the cities in UP, it was from Gorakhpur, the hometown of Chief Minister Yogi Adityanath. And of all the places in Gorakhpur, it was taken outside Adityanath's magnificent Gaushala. Yes, the same cow shelter which journalists visit to write flattering pieces on how the CM fed the cows and how a certain calf came running on seeing him.

As if this was not enough, Tavleen had also tweeted the previous day, "In Gorakhpur today and what a wretched, filthy little town it is. Yogi's influence seems limited to his temple and its environs." That was enough provocation for trolls to descend on her timeline. At the time of writing this article, there were close to 400 replies to her posts. So varied were the responses that this is a perfect case study to identify the types of responses that Modi and Yogi supporters come up with to any perceived insult. Some of these are purely hilarious. Enjoy…

Disbelief and denial

There was utter disbelief among certain quarters. Even though garbage on the sidewalks is a common sight in India, how could this be true of Gorakhpur?

"Please prove it. This pic could have been taken anywhere"

"Madam don't put fake photos.. Have u visited the site..?"

"Please provide the real picture with background of Gaushala not Photoshoped"

Ulterior motive

The conspiracy theorists were convinced about a dark ulterior motive behind posting the pic. Ah! She must be out to get Yogi. Sigh! What else can you expect from "presstitutes"?

"Day 2 – Operation Defame Yogi"

"She has nothing to do with cleanliness. It is all abt defame yogi now as defame modi didnt work. She is not alone. Now dhume will RT"

'60 years of Congress'

How can any trolling be complete without a mention of 60 years of Congress? Never mind that Adityanath is a five-time elected MP from Gorakhpur. It was all "what about Mulayam Singh Yadav and Mayawati and Akhilesh?" Where were you, huh? Blind, deaf, mute to everything?

"Surprised you were blind deaf and mute when Mullayam and his goons destroyed the state , so did Mayawati . How sold are u?"

"Why was no such comments made during SP or BSP rule".

"Yeah sure… filthiness just started last couple of years .. Where was ur demand for the last 70 years?"

Whatabout, whataboutery?

Yes, yes, it's coming. Whataboutery can never be far behind in any argument. It was an entirely predictable reaction.

"Which town in UP is not a filthy, wretched town? And why just the towns please go to large cities too."

"Madam amethi ja k dekho, gorakhpur paris sa dikhega apko" [Madam, go and see Amethi; Gorakhpur will look like Paris to you]

Sarcasm

Some attempted sarcasm. After all, criticism of Adityanath had to be countered by any means.

"Agree he must go and clean every toilet in gorakhpur. He is a bad CM…"

"U travelled all the way to Gorakhpur for this snapshot? Cld hve got it near redfort in ur Delhi"

Insane logic

Try countering this logic, "presstitutes":

"Then how come he was never defeated in Gorakhpur??"

Personal attack

Those who were without any argument and perhaps had a limited vocabulary launched a personal attack.
In their minds, so what if the city is filthy; journalists are filthier:

"Why don't you start by cleaning the filth in your brain or atleast share some ideas if your senile mind still works!"

"Saale dallo !! Presstitutes yu hi nahi kahte tumko." [You scoundrels!! You aren't called presstitutes for nothing.]

"And some filthiest journalists too…."

Gotcha!

This one was a pure gem. One person even found a deep dark secret from way back in 2007. He had a newspaper link to prove it too. How can someone who got into a spat about not picking up litter ten years ago dare to talk about garbage now? There was a race to prove why she was not entitled to speak on the topic.

"Says #TavleenSingh doesn't clean up after her dogs defecate on #Mumbai's streets and who refuses to pay fines"

An article from 2007 was attached to prove the point. Hats off!

"Arm Chair commentators of public causes well distanced & insulated from the operational exigencies. Care for a week in a Municipality?"

This was becoming an unending saga of Modi and Yogi fan clubs against Tavleen Singh till finally Sadanand Dhume provided a welcome distraction for the trolls with his tweet, "Sharing this just so that I can learn about all the ways in which this absolutely, definitely, certainly doesn't reflect on Yogi Adityanath."

Shekhar Gupta too put in his views about Gorakhpur, saying, "Gorakhpur is the filthiest, epidemic-hit open sewer of a city. You want to see what ails the heartland, come here. @tavleen_singh pic is apt". He later gave space in his column to the city he called "the capital of the eastern and most hopeless zone of UP".

Tavleen Singh tried responding to some of the trolls individually, followed by a tweet to explain that a demand for urban waste management doesn't conflict with nationalism. It is nationalism.

Understandably, after a spate of photoshopped pictures on WhatsApp, a real picture may have appeared shocking enough to elicit these hilarious reactions.

Laughs aside, this brings us to the important question of why Modi supporters get so rattled at the slightest hint of criticism. However hilarious their responses may seem, by pouncing on critics, the army of trolls is contributing to a shrinking public space for dissent. Shouldn't those who voted a party to power also take the lead in holding it to account? They are busy instead in silencing the critics.
package engineering.everest.lhotse.api.rest.responses;

import lombok.Builder;
import lombok.Data;
import org.springframework.http.HttpStatus;

import java.time.Instant;

@Data
@Builder
public class ApiErrorResponse {
    private final HttpStatus status;
    private final String message;
    private final Instant timestamp;
}
def post_event_sources_log_source_management_log_source_types_by_id(self, id, *, log_source_type_data, fields=None, **kwargs):
    """POST an updated log source type definition to
    event_sources/log_source_management/log_source_types/{id}."""
    function_endpoint = urljoin(self._baseurl,
                                'event_sources/log_source_management/log_source_types/{id}'.format(id=id))
    return self._call('POST', function_endpoint, json=log_source_type_data, **kwargs)
package com.airta.action.agent.context;

import com.airta.action.agent.config.DriverConfig;
import com.airta.action.agent.utility.parser.HtmlParser;
import com.airta.action.agent.utility.WebDriverStart;
import com.airta.action.agent.webdriver.WebDriverState;
import org.openqa.selenium.WebDriver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.servlet.ServletContext;

@Component
@SuppressWarnings("SpringJavaAutowiringInspection")
public class WebdriverInitializr implements ApplicationListener<ApplicationReadyEvent> {

    private static final Logger log = LoggerFactory.getLogger(WebdriverInitializr.class);

    private WebDriver webDriver = null;

    @SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
    @Autowired
    ServletContext servletContext;

    @Value("${agent.init}")
    private boolean initAgentWhenStartup;

    @Value("${agent.entry}")
    private String agentStartEntryPage;

    private HtmlParser htmlParser = new HtmlParser();

    @Override
    public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {

        log.info("## WebdriverInitializr initialization logic ...");
        if (initAgentWhenStartup) {
            initWebDriver(applicationReadyEvent);
        }
    }

    private void initWebDriver(ApplicationReadyEvent applicationReadyEvent) {

        webDriver = WebDriverStart.browserEntry(agentStartEntryPage);
        String entryPageSource = "";
        try {
            entryPageSource = webDriver.getPageSource();
        } catch (Exception e) {
            log.error(e.getLocalizedMessage());
            try {
                entryPageSource = webDriver.getPageSource();
            } catch (Exception e1) {
                log.error(e1.getLocalizedMessage());
                entryPageSource = webDriver.getPageSource();
            }
        }

        log.info("## Initialized webDriver session {}", entryPageSource.length());
        log.info("## Fetch embedded children links {}", htmlParser.parseChildLinks(entryPageSource).size());

        servletContext.setAttribute(DriverConfig.WebDriverSessionKey, webDriver);
        servletContext.setAttribute(DriverConfig.WebDriverSessionStatus, WebDriverState.INIT);
    }
}
Head Fusion: Improving the Accuracy and Robustness of Speech Emotion Recognition on the IEMOCAP and RAVDESS Dataset
Speech Emotion Recognition (SER) refers to the use of machines to recognize the emotions of a speaker from his (or her) speech. SER benefits Human-Computer Interaction (HCI). But there are still many problems in SER research, e.g., the lack of high-quality data, insufficient model accuracy, little research under noisy environments, etc. In this paper, we proposed a method called Head Fusion based on the multi-head attention mechanism to improve the accuracy of SER. We implemented an attention-based convolutional neural network (ACNN) model and conducted experiments on the Interactive Emotional Dyadic Motion Capture (IEMOCAP) dataset. The accuracy is improved to 76.18% (weighted accuracy, WA) and 76.36% (unweighted accuracy, UA). To the best of our knowledge, compared with the state-of-the-art result on this dataset (76.4% WA and 70.1% UA), we achieved an absolute UA improvement of about 6% while maintaining a similar WA. Furthermore, we conducted empirical experiments by injecting speech data with 50 types of common noises. We injected the noises by altering the noise intensity, time-shifting the noises, and mixing different noise types, to identify their varied impacts on SER accuracy and to verify the robustness of our model. This work will also help researchers and engineers properly augment their training data by using speech data with the appropriate types of noises to alleviate the problem of insufficient high-quality data.
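The noise-injection protocol described above (varying noise intensity, time-shifting the noise, and mixing noise into clean speech) is straightforward to reproduce with NumPy. Below is a minimal sketch, assuming 1-D floating-point waveforms at a shared sampling rate; the function names and the SNR-based parameterization of "noise intensity" are illustrative assumptions, not the authors' published code.

import numpy as np

def mix_at_snr(speech: np.ndarray, noise: np.ndarray, snr_db: float) -> np.ndarray:
    """Mix `noise` into `speech` at a target signal-to-noise ratio in dB.

    Hypothetical helper: the paper does not specify its exact mixing rule.
    """
    # Loop the noise if it is shorter than the speech, then trim to length.
    if len(noise) < len(speech):
        noise = np.tile(noise, int(np.ceil(len(speech) / len(noise))))
    noise = noise[: len(speech)]
    speech_power = np.mean(speech ** 2)
    noise_power = np.mean(noise ** 2) + 1e-12  # guard against silent noise clips
    # Scale so that 10 * log10(speech_power / scaled_noise_power) == snr_db.
    scale = np.sqrt(speech_power / (noise_power * 10 ** (snr_db / 10)))
    return speech + scale * noise

def time_shift(noise: np.ndarray, shift_samples: int) -> np.ndarray:
    """Circularly shift the noise clip, one of the perturbations varied above."""
    return np.roll(noise, shift_samples)

# Example: augment one utterance at several SNRs and time shifts.
# rng, speech, and noise are stand-ins for real audio loaded elsewhere.
rng = np.random.default_rng(0)
speech = rng.standard_normal(16000)
noise = rng.standard_normal(8000)
augmented = [mix_at_snr(speech, time_shift(noise, s), snr)
             for snr in (0, 10, 20) for s in (0, 4000)]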
import common import music_queue import albums @route('/music/music/genres_menu') def GetGenresMenu(title): oc = ObjectContainer(title2=unicode(L(title))) oc.add(DirectoryObject( key=Callback(HandleMusicGenres, title=L('All Genres')), title=unicode(L('All Genres')) )) oc.add(DirectoryObject( key=Callback(music_queue.GetQueue, filter='genre__in', title=L('Favorite Genres')), title=unicode(L('Favorite Genres')) )) common.add_search_music(oc) return oc @route('/music/music/music_genres') def HandleMusicGenres(title): oc = ObjectContainer() response = service.get_genres(limit=0) count = response['meta']['total_count'] oc.title2 = unicode(L(title)) + ' (' + str(count) + ')' for media in response['objects']: id = media['id'] title = media['title'] thumb = 'thumb' key = Callback(HandleMusicGenre, title=title, thumb=thumb, genre__in=id) oc.add(DirectoryObject(key=key, title=unicode(title), thumb=thumb)) common.add_search_music(oc) return oc @route('/music/music/music_genre') def HandleMusicGenre(title, genre__in, thumb): oc = ObjectContainer(title2=unicode(L(title))) key = Callback(albums.HandleAlbums, title=title, genre__in=genre__in) oc.add(DirectoryObject(key=key, title=unicode(title))) music_queue.append_controls(oc, name=title, thumb=thumb, genre__in=genre__in) common.add_search_music(oc) return oc
// Write a program to list the fonts available on the local system
import java.awt.GraphicsEnvironment;

class Fonts {
    public static void main(String[] args) {
        GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment();
        String fonts[] = ge.getAvailableFontFamilyNames();
        System.out.println("Available fonts on this system:");
        for (int i = 0; i < fonts.length; i++)
            System.out.println(fonts[i]);
    }
}
The Americleft Project: A Comparison of Short- and Longer-Term Secondary Alveolar Bone Graft Outcomes in Two Centers Using the Standardized Way to Assess Grafts Scale
Objective To compare length of follow-up and cleft site dental management on bone graft ratings from two centers.
Design Blind retrospective analysis of cleft site radiographs and chart reviews for determination of cleft-site lateral incisor management.
Patients A total of 78 consecutively grafted patients with complete clefts from two major cleft/craniofacial centers (43 from Center 1 and 35 from Center 2).
Interventions Secondary iliac crest alveolar bone grafting, at a mean age of 9 years 9 months (Center 1: 9 years 7 months; Center 2: 10 years 0 months).
Main Outcome Measures The Americleft Standardized Way to Assess Grafts scale from 0 (failed graft) to 6 (ideal) was used to rate graft outcome at two time points (T1, T2). Average T1 was 11 years 1 month of age, 1 year 3 months postgraft. Average T2 was 17 years 11 months of age, 8 years 0 months postgraft. Six trained and calibrated raters scored each radiograph twice. Reliability was calculated at T1 and T2 using weighted kappa. A paired Wilcoxon signed rank test (P < .05) tested T1 and T2 differences for each center. A Kruskal-Wallis test was used to determine the significance of differences between centers at T1 and T2. Correlation tested whether T1 ratings predicted T2. Linear regression determined possible factors that might contribute to graft rating changes over time.
Results Reliability was good at T1 and T2 (interrater = .713 and .701, respectively; intrarater = .790 and .805, respectively). Center 1 scores were significantly better than those from Center 2 at both T1 (5.21 versus 3.29) and T2 (5.18 versus 3.44). There was no statistical difference between T1 and T2 scores for either center; although, there was a greater chance of bone graft score improving with completion of canine eruption and substitution for missing lateral incisors.
Conclusions Short-term ratings of graft outcomes identified significant differences between centers that persisted over time. Dental cleft-site management influenced final graft outcome.
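The reliability and hypothesis tests named above map directly onto standard Python statistics libraries. The following is a minimal sketch under stated assumptions: ratings are 0-6 integers, the arrays are hypothetical stand-ins rather than the study's data, and quadratic weighting is assumed for the kappa since the abstract says only "weighted kappa".

import numpy as np
from scipy.stats import wilcoxon, kruskal
from sklearn.metrics import cohen_kappa_score

# Hypothetical SWAG ratings (0 = failed graft, 6 = ideal).
rater_a = np.array([5, 6, 4, 5, 3, 6, 5])
rater_b = np.array([5, 5, 4, 6, 3, 6, 5])
# Inter-rater reliability as a weighted kappa (quadratic weights assumed).
kappa = cohen_kappa_score(rater_a, rater_b, weights="quadratic")

# Paired Wilcoxon signed-rank test: the same patients rated at T1 and T2.
t1 = np.array([5, 6, 4, 5, 3, 6, 5])
t2 = np.array([5, 6, 5, 5, 4, 6, 4])
_, p_within = wilcoxon(t1, t2)

# Kruskal-Wallis test: do the two centers differ at one time point?
center1 = np.array([5, 6, 5, 5, 6])
center2 = np.array([3, 4, 3, 4, 3])
_, p_between = kruskal(center1, center2)

print(f"kappa={kappa:.3f}, p_within={p_within:.3f}, p_between={p_between:.3f}")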
def write_card(elem):
    # Try the small-field (8-character) card format first; some values cannot
    # be represented at that width, so fall back to the large-field
    # (16-character) format when the element raises a RuntimeError.
    try:
        elem.write_card(size=8, is_double=False)
    except RuntimeError:
        elem.write_card(size=16, is_double=False)
    except Exception:
        # Anything else is unexpected: dump the element's stats for debugging
        # and re-raise.
        print(elem.get_stats())
        raise
import { bot } from "../../cache.ts";

bot.arguments.set("boolean", {
  name: "boolean",
  execute: function (_argument, parameters) {
    const [boolean] = parameters;

    if (["true", "false", "on", "off", "enable", "disable"].includes(boolean)) {
      return ["true", "on", "enable"].includes(boolean);
    }
  },
});
NOW AVAILABLE

Our book Biesik Jumiekan, a greatly expanded print version of this site, is published by Gnosophia Publishers under the Chuu Wod imprint. Bulk orders are available direct from the publisher; it is otherwise available from Amazon.

Wa Jumiekandem taak

Di habrij Jumiekan di taak wa deh taak deh kaali patwa, deh kaali kryuol, ar iibm bad hIngglish, askaadn tu ou deh fiil proud ar kaanful. Jumiekandem uona hatityuud divaid uoba di languij di wuola dem taak di muos, liklmuos aal di taim. Alduo hIngglish a di hofishal languij a di konchri, ah deh aal ab wa deh kaal Jumiekan hIngglish, a muosli bakra ah tapanaaris yu yie widi ina hofishal soerkl, anles smadi waah himpres wid piiki-puoki. Kaman yuusij rienj frah Jumiekan hIngglish tu braad patwa wid bout chrii digrii a separieshan, noftaim ina di wan piika siem wan kanvasieshan.

hArijin a Jumieka Taak

Lingguisdem aidentifai "pior" Jumiekan, fain muosli a konchri, wid riijanal difrans, laka wah mixcho a sebmtiint senchri hIngglish ah Wes Afrikan, muosli Chwi, kanschrokshan ah vokiabileri, wid soh Panish ah Puotigiis iin de tu fi a gud mixop. Di haxent ah kiedens koh frah Skatish ah hAirish. Kansda di ischri a Jumieka, dis shudn sopraizn sens di bolk a di papilieshan a disendant frah slieb kyaa kom frah Wes Afrika, fos bai di Panish, den deh laan hIngglish frah deh British uona, uobasia, hadvenchara, ah mishineridem.

Korant stietos

Potenshal, faib milian piipl, di papilieshan a Jumieka hinkluudn di dayaspora, taak Jumiekan ina wan faam ar di hada. Laka heni hada libm languij, ichienj ah kantiniu chienj uoba taim. Mosa honggl fyuu huol-taima baka bush a konchri ar aisoliet ina Brixtan ar Bruklin frah waa gwaan kiah kot di braad patwa, ar wa wi wi kom fi nuo haz hAakiek ar Klasikal Jumiekan (Si hAatagrafi, Jumiekan3). Di majariti taakin wi faal sohwe ina di migl a di spekchrom. Deh haazwie a mekop nyuu wod laka aatikal ah tapanaaris, ar cruu di hiiz a hintanashinal chrabl ah hilekchranik komiunikieshan, baara dem frah elswe, laka bling-bling frah ip-ap. Di languij wi chienj bot inaa ded faa itek iin eni nyuu wod ah Jumiekanaiz dem. So langx az Jumiekandem piich patan no chienj, deh wi kantiniu tek di siem hIngglish ah toni ina deh uona languij. No kia umuch deh waah sopresi, a hit Jumieka piipl wi haazwie taak. Idon du aredi. A deh languij muo dah heniting hels we set dem apaat az wah piipl. Jak Manduora, mi no chuuz non.

What Jamaicans speak

The speech of the average Jamaican is variously described as a patois or creole, or even as bad English, depending on the degree of pride or disdain of the describer. Jamaicans' attitudes themselves are very divided over the language they all speak most, if not all, of the time. Although English is the official language of the country, and a variant known as Jamaican English is acknowledged, it is mostly heard only in formal situations, unless one wants to impress with "speaky-spoky." Common usage ranges from Jamaican English to broad patois with about three degrees of separation, often within a single speaker's conversation.

Origins of Jamaican speech

Linguists have identified "pure" Jamaican, now spoken mostly in rural areas, with regional differences, as an amalgam of seventeenth century English and West African, mostly Twi, constructions and vocabulary, with some Spanish and Portuguese thrown in for good measure. The accents and cadences have been derived from Scottish and Irish.
Considering the history of Jamaica, this should not be surprising, as the bulk of the population are descendants of slaves brought from West Africa, first by the Spanish, then taught English by their British owners, overseers, adventurers, and missionaries.

Current status

Potentially, five million people, the population of Jamaica including the diaspora, speak Jamaican in one form or the other. Like any other living language, it changes and continues to change over time. It must be only a few old-timers in the bush of the countryside, or isolated in Brixton or Brooklyn, who can still speak broad patois, or what will come to be known as Archaic or Classical Jamaican (see hAatagrafi, Jumiekan3). The majority speech will fall somewhere in the middle of the spectrum. New words are always being created, like haatikal and tapanaaris, or, through the ease of international travel and electronic communication, borrowed from elsewhere, like bling-bling from hip-hop. The language will change but it will never die, for it absorbs new words and Jamaicanizes them. As long as Jamaicans' speech patterns do not change, they will continue to take English and turn it into their own language. No matter how much it is suppressed, this is what Jamaicans will always speak. It is so already. It is their language, more than anything else, that sets them apart as a people. Jack Mandora, I choose none.

Chat Bout

NYUUZ: Chek hAARKAIV fi huola haitem
NEWS: See ARCHIVES for older posts

OPDIET: Jumiekan Wikipidia

Afta faib ier a wok bai wah anful a edita, deh nou ab uoba 1,000 enchri. Wikipidia a-go chuu di apruuval pruoses bifuo ilaanch, uopfuli suun. Ef yu waah si wa iluk laik go yaso. Beta stil, ef yu nuo Patwa ah kiah raiti in Kiasidi/JLU stailii, ton edita ah rait soh aatikl ar karek enting yu si waah karek.

After five years of work by a handful of editors, there are now over 1,000 entries. Wikipedia is going through the approval process prior to launching which, hopefully, will be soon. If you would like to see what it looks like, go here. Better still, if you know Patwa and can write it in Cassidy/JLU style, become an editor and write some entries or make corrections to those existing.

Caribbean delegates press for language rights

Delegates from at least 12 Caribbean countries, including two governors general, met in Jamaica for two days recently to press for the recognition of the rights of persons who speak Creole languages as a part of overall human rights. The delegates, including a number of linguists, said speakers of the region's Creole languages have a right to be communicated with in their first language, and not to be discriminated against in accessing important services, including education, health and the justice system. Participants also learned that in St Lucia the governor general delivers parts of her Throne Speech to Parliament in Antillean Creole, and that many words in Jamaican or Belizean patois are not a corruption of English, as is widely thought. The Conference on Language Policy in the Caribbean, hosted by the Jamaican Language Unit of the University of the West Indies (UWI), was held at the Mona campus on January 13 and 14. Full text: Jamaica Observer, 2011/1/30

Niem gaan abraad

Fraitn fi si ou deh rait wi op aal ina Joerman. Wat a ting!

Jamaikanisch – die gesprochene Sprache
Eine Mischung aus Englisch des 17. Jahrhunderts und dem westafrikanischen Twi
("Jamaican – the spoken language: a mixture of seventeenth-century English and West African Twi")

Ef yu kiah riid Joerman siit yaso.
Oldest Jamaican Creole Text

A 1781 text from the Cornwall Chronicle was discovered around 1997 by Maureen Warner Lewis in the course of her research. It is a major discovery for linguists studying Caribbean Creole languages. It is the oldest known text of Jamaican. In addition, it is the oldest text of a Caribbean English Creole outside of Suriname. It predates the next oldest known text, that for St Kitts, by at least 15 years. Full text

What about freedom from language discrimination?

Hubert Devonish is professor of linguistics and coordinator of the Jamaican Language Unit at the University of the West Indies. He makes the case for the inclusion of protection from language discrimination in the Charter of Rights before the Jamaican Parliament. Send comments to [email protected]. Photo: Gleaner

The Charter of Fundamental Rights and Freedoms is about to become law without any specific provision for freedom from discrimination on the grounds of language. This is against the background of a society in which two languages are used - English and Jamaican Creole. The former is the official language, but one in which all, except the educated minority, have limited competence. The latter is the native language of the vast majority of the population and is used with facility by all sectors of the population. English is the only language the institutions of government and state are required to use in the provision of services to the Jamaican public. We have a clear case of discrimination on the grounds of language, so why was freedom from language discrimination not included in the charter? Full text: Part I | Part II

UWI Researchers: Bilingual education yields better results

What language(s) should be used for instruction of Creole-speaking children in the Caribbean? This has been a subject of debate among educators and ministries of education in the region since the 1970s, triggered by the continuing problems with literacy in English among school children within the Commonwealth Caribbean. The Jamaican Language Unit (JLU) within the Department of Language, Linguistics & Philosophy at the University of the West Indies, Mona, spearheaded by Dr Karen Carpenter, developed the Bilingual Education Project (BEP) as a contribution to this debate. The BEP sought to provide empirical evidence to policymakers in Jamaica as to the best way to proceed on language education policy. It sought to test, in a real primary-school situation, the potential positive effects of using a Creole language, in this case Jamaican, alongside English as formal languages of instruction and literacy.

The BEP was designed as a way of testing the position taken in the official Language Education Policy of the Jamaican Ministry of Education and Culture. According to that policy, even though the use of both languages as subjects and in literacy and oral instruction was ideal, it was not actually possible in the Jamaican context. The project sought to test this by designing and implementing precisely such a project. The goal was to show how that which was said to be ideal could be turned into reality. The Ministry of Education and Culture, after careful consideration, gave its approval for the project to proceed. The BEP was implemented in 2004 in three publicly funded primary schools. It tracked, over a four-year period, a group of students taught in a fully bilingual programme, i.e. taught literacy and language arts in both languages, and content subjects in both languages.
The BEP came to a close in July 2008, with the BEP children who had entered the programme in 2004 at Grade One completing Grade Four. Participation in the project was voluntary. This was true of the schools, of the participating teachers, who received special training, and of the parents of the pupils involved.

Promises

Among the promises made by the BEP were that:

- the BEP would produce an increase in Language Arts skill levels in English among pupils within the project relative to those in traditional modes of instruction;
- the BEP would produce an increase in absolute literacy levels of pupils in the project, as shown by their literacy in their native language, Jamaican, as compared with non-project pupils for whom English was the only language of literacy;
- the BEP children would show higher levels of competence in content subjects such as Mathematics, Science and Social Studies, since they were receiving instruction for these not only in English but also in their native language, Jamaican.

At the end of the third year of the project, in 2007, a comparison was made of the Grade Three diagnostic Literacy Test results of the project children and those taught by the traditional method in the same school. At that stage, the project children had already developed a level of literacy in English slightly higher than that of those who had not been in the programme. Based on the experiences of other such projects internationally, the projection was that this improvement should have taken place by the fourth year. In 2008, the same cohort of children took the National Grade Four Literacy Test. A preliminary analysis suggests that, again, the performance of the project children in English literacy skills was better than that of those taught in the traditional manner. Expert international reviewers were enlisted and conducted the biennial review required as part of the project design.

The overall results of the research are clear. An innovative approach to the language-education issue in Jamaica can indeed bring improved results in English Language literacy. The old approach sought to get rid of, or at least ignore, the children's native language, Jamaican. An approach which treated both languages equally has proved to produce better results. The fully bilingual approach, as is shown the world over, produces improved language communication and literacy skills across the board, not only in the native language but also in the second language - in this case, English.

The BEP and the research surrounding it have made another contribution: a model for the implementation of bilingual education in Jamaica. Elements of this model include a standard writing system for Jamaican, and the training of teachers, via a training manual and process, to present good models of English to their pupils by keeping the two languages apart. The BEP research is relevant not only to Jamaica but also to countries such as Belize and Guyana. It is being viewed with interest by linguists and language educators across the Caribbean. - Jamaica Gleaner, 2010/06/27

Playwright Trevor Rhone dies

Photo: Jamaica Observer

KINGSTON, Jamaica - Trevor Rhone, a Jamaican playwright who co-wrote the reggae film classic The Harder They Come and helped introduce the island's pop culture to the world, died Tuesday. He was 69. Rhone died after a heart attack at a hospital in Jamaica's capital, Kingston, according to his brother, Neville, and playwright Barbara Gloudon, a longtime friend and colleague.
Born in 1940, Rhone wrote more than a dozen plays, including his two-character comedy Two Can Play, about a Jamaican couple who leave poverty-torn Kingston for an unexpectedly complicated new life in the United States. But Rhone is best known for co-writing The Harder They Come, Jamaica's first feature film, in the early 1970s with Perry Henzell, a filmmaker who died in 2006. Starring reggae singer Jimmy Cliff, the film became an international success, and its pulsing soundtrack, which featured reggae performers including Toots and the Maytals and Desmond Dekker, became a worldwide top-seller. More ...

Pruotes fronta di Wait Ous. Nuot inkansistant pelin; shuda bi Tap di raas waar nou

Ode to 'Miss Lou'

Louise 'Miss Lou' Bennett-Coverley championed the use of Jamaican dialect through diverse mediums.

Louise Simone Bennett-Coverley would have turned 90 today. Miss Lou, as she was affectionately known, was many things to Jamaica: a folklorist, the first lady of comedy and our linguistic mother. Through her expressive poetry, engaging storytelling and natural good humour, Miss Lou conveyed the passion and vivacity of Jamaicans and their language. In the process, she engrained a sense of identity in a fledgling nation. Three years after her death in Canada, Miss Lou still profoundly epitomises the indigenous Jamaican genius, as has been seen in other areas such as music and sports. Full story - Jamaica Gleaner, 2009/09/07

Tenky Miss Lou, Tenky
By Joan Andrea Hutchinson
For J3 transcription see Pachiz

Mi a born Jamaican and mi proud
An yuh fi feel proud too
Fi walk roun an big up yuh chest
An say tanks to Miss Lou.

When she did start, she neva know
A how it would a go
An nuff nuff people wen da laugh
An a call her pappy show.

But she galang strang and stick it out
For she know say she did right
Inna her belly battam she did know one day
Dem would a see di light.

Entime trouble teck wi a Miss Lou wen put
Wi good name pon di map
And wen da push Jamaica heritage
An Laad, she wouldn stop.

She say, "Tek kin teet kibba heart bun"
Wen times neva so sweet
"Good luck will come as long as fowl
A scratch up dungle heap".

Nuff a dem wen tink she crazy
An nuff meck up dem face
How Miss Lou a chat dis boogooyagga Patwa
All ova di place.

For dem wen tink patwa was bad English
Dem neva know, poor ting
Dem wouldn tell dem pickney Nancy story
An folk song dem wouldn sing.

But a di jackass wid him long tail
Bag a coco comin dung
An did peel head jankro pon tree top
Jus meck dem head spin rung.

An lickle bi lickle dem start fi back her
Start fi fan her flame
An see deh, after fifty year
Miss Lou - a house hold name.

Now wi nuh shame fi chat wi owna language
An wi dah tank yuh fi it Miss Lou
Dem a teach it clear a university
An ongle sake a you.

Dem a mek flim, dem a write book
Dem a sing whole heap a song
An a say "Oh Patwa is a good language"
But yuh wen know dat all along.

So now wi tan up proud fi be Jamaican
An wi want di whole worl fi hear
Miss Lou, nuff tanks, for Howdy and Tenky
Neva bruck no square.

Wikipidia Jumiekan languij sait

Sens Disemba 2008, Wikipidia a tesout wah websait fi Jumieka languij. Efi wokout deh wi meki poermanent. Wail piipl a yaad a-gi out gens tiich patwa, di languij gaan abraad aal a mek insaiklopidia. Maitbi afta deh si se iaksep a farin, deh wi tekiop sens deh no siim fi hana eniting deh ab tel smadi els rekanaizi. Go yaso fi siit. Buot Lari Chang ah Javed Jaghai kanek tu LangwiJumieka a mieja kanchribiuta.

Since December 2008, Wikipedia has been testing a Jamaican language website.
If it is successful, it will be made permanent. While people at home are speaking out against teaching patois, the language has been accepted abroad for an encyclopedia. Now that it is gaining international acceptance, maybe they will take to it, since they seem unable to honor anything without prior external recognition. Go here to see it. Both Larry Chang and Javed Jaghai of LangwiJumieka are major contributors.

Olimpix Rivyuu / Olympics Review

AKSHAN TAAK is a Jamaican Language Company (JLC) production, done in association with the Jamaican Language Unit. It is a news commentary program done solely in Jamaican (more commonly referred to as Patwa or Jamaican Creole), which reports on how the media covers news relevant to Jamaica and Jamaicans in general. This inaugural episode looks at the recently held 2008 Olympic Games and the different angles from which various media centers covered the phenomenal performance of the Jamaican athletic team.

Jumiekan iina Webster's Online Dictionary

No nuo frah wen bot Jumiekan meki iina Webster's Online Dictionary. Dem ab a gudli lis a wod ah hexpreshan wid definishan; yu nuo se a no nuo baan Jumiekan a kompaili far bikaazn se som a dem no kwait kech di riek, bot iyuusful az refrans. Beg yu nuot se muos a di pelin fala miizolek aatagrafi, wa wi wuda kaal fala-fashin Ingglish pelin wa no riili gi di dairek soun, so kieful ou yu fala dem.

We are unsure when, but Jamaican has made it into Webster's Online Dictionary. There is an extensive list of words and expressions with definitions; you can tell it was not compiled by a native Jamaican, since some of them are a little off, but it is useful as a reference. Please note that most of the spelling follows mesolectal orthography, or English-based approximations, which do not represent the true sounds, so be careful in adopting them.
package gorgonia

import (
	"github.com/pkg/errors"
	"gonum.org/v1/gonum/graph"
)

/*
This file holds code for symbolic differentiation.
The purpose of the symbolic differentiation is to analyze and prepare the nodes for automatic differentiation.

The main function that does all the magic is in Backpropagate().

see also: http://colah.github.io/posts/2015-08-Backprop/
*/

// forwardDiffAnalysis returns the nodes that affect outputs.
//
// Given a list of outputs, we want to know which nodes will affect the output
func forwardDiffAnalysis(outputs, sortedNodes Nodes) (retVal NodeSet, err error) {
	symdiffLogf("Forward analysis. Already sorted?")
	enterLogScope()
	defer leaveLogScope()

	if !outputs.AllSameGraph() {
		return nil, errors.New("The supplied output Nodes are not the same graph")
	}

	diffSet := outputs.mapSet()

	symdiffLogf("Diff Set: %v", diffSet)
	symdiffLogf("%d", sortedNodes)
	for _, n := range sortedNodes {
		if diffSet.Contains(n) && !n.isInput() {
			diffs := n.diffWRT()
			for j, child := range n.children {
				d := diffs[j]
				if d {
					symdiffLogf("Adding %x to differentiable set", child.ID())
					diffSet.Add(child)
				}
			}
		}
	}
	return diffSet, nil
}

// backwardDiffAnalysis returns a list of Nodes that are affected by differentiating output.
// Given a list of WRTs, we want to find a list of nodes that will be affected when backpropagating.
func backwardDiffAnalysis(wrt, sortedNodes Nodes) (retVal NodeSet, err error) {
	symdiffLogf("Backwards analysis")
	enterLogScope()
	defer leaveLogScope()

	if !wrt.AllSameGraph() {
		return nil, errors.New("The supplied output Nodes are not the same graph")
	}

	diffSet := wrt.mapSet()
	symdiffLogf("wrt:%d diffset: %d", len(wrt), len(diffSet))
	symdiffLogf("%v", diffSet)
	symdiffLogf("sorted: %d", sortedNodes)

	enterLogScope()
	for i := len(sortedNodes) - 1; i >= 0; i-- {
		n := sortedNodes[i]
		symdiffLogf("working on %v. Has %d children", n, len(n.children))

		var op SDOp
		var ok bool
		var diffs []bool
		if op, ok = n.op.(SDOp); ok {
			diffs = op.DiffWRT(len(n.children))
		}

		symdiffLogf("differentiable WRT: %v", diffs)
		enterLogScope()
		symdiffLogf("Children: %v", n.children)
		if len(diffs) == 0 {
			// check if this makes nodes unreachable. If it does, then error out
			if n.isStmt {
				symdiffLogf("Statement nodes are Non differentiable!")
				leaveLogScope()
				continue
			} else if n.isInput() {
				symdiffLogf("Input nodes are Non differentiable")
				leaveLogScope()
				continue
			} else if len(n.children) == 0 {
				symdiffLogf("Leaf nodes have no children")
				leaveLogScope()
				continue
			}

			g := n.g
			for _, child := range n.children {
				parents := graph.NodesOf(g.To(child.ID()))
				if len(parents) == 1 && len(child.children) > 0 {
					leaveLogScope()
					return nil, errors.Errorf("Being unable to differentiate %v would leave a portion of the graph unreachable. Unable to continue", n)
				}
			}
			symdiffLogf("SKIPPING... Non differentiable!")
			leaveLogScope()
			continue
		}

	inner:
		for j, child := range n.children {
			d := diffs[j]
			if diffSet.Contains(child) && d {
				symdiffLogf("Adding %x to differentiable set", child.ID())
				diffSet.Add(n)
				break inner
			}
		}
		leaveLogScope()
	}
	leaveLogScope()
	return diffSet, nil
}

// Backpropagate backpropagates errors by performing reverse-mode symbolic differentiation, starting from the outputs, and working its way towards the inputs.
//
// This is the rough algorithm:
//	1. Filter out nodes that are unreachable
//	2. Forwards analysis, where a list of nodes affecting the output is added to consideration
//	3. Backwards analysis, where a list of nodes affected by differentiating the output are added to the consideration
//	4. If there is a difference in both sets, it will cause an error (both sets should be the same)
//	5. Traverse the graph from output towards input. On each visit, perform the symbolic differentiation
//
// For most cases, Grad() should be used instead of Backpropagate(), as Grad() performs several checks which would be the general use case, before calling Backpropagate()
func Backpropagate(outputs, gradOutputs, wrt Nodes) (retVal Nodes, err error) {
	symdiffLogf("BACKPROP START")
	symdiffLogf("Outputs: %d", outputs)
	symdiffLogf("gradOutputs: %d", gradOutputs)
	symdiffLogf("WRT: %d", wrt)

	enterLogScope()
	defer leaveLogScope()

	g := outputs[0].g

	// this entire section about removing foreveralone nodes needs a rethink
	symdiffLogf("removing foreveralone nodes")
	enterLogScope()
	for i := 0; i < len(g.AllNodes()); i++ {
		n := g.AllNodes()[i]

		fr := g.From(n.ID()).Len()
		to := g.To(n.ID()).Len()

		if fr == 0 && to == 0 && !n.isConstant() && !n.isInput() {
			g.RemoveNode(n)
			symdiffLogf("removed %v(%p); %x; %s", n, n, n.ID(), n.Name())
		}
	}
	leaveLogScope()

	var sortedNodes Nodes
	if sortedNodes, err = Sort(g); err != nil {
		return nil, errors.Wrap(err, sortFail)
	}
	symdiffLogf("sorted nodes: %v", sortedNodes)
	symdiffLogf("sorted nodes: %d", sortedNodes)

	var affectsOutput NodeSet
	var affectedByOutput NodeSet
	if affectsOutput, err = forwardDiffAnalysis(outputs, sortedNodes); err != nil {
		return nil, errors.Wrap(err, "Failed during forward differentiation analysis")
	}

	if affectedByOutput, err = backwardDiffAnalysis(wrt, sortedNodes); err != nil {
		return nil, errors.Wrap(err, "Failed during backward differentiation analysis")
	}

	symdiffLogf("affects output: %v", affectsOutput)
	symdiffLogf("affected by output : %v", affectedByOutput)

	wrtSet := wrt.mapSet()
	badWRTs := wrtSet.Difference(affectsOutput)
	if len(badWRTs) > 0 {
		return nil, SymDiffError{nodes: badWRTs.ToSlice(), err: errors.Errorf("Non Differentiable WRTs: %v", badWRTs)}
	}

	outputSet := outputs.mapSet()
	badOutputs := outputSet.Difference(affectedByOutput)
	if len(badOutputs) > 0 {
		symdiffLogf("badOutputs: %#v", badOutputs)
		return nil, SymDiffError{nodes: badOutputs.ToSlice(), err: errors.Errorf("Non-Differentiable Outputs: %v", badOutputs)}
	}

	// map a node to a list of gradient terms
	// these gradient terms will be summed up when we visit the node
	// when iterating through the nodes in reverse topological order
	nodeGradMap := make(map[*Node]Nodes)
	for i, n := range outputs {
		symdiffLogf("Adding outputs for %x", n.ID())
		nodeGradMap[n] = Nodes{gradOutputs[i]}
	}

	// "active" nodes are the ones that are differentially influenced by the inputs
	// and also differentiably influence the outputs. These are the nodes where we need to call the
	// "pullback" function to backpropagate derivatives
	activeNodes := affectsOutput.Intersect(affectedByOutput)

	symdiffLogf("Active: %v", activeNodes)

	symdiffLogf("Sorted: %d", sortedNodes)
	symdiffLogf("nodeGradMap: %+#d", FmtNodeMap(nodeGradMap))
	enterLogScope()

	for _, node := range sortedNodes {
		if _, ok := activeNodes[node]; !ok {
			symdiffLogf("skipping %x", node.ID())
			continue
		}

		if node.deriv != nil {
			symdiffLogf("skipping %x - previously differentiated", node.ID())
			nodeGradMap[node] = append(nodeGradMap[node], node.deriv)
			continue
		}

		symdiffLogf("Working on %x %v", node.ID(), node)
		enterLogScope()

		// Check if there are any grads coming into this node
		if len(nodeGradMap[node]) < 1 {
			leaveLogScope()
			return nil, SymDiffError{
				single:  node,
				gradMap: nodeGradMap,
				err:     errors.New("No gradients found for node"),
			}
		}

		// once we've reached a node, we have already backpropagated from its dependents
		// so we sum up the gradients
		symdiffLogf("nodeGradMap[%x]: %d", node.ID(), nodeGradMap[node])
		if len(nodeGradMap[node]) > 1 {
			var n *Node
			symdiffLogf("reduce adding")
			if n, err = ReduceAdd(nodeGradMap[node], WithGroupName(gradClust)); err != nil {
				leaveLogScope()
				return nil, SymDiffError{
					single:  node,
					nodes:   nodeGradMap[node],
					gradMap: nodeGradMap,
					err:     errors.Wrap(err, "ReduceAdd failed during differentiation"),
				}
			}
			symdiffLogf("reduced to... %x", n.ID())

			n.derivOf = append(n.derivOf, node)
			node.deriv = n
			nodeGradMap[node] = Nodes{n}
		} else if len(nodeGradMap[node]) == 1 {
			deriv := nodeGradMap[node][0]
			deriv.derivOf = append(deriv.derivOf, node)
			node.deriv = deriv
		}

		gradNode := nodeGradMap[node][0]
		if !node.isInput() {
			symdiffLogf("differentiating %x (%v)", node.ID(), node.op)
			enterLogScope()

			var op SDOp
			var childrenGrads Nodes
			var ok bool

			if op, ok = node.op.(SDOp); !ok {
				return nil, SymDiffError{
					single: node,
					err:    errors.New("Not a SymDifOp"),
				}
			}

			symdiffLogf("op: %v || optype: %v || node: %v || Children: %#Y || Grad: %v", node.op, node.op.Type(), node.t, node.children, gradNode)
			if childrenGrads, err = op.SymDiff(node.children, node, gradNode); err != nil {
				leaveLogScope()
				return nil, SymDiffError{
					single:  node,
					grad:    gradNode,
					gradMap: nodeGradMap,
					err:     errors.Wrapf(err, ".SymDiff() failed"),
				}
			}
			symdiffLogf("Derived(%d): %P", len(childrenGrads), childrenGrads)
			leaveLogScope()

			diffs := node.diffWRT()
			for i, child := range node.children {
				symdiffLogf("child is %v, i: %v", child, i)
				differentiable := diffs[i]
				childGrad := childrenGrads[i]

				if differentiable {
					childGrad.setGroup(gradClust)
					if grads, ok := nodeGradMap[child]; ok {
						grads = append(grads, childGrad)
						nodeGradMap[child] = grads
					} else {
						nodeGradMap[child] = Nodes{childGrad}
					}
				} else {
					symdiffLogf("Child %x is non differentiable", child.ID())
					if childGrad != nil {
						childGrad.setGroup(strayClust)
					}
				}
			}
		} else {
			symdiffLogf("iz input")
			symdiffLogf("%d ", nodeGradMap[node])
		}
		leaveLogScope()
	}
	leaveLogScope()

	// we have already summed up the gradients for the input nodes, so just take
	// the 0th element
	for _, n := range wrt {
		symdiffLogf("nodeGradMap wrt: %d", nodeGradMap[n])
		retVal = append(retVal, nodeGradMap[n][0])
	}
	return
}

// SetDerivOf is used to hack around the fundamental limitations of Gorgonia.
//
// Specifically it is used to set a node as the derivative of another node,
// used in the cuDNN version of batch norm.
//
// The cuDNN BatchNorm operation produces the derivatives for the scale and bias as a side effect
// of calculating the derivative of the input. Because Gorgonia's Ops are modelled as pure functions (and no tuples)
// this causes a bit of trouble. With the clever use of scratch space ops multireturn can be simulated.
// But this causes derivatives to not be set correctly.
func SetDerivOf(deriv, of *Node) {
	deriv.derivOf = append(deriv.derivOf, of)
	of.deriv = deriv
}
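Since the doc comment above recommends Grad() (which wraps Backpropagate() with the usual validity checks) for everyday use, here is a minimal usage sketch. It follows gorgonia's public API as I understand it (NewGraph, NewScalar, Mul, Grad, NewTapeMachine); treat it as an illustration rather than canonical project documentation, and check it against the gorgonia version you are using.

package main

import (
	"fmt"

	G "gorgonia.org/gorgonia"
)

func main() {
	g := G.NewGraph()

	// Build y = x * x symbolically.
	x := G.NewScalar(g, G.Float64, G.WithName("x"))
	y := G.Must(G.Mul(x, x))

	// Grad() performs the forward/backward analyses described above and
	// returns dy/dx as a new symbolic node in the same graph.
	grads, err := G.Grad(y, x)
	if err != nil {
		panic(err)
	}

	// Running the graph evaluates both y and the gradient node.
	machine := G.NewTapeMachine(g)
	defer machine.Close()

	G.Let(x, 3.0)
	if err := machine.RunAll(); err != nil {
		panic(err)
	}
	fmt.Println(grads[0].Value()) // expect 6.0, since d(x^2)/dx = 2x at x = 3
}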
package solus

import (
	"net/http"
	"net/http/httptest"
	"net/url"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestAllowInsecure(t *testing.T) {
	c := &Client{
		HTTPClient: &http.Client{
			Transport: &http.Transport{},
		},
	}

	AllowInsecure()(c)
	require.True(t, c.HTTPClient.Transport.(*http.Transport).TLSClientConfig.InsecureSkipVerify)
}

func TestSetRetryPolicy(t *testing.T) {
	c := &Client{}
	SetRetryPolicy(1, time.Second)(c)
	assert.Equal(t, 1, c.Retries)
	assert.Equal(t, time.Second, c.RetryAfter)
}

type fakeLogger struct{}

func (fakeLogger) Debugf(string, ...interface{}) {}
func (fakeLogger) Errorf(string, ...interface{}) {}

func TestWithLogger(t *testing.T) {
	c := &Client{}
	l := fakeLogger{}
	WithLogger(l)(c)
	assert.Equal(t, l, c.Logger)
}

func TestEmailAndPasswordAuthenticator_Authenticate(t *testing.T) {
	authenticator := EmailAndPasswordAuthenticator{
		Email:    "<EMAIL>",
		Password: "<PASSWORD>",
	}

	t.Run("positive", func(t *testing.T) {
		credentials := Credentials{
			AccessToken: "access token",
			TokenType:   "token type",
			ExpiresAt:   "expires at",
		}

		s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			assert.Equal(t, http.MethodPost, r.Method)
			assert.Equal(t, "/auth/login", r.URL.Path)

			assertRequestBody(t, r, AuthLoginRequest{
				Email:    "<EMAIL>",
				Password: "<PASSWORD>",
			})

			writeResponse(t, w, http.StatusOK, AuthLoginResponse{
				Credentials: credentials,
			})
		}))
		defer s.Close()

		u, err := url.Parse(s.URL)
		require.NoError(t, err)

		c, err := NewClient(u, authenticator)
		require.NoError(t, err)
		require.Equal(t, credentials, c.Credentials)
	})

	t.Run("negative", func(t *testing.T) {
		t.Run("failed to make request", func(t *testing.T) {
			_, err := NewClient(&url.URL{}, authenticator, SetRetryPolicy(0, 0))
			require.EqualError(t, err, `Post "/auth/login": unsupported protocol scheme ""`)
		})

		t.Run("invalid status", func(t *testing.T) {
			s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
				w.WriteHeader(http.StatusBadRequest)
			}))
			defer s.Close()

			u, err := url.Parse(s.URL)
			require.NoError(t, err)

			_, err = NewClient(u, authenticator)
			require.EqualError(t, err, "HTTP POST auth/login returns 400 status code")
		})
	})
}

func TestAPITokenAuthenticator_Authenticate(t *testing.T) {
	const token = "foo"

	authenticator := APITokenAuthenticator{
		Token: token,
	}

	c, err := NewClient(&url.URL{}, authenticator)
	require.NoError(t, err)
	require.Equal(t, Credentials{
		AccessToken: token,
		TokenType:   "Bearer",
		ExpiresAt:   "",
	}, c.Credentials)
}
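The `AllowInsecure()(c)` and `SetRetryPolicy(1, time.Second)(c)` shapes in these tests are Go's functional-options pattern: each option is a function returned by a constructor that mutates the client. A minimal standalone sketch of the idea follows; the type and field names here are illustrative stand-ins, not the solus package's actual definitions.

package main

import (
	"fmt"
	"time"
)

// Client is a stand-in for the configurable struct under test.
type Client struct {
	Retries    int
	RetryAfter time.Duration
}

// ClientOption is the option type: a function that mutates the client.
type ClientOption func(*Client)

// SetRetryPolicy returns an option that configures retry behavior.
func SetRetryPolicy(retries int, after time.Duration) ClientOption {
	return func(c *Client) {
		c.Retries = retries
		c.RetryAfter = after
	}
}

// NewClient applies each option in order to a zero-valued client.
func NewClient(opts ...ClientOption) *Client {
	c := &Client{}
	for _, o := range opts {
		o(c)
	}
	return c
}

func main() {
	c := NewClient(SetRetryPolicy(3, time.Second))
	fmt.Println(c.Retries, c.RetryAfter) // 3 1s
}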
package main

import (
	"encoding/json"
	"flag"
	"net/http"
	"strconv"
	"time"

	"github.com/boltdb/bolt"
	gfeeds "github.com/gorilla/feeds"
	"github.com/gorilla/mux"
	"github.com/ngaut/log"
)

var (
	addr   = flag.String("addr", ":10086", "http server listen port")
	dbFile = flag.String("dbfile", ".anyrss.db", "db file path")
)

var (
	// bucket names
	settingsBucketName      = []byte("settings")
	channelsBucketName      = []byte("channels")
	channelBucketNamePrefix = []byte("channel:")

	// const keys
	settingsIdKey = []byte("id")
)

var db *bolt.DB
var idChan chan int

func genGlobalId() int {
	return <-idChan
}

func init() {
	flag.Parse()

	var err error
	db, err = bolt.Open(*dbFile, 0600, nil)
	if err != nil {
		log.Fatal(err)
	}

	// load global id
	var globalId int
	if err = db.Update(func(tx *bolt.Tx) error {
		b, err := tx.CreateBucketIfNotExists(settingsBucketName)
		if err != nil {
			return err
		}
		r := b.Get(settingsIdKey)
		if r == nil {
			globalId = 1000
		} else {
			globalId, err = strconv.Atoi(string(r))
			if err != nil {
				return err
			}
		}
		return nil
	}); err != nil {
		log.Fatal(err)
	}

	// run gen id routine
	idChan = make(chan int)
	go func(globalId int) {
		for {
			if err := db.Update(func(tx *bolt.Tx) error {
				b := tx.Bucket(settingsBucketName)
				globalId += 1
				err := b.Put(settingsIdKey, []byte(strconv.Itoa(globalId)))
				if err != nil {
					return err
				}
				return nil
			}); err != nil {
				log.Fatal(err)
			}
			idChan <- globalId
		}
	}(globalId)

	log.Info("initialize successfully")
}

// channel handler
// PUT: create new channel
func channelPutHandler(w http.ResponseWriter, r *http.Request) {
	decoder := json.NewDecoder(r.Body)
	var channel Channel
	err := decoder.Decode(&channel)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	err = channel.Save()
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	b, _ := json.MarshalIndent(channel, "", " ")
	w.Write(b)
}

// POST: post feed to channel
func channelPostHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	channelName := vars["channel"]

	c, err := GetChannelByName(channelName)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	decoder := json.NewDecoder(r.Body)
	var feed Feed
	err = decoder.Decode(&feed)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	if !feed.Valid() {
		w.WriteHeader(500)
		w.Write([]byte("invalid feed"))
		return
	}

	exists, err := c.HasFeed(&feed)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}
	if exists {
		w.WriteHeader(500)
		w.Write([]byte("feed already exists"))
		return
	}

	feed.Id = genGlobalId()
	feed.CreateAt = time.Now()
	feed.Hash = feed.CalcHash()
	feed.ChannelName = channelName

	err = c.AddFeed(&feed)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	b, _ := json.MarshalIndent(feed, "", " ")
	w.Write(b)
}

// GET: get channel feed list
func channelGetHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	channelName := vars["channel"]

	c, err := GetChannelByName(channelName)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}
	if c == nil {
		w.WriteHeader(500)
		w.Write([]byte("no such channel"))
		return
	}

	feeds, err := c.GetFeeds(0, 100)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	b, _ := json.MarshalIndent(feeds, "", " ")
	w.Write(b)
}

// GET: render channel as RSS
func channelRssHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)
	channelName := vars["channel"]

	c, err := GetChannelByName(channelName)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}
	if c == nil {
		w.WriteHeader(500)
		w.Write([]byte("no such channel"))
		return
	}

	feeds, err := c.GetFeeds(0, 100)
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}

	f := &gfeeds.Feed{
		Title:  channelName,
		Link:   &gfeeds.Link{},
		Author: &gfeeds.Author{},
	}

	var fs []*gfeeds.Item
	for _, feed := range feeds {
		fs = append(fs, feed.ToGorillaFeedItem())
	}
	f.Items = fs

	rss, err := f.ToRss()
	if err != nil {
		w.WriteHeader(500)
		w.Write([]byte(err.Error()))
		return
	}
	w.Write([]byte(rss))
}

func main() {
	r := mux.NewRouter()
	r.HandleFunc("/c/{channel:[a-zA-Z0-9]+}", channelGetHandler).Methods("GET")
	r.HandleFunc("/c/{channel:[a-zA-Z0-9]+}", channelPostHandler).Methods("POST")
	r.HandleFunc("/rss/{channel:[a-zA-Z0-9]+}", channelRssHandler).Methods("GET")
	r.HandleFunc("/c", channelPutHandler).Methods("PUT")
	http.Handle("/", r)
	log.Fatal(http.ListenAndServe(*addr, nil))
}
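A quick way to exercise the routes registered in main(): PUT /c to create a channel, then GET /rss/{channel} to fetch the RSS rendering. The sketch below is hedged: the Channel and Feed structs are defined outside main.go, so the "name" JSON field is an assumption about their shape, not taken from this file.

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Create a channel. The "name" field is a guess at the Channel
	// struct's JSON tags, which live in another file of the package.
	body := bytes.NewBufferString(`{"name": "demo"}`)
	req, err := http.NewRequest(http.MethodPut, "http://localhost:10086/c", body)
	if err != nil {
		panic(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	// Fetch the channel's RSS rendering.
	rss, err := http.Get("http://localhost:10086/rss/demo")
	if err != nil {
		panic(err)
	}
	defer rss.Body.Close()
	out, _ := io.ReadAll(rss.Body)
	fmt.Println(string(out))
}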
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

/**
 * @author ariscdc
 * Aris Dela Cruz
 * https://github.com/ariscdc
 *
 * The number of subsets of a given set is 2^k, where k is the number of elements in the set.
 * Example:
 * {}      -> 2^0 -> 1 -> {}
 * {1}     -> 2^1 -> 2 -> {}, {1}
 * {1,2}   -> 2^2 -> 4 -> {}, {1}, {2}, {1,2}
 * {1,2,3} -> 2^3 -> 8 -> {}, {1}, {2}, {1,2}, {3}, {1,3}, {2,3}, {1,2,3}
 *
 * Time Complexity: O(2^n)
 * Since we have to enumerate each subset, this complexity cannot be reduced further.
 *
 * 20160210 1705-1755 (50 mins.)
 */
public class GenerateAllSubsets {

    public static List<Set<Integer>> getSubsets(Integer[] set, int index) {
        List<Set<Integer>> newSubsets = new LinkedList<>();
        if (index < 0) {
            // Base case: the empty prefix has exactly one subset, the empty set.
            newSubsets.add(new LinkedHashSet<>());
            return newSubsets;
        }

        // The subsets of set[0..index] are the subsets of set[0..index-1]
        // plus a copy of each of those with set[index] added.
        List<Set<Integer>> subsets = getSubsets(set, index - 1);
        for (Set<Integer> subset : subsets) {
            Set<Integer> newSubset = new LinkedHashSet<>();
            for (Integer element : subset) {
                newSubset.add(element);
            }
            newSubset.add(set[index]);
            newSubsets.add(newSubset);
        }
        subsets.addAll(newSubsets);
        return subsets;
    }

    public static void main(String[] args) {
        Integer[] set = { 1, 2, 3, 4 };
        List<Set<Integer>> subsets = getSubsets(set, set.length - 1);

        System.out.println("Number of Subsets: " + subsets.size());
        System.out.println("Subsets:");
        System.out.println(subsets);
    }
}
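The same 2^k counting argument also yields a non-recursive formulation: interpret each integer in [0, 2^k) as a bitmask choosing which elements belong to the subset. The sketch below (in Go, as a hypothetical alternative; it is not part of the original Java file) shows the idea.

package main

import "fmt"

// subsets enumerates all 2^n subsets of set by treating each integer
// mask in [0, 2^n) as a membership bitmask: bit i set means set[i]
// belongs to the subset.
func subsets(set []int) [][]int {
	n := len(set)
	out := make([][]int, 0, 1<<n)
	for mask := 0; mask < 1<<n; mask++ {
		var subset []int
		for i := 0; i < n; i++ {
			if mask&(1<<i) != 0 {
				subset = append(subset, set[i])
			}
		}
		out = append(out, subset)
	}
	return out
}

func main() {
	fmt.Println(subsets([]int{1, 2, 3})) // prints all 2^3 = 8 subsets
}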
class HIN: """ HIN: Heterogeneous Information Network Object """ def __init__(self,filename=None,table=None,name=None,inverse_relations=True, verbose=False): # If there is no table, create from file if table is None: if filename is None: raise ValueError('HIN object has to be created from a table or from a file.') columns=['relation', 'start_group','start_object', 'end_group','end_object', 'value','timestamp'] t=TCounter() VerboseMessage(verbose,'Reading table from %s ...'%filename) table=pd.read_csv(filename,sep=',',header=None,names=columns,low_memory=False) VerboseMessage(verbose,'Table read in %s.'%ETSec2ETTime(TCounter()-t)) # Cheking the table t=TCounter() table = CheckTable(table) VerboseMessage(verbose,'Table checked in %s.'%ETSec2ETTime(TCounter()-t)) # TODO: Aggregation of multi edges: None, Average, Sum # Filling the fields self.table = table self.name = name self.info = {} # Building Object and Link Groups from Table t=TCounter() self.ReBuildObjectGroupsFromTable(verbose) VerboseMessage(verbose,'Object Groups built in %s.'%ETSec2ETTime(TCounter()-t)) t=TCounter() self.ReBuildLinkGroupsFromTable(verbose) VerboseMessage(verbose,'Link Groups built in %s.'%ETSec2ETTime(TCounter()-t)) if inverse_relations: for relation_name in self.table.relation.unique(): self.CreateInverseLinkGroup(relation_name,verbose=verbose) return; ########################################### # Functions Changing Link Groups # ########################################### # New ones ! def CreateLinkGroup(self,linkgroup,name, datetimes=None, condition=None, verbose=False): if (datetimes!=None and condition!=None) or (datetimes==None and condition==None): raise ValueError('To create a link group you have to provide datetime bounds or (XOR) a condition method.') if datetimes!=None: # Get the group ids of the Link Group og_start = self.object_group_dic[self.GetLinkGroup(linkgroup).start_id] og_end = self.object_group_dic[self.GetLinkGroup(linkgroup).end_id] # Getting subtable of the Link Group subtable=self.table[self.table.relation==linkgroup].copy(deep=True) # Applyting the condition subtable=subtable[(subtable.timestamp>=pd.Timestamp(datetimes['min']))&(subtable.timestamp<=pd.Timestamp(datetimes['max']))] # Changing name subtable.loc[:,'relation'] = name # Saving the new Link Group self.table = self.table.append(subtable).reset_index(drop=True) lg_id = self.GetNewLinkGroupID() self.link_group_dic[lg_id] = LinkGroup(table=subtable, name=subtable.relation.iloc[0], id=lg_id, start_og=og_start, end_og=og_end, verbose=verbose) return; if condition!=None: raise ValueError('Link group creation with ') # TODO: re-organize and move to link_group_functions def CreateInverseLinkGroup(self,existing_relation_name,new_relation_name=None,verbose=False): # Checking that the relation exists if existing_relation_name not in self.table.relation.unique(): raise ValueError('Relation %s does not exist.'%existing_relation_name) # Selecting the sub table of the relation to inverse subtable=self.table[self.table.relation==existing_relation_name].copy(deep=True) # Creating the new, appendable, subtable with the inverse relation new_subtable=pd.DataFrame(columns=subtable.columns) # Filling the entries of the new appendable subtable new_subtable.start_group=subtable.end_group new_subtable.start_object=subtable.end_object new_subtable.end_group=subtable.start_group new_subtable.end_object=subtable.start_object new_subtable.timestamp=subtable.timestamp new_subtable.value=subtable.value # Giving a name to the new relation if new_relation_name 
is None: new_subtable.relation='inverse_'+existing_relation_name else: new_subtable.relation=new_relation_name # Appending the table and changing the HIN self.table=self.table.append(new_subtable).reset_index(drop=True) new_link_group_id = self.GetNewLinkGroupID() sog_name=new_subtable.start_group.iloc[0] eog_name=new_subtable.end_group.iloc[0] self.link_group_dic[new_link_group_id] = LinkGroup(table=new_subtable, name=new_subtable.relation.iloc[0], id=new_link_group_id, start_og=self.GetObjectGroup(sog_name), end_og=self.GetObjectGroup(eog_name), verbose=verbose) def CreateSubsampledLinkGroup(self,relation_name,new_relation_name,fraction, per_start_object=True,verbose=False): # Get the group ids of the Link Group og_start = self.object_group_dic[self.GetLinkGroup(relation_name).start_id] og_end = self.object_group_dic[self.GetLinkGroup(relation_name).end_id] # Getting subtable of the Link Group subtable=self.table[self.table.relation==relation_name].copy(deep=True) # Subsampling if per_start_object: grouped = subtable.groupby('start_object') subtable = grouped.apply(lambda x: x.sample(frac=fraction)) else: subtable=subtable.sample(frac=fraction) # Changing name subtable.loc[:,'relation'] = new_relation_name # Saving the new Link Group self.table = self.table.append(subtable).reset_index(drop=True) lg_id = self.GetNewLinkGroupID() self.link_group_dic[lg_id] = LinkGroup(table=subtable, name=subtable.relation.iloc[0], id=lg_id, start_og=og_start, end_og=og_end, verbose=verbose) return; def DeleteLinkGroup(self,relation_name): self.link_group_dic.pop(self.GetLinkGroupId(relation_name)) self.table=self.table[self.table.relation!=relation_name].reset_index(drop=True) return; def MergeLinkGroups(self,relation_name, relation_name_to_merge, new_relation_name=None, delete_merged_relation=False, verbose=False): """ Merge contents of relation_name_to_merge table into relation_name table. 
""" # Get the group ids of the involved Link Groups og1_start = self.object_group_dic[self.GetLinkGroup(relation_name).start_id] og1_end = self.object_group_dic[self.GetLinkGroup(relation_name).end_id] og2_start = self.object_group_dic[self.GetLinkGroup(relation_name_to_merge).start_id] og2_end = self.object_group_dic[self.GetLinkGroup(relation_name_to_merge).end_id] # Check that relations start and end in the same Object Groups if (og1_start.id!=og2_start.id) or (og1_end.id!=og2_end.id): raise ValueError('Link Groups to be merged do not start and end in the same Object Groups.') # Subtable to be merged into a Link Group subtable = self.table[self.table.relation==relation_name].copy(deep=True) subtable_to_merge = self.table[self.table.relation==relation_name_to_merge].copy(deep=True) subtable_to_merge.loc[:,'relation'] = subtable.relation.iloc[0] merged_table=subtable.append(subtable_to_merge) self.table=self.table[self.table.relation!=relation_name] if new_relation_name is not None: merged_table.loc[:,'relation']=new_relation_name self.table=self.table.append(merged_table).reset_index(drop=True) lg_id = self.GetLinkGroupId(relation_name) self.link_group_dic[lg_id] = LinkGroup(table=merged_table, name=merged_table.relation.iloc[0], id=lg_id, start_og=og1_start, end_og=og1_end, verbose=verbose) if delete_merged_relation: self.DeleteLinkGroup(relation_name_to_merge) return; def CreateLinkGroupFromLinkGroup(self,relation_name,new_relation_name,condition_method, verbose=False): # Get the group ids of the Link Group og_start = self.object_group_dic[self.GetLinkGroup(relation_name).start_id] og_end = self.object_group_dic[self.GetLinkGroup(relation_name).end_id] # Getting subtable of the Link Group subtable=self.table[self.table.relation==relation_name].copy(deep=True) # Applyting the condition subtable=subtable[subtable.value.apply(condition_method)] # Changing name subtable.loc[:,'relation'] = new_relation_name # Saving the new Link Group self.table = self.table.append(subtable).reset_index(drop=True) lg_id = self.GetNewLinkGroupID() self.link_group_dic[lg_id] = LinkGroup(table=subtable, name=subtable.relation.iloc[0], id=lg_id, start_og=og_start, end_og=og_end, verbose=verbose) return; def CreateLinkGroupFromLinkGroupWithDates(self,relation_name,new_relation_name,limit_dates, verbose=False): # Get the group ids of the Link Group og_start = self.object_group_dic[self.GetLinkGroup(relation_name).start_id] og_end = self.object_group_dic[self.GetLinkGroup(relation_name).end_id] # Getting subtable of the Link Group subtable=self.table[self.table.relation==relation_name].copy(deep=True) # Applyting the condition subtable=subtable[(subtable.timestamp>=pd.Timestamp(limit_dates['min']))&(subtable.timestamp<=pd.Timestamp(limit_dates['max']))] # Changing name subtable.loc[:,'relation'] = new_relation_name # Saving the new Link Group self.table = self.table.append(subtable).reset_index(drop=True) lg_id = self.GetNewLinkGroupID() self.link_group_dic[lg_id] = LinkGroup(table=subtable, name=subtable.relation.iloc[0], id=lg_id, start_og=og_start, end_og=og_end, verbose=verbose) return; def CreateLinkGroupFromTable(self,new_table,new_relation_name=None,verbose=False): # Check table for start/end group uniqueness and existence, check consistent relation name if new_table.start_group.unique().size!=1 or new_table.end_group.unique().size!=1: raise ValueError('Table has links between more than two object groups.') # Get the group ids of the Link Group og_start_name = new_table.start_group.iloc[0] og_end_name = 
new_table.end_group.iloc[0] og_start = self.GetObjectGroup(og_start_name) og_end = self.GetObjectGroup(og_end_name) subtable = new_table.copy(deep=True) if new_relation_name is None: relation_name = new_table.loc[:,'relation'].iloc[0] else: relation_name = new_relation_name subtable.loc[:,'relation'] = new_relation_name # Saving the new Link Group self.table = self.table.append(subtable).reset_index(drop=True) lg_id = self.GetNewLinkGroupID() self.link_group_dic[lg_id] = LinkGroup(table=subtable, name=relation_name, id=lg_id, start_og=og_start, end_og=og_end, verbose=verbose) return; def CreateLinkGroupFromConfigurationModel(self,relation_name,new_relation_name, verbose=False): # Get the group ids of the Link Group og_start = self.object_group_dic[self.GetLinkGroup(relation_name).start_id] og_end = self.object_group_dic[self.GetLinkGroup(relation_name).end_id] # Getting subtable of the Link Group subtable=self.table[self.table.relation==relation_name].copy(deep=True) # Shuffling end objects subtable.loc[:,'end_object'] = subtable.loc[:,'end_object'].sample(frac=1).values # Changing name subtable.loc[:,'relation'] = new_relation_name # Saving the new Link Group self.table = self.table.append(subtable).reset_index(drop=True) lg_id = self.GetNewLinkGroupID() self.link_group_dic[lg_id] = LinkGroup(table=subtable, name=subtable.relation.iloc[0], id=lg_id, start_og=og_start, end_og=og_end, verbose=verbose) return; def CreateLinkGroupFromRS(self,relation_name,new_relation_name,parameters, verbose=False): """ """ # Creating the recommendation table predicted_table,report = HINRS(self,relation_name,parameters=parameters,verbose=verbose) predicted_table.loc[:,'relation']= new_relation_name # Creating the new Link Group from the recommendation self.table = self.table.append(predicted_table).reset_index(drop=True) new_link_group_id = self.GetNewLinkGroupID() lg = self.GetLinkGroup(relation_name) og_start = self.GetLinkGroupStartObjectGroup(lg.name) og_end = self.GetLinkGroupEndObjectGroup(lg.name) self.link_group_dic[new_link_group_id] = LinkGroup(table=predicted_table, name=new_relation_name, id=new_link_group_id, start_og=og_start, end_og=og_end, verbose=verbose) self.link_group_dic[new_link_group_id].info = report return; ########################################### # Build Object and Link Groups from Table # ########################################### def ReBuildObjectGroupsFromTable(self,verbose=False): self.object_group_dic = {} object_group_id = 0 for og_name in list(set(self.table.start_group.unique())|set(self.table.end_group.unique())): o_list = GetObjectsFromTableWithGroup(self.table,og_name) self.object_group_dic[object_group_id] = ObjectGroup(object_list=o_list, name=og_name, id=object_group_id, verbose=verbose) object_group_id+=1 return; def ReBuildLinkGroupsFromTable(self,verbose=False): self.link_group_dic = {} link_group_id=0 for lg_name in self.table.relation.unique(): sog_name = self.table[self.table.relation==lg_name].start_group.iloc[0] eog_name = self.table[self.table.relation==lg_name].end_group.iloc[0] self.link_group_dic[link_group_id] = LinkGroup(table=self.table[self.table.relation==lg_name], name=lg_name, id=link_group_id, start_og=self.GetObjectGroup(sog_name), end_og=self.GetObjectGroup(eog_name), verbose=verbose) link_group_id+=1 return ##################################### # HIN and Group Property Retrievers # ##################################### # TODO: re-organize and move to hin_functions # Get ObjectGroup from name def GetObjectGroup(self,name): for og_id,og in 
self.object_group_dic.items(): if og.name==name: return og; raise ValueError('Object Group %s not found'%name) # Get ObjectGroup at start of LinkGroup from name def GetLinkGroupStartObjectGroup(self,name): return self.GetObjectGroup(self.object_group_dic[self.GetLinkGroup(name).start_id].name) # Get ObjectGroup at end of LinkGroup from name def GetLinkGroupEndObjectGroup(self,name): return self.GetObjectGroup(self.object_group_dic[self.GetLinkGroup(name).end_id].name) # Get LinkGroup from name def GetLinkGroup(self,name): for lg_id,lg in self.link_group_dic.items(): if lg.name==name: return lg; raise ValueError('Link Group %s not found'%name) # Get LinkGroup density from name def GetLinkGroupDensity(self,name): sogs = self.GetLinkGroupStartObjectGroup(name).size eogs = self.GetLinkGroupEndObjectGroup(name).size lgs = self.GetLinkGroup(name).size return lgs/(sogs*eogs); # Get Ids of ObjectGroup from name def GetObjectGroupId(self,name): for og_id,og in self.object_group_dic.items(): if og.name==name: return og_id; raise ValueError('Object Group %s not found'%name) # Get Ids of LinkGroup from name def GetLinkGroupId(self,name): for lg_id,lg in self.link_group_dic.items(): if lg.name==name: return lg_id; raise ValueError('Link Group %s not found'%name) # Get names of ObjectGroups def GetObjectGroupsNames(self): return [og.name for og_id,og in self.object_group_dic.items()] # Get names of LinkGroups def GetLinkGroupsNames(self): return [lg.name for lg_id,lg in self.link_group_dic.items()] # Get def GetObjectGroupPositionDic(self,name): return self.GetObjectGroup(name).OjectPositionDicFromName(); # def GetObjectGroupObjectDic(self,name): return self.GetObjectGroup(name).OjectNameDicFromPosition(); # Get vacant id for new groups def GetNewLinkGroupID(self): return FirstAbsentNumberInList([k for k,v in self.link_group_dic.items()]) def GetNewObjectGroupID(self): return FirstAbsentNumberInList([k for k,v in self.object_group_dic.items()]) # Get path-related objects def GetPathStartGroupPositionDic(self,path): return self.GetLinkGroupStartObjectGroup(path[0]).OjectPositionDicFromName(); def GetPathEndGroupPositionDic(self,path): return self.GetLinkGroupEndObjectGroup(path[-1]).OjectPositionDicFromName(); ############################################## # Path Proportional Abundances & Diversities # ############################################## def stochastic_matrix(self,path): path=CheckPath(path) matrix=self.GetLinkGroup(path[0]).stochastic_matrix for relation in path[1:]: matrix=matrix*self.GetLinkGroup(relation).stochastic_matrix return matrix; def proportional_abundance(self,path,initial_p=None,include_sink=False): path=CheckPath(path) # Compute stochastic matrix for the path matrix = self.stochastic_matrix(path) # Get size of the start object group start_og = self.object_group_dic[self.GetLinkGroup(path[0]).start_id] if initial_p is not None: p=initial_p else: p=np.ones(start_og.size) p=p/p.sum() p=np.append(p,[0]) #<- probability of starting at sink = 0 pa = matrix.T.dot(p) if include_sink: return pa; else: return pa[:-1]/pa[:-1].sum(); def proportional_abundances(self,path,include_sink=False): path=CheckPath(path) matrix=self.stochastic_matrix(path) if include_sink: return matrix; else: matrix=matrix[:-1,:-1] return normalize(matrix,norm='l1',axis=1);#<- if all mass went to sink pa=0 def individual_diversities(self,path,alpha=1.0,include_sink=False): path=CheckPath(path) pas = self.proportional_abundances(path,include_sink=include_sink).tolil().data diversities=[] for p in pas: if 
np.abs(np.sum(p)-1.0)<1e-4:
                diversities.append(TrueDiversity(p,alpha))
            else:
                diversities.append(np.nan)
        return np.array(diversities)

    def mean_diversity(self,path,alpha=1.0,include_sink=False,method='arithmetic'):
        path=CheckPath(path)
        diversities = self.individual_diversities(path,alpha=alpha,include_sink=include_sink)
        # Computing the mean
        if method=='arithmetic':
            return diversities.mean()
        elif method=='geo':
            return gmean(diversities)
        elif method=='wpm':
            raise ValueError('Weighted Power Mean Method not implemented yet.')

    def collective_diversity(self,path,alpha=1.0,include_sink=False):
        path=CheckPath(path)
        p=self.proportional_abundance(path,include_sink=include_sink)
        if np.abs(p.sum()-0.0)<1e-6:
            raise ValueError('All mass was in the sink.')
        return TrueDiversity(p,alpha)

    ##############################################
    #             Value propagation              #
    ##############################################

    def path_value_aggregation(self,values_dic,path):
        """
        Aggregate values using a meta-path.
        Values in the ending object group are aggregated into values
        for the starting object group.
        """
        path=CheckPath(path)
        # Setting the ending object group position
        eg_pos_dic = self.GetPathEndGroupPositionDic(path)
        eg = self.GetLinkGroupEndObjectGroup(path[-1])
        # Checking that values conform to the ending obj. group
        # (length: dictionary has the same num. of elements)
        # if eg.size!=len(values_dic):
        #     raise ValueError
        # (inclusion: all keys are objects of the group)
        values_dic_keys = [k for k,v in values_dic.items()]
        if np.setdiff1d(eg.object_list,values_dic_keys).size>0:
            raise ValueError('Values must be provided for all objects in %s.'%eg.name)
        # Inverse ending object group position dictionary
        inv_eg_pos_dic = dict((v, k) for k, v in eg_pos_dic.items())
        # Put values from values_dic in a vector in the order given by eg_pos_dic
        e_values_vec = np.array([values_dic[inv_eg_pos_dic[i]] for i in range(eg.size)])
        # The proportional abundances matrix is
        # (starting obj. group size) x (ending obj. group size)
        PAM = self.proportional_abundances(path)
        s_values_vec = PAM.dot(e_values_vec)
        s_values_vec[np.ravel(PAM.sum(axis=1))==0.0] = np.nan
        # Putting values in a dictionary
        sg_pos_dic = self.GetPathStartGroupPositionDic(path)
        inv_sg_pos_dic = dict((v, k) for k, v in sg_pos_dic.items())
        ordered_s_objects = [v for k,v in collections.OrderedDict(sorted(inv_sg_pos_dic.items())).items()]
        return dict(zip(ordered_s_objects, s_values_vec))

    #####################################
    #              Plotters             #
    #####################################

    def plot_schema(self,filename=None,
                    node_size=700,layout='spring',arrow_size=10,
                    edge_labels=True,node_labels=True):
        table = self.table[~self.table.relation.apply(lambda r: r.startswith('inverse_'))].drop_duplicates(subset=['relation','start_group','end_group'])[['start_group','end_group','relation']]
        plot_hin(table,filename=filename,
                 node_size=node_size,layout=layout,arrowsize=arrow_size,
                 edge_labels=edge_labels,node_labels=node_labels)
        return

    # THESE ARE LEGACY FUNCTIONS (to be removed)
    #######################################################

    def GetPathStochasticMatrix(self,relation_list):
        path=CheckPath(relation_list)
        matrix=self.GetLinkGroup(path[0]).stochastic_matrix
        for relation in path[1:]:
            matrix=matrix*self.GetLinkGroup(relation).stochastic_matrix
        return matrix

    def GetPathProportionalAbundance(self,relation_list,
                                     start_object_subset=None,
                                     verbose=False):
        # Compute stochastic matrix for the path
        matrix = self.GetPathStochasticMatrix(relation_list)
        # Get size of the start object group
        start_og = self.object_group_dic[self.GetLinkGroup(relation_list[0]).start_id]
        # If no subset is given, there is a fast way
        if start_object_subset is None:
            P=np.ones(start_og.size)
            P=P/P.sum()
            P=np.append(P,[0])  # This zero is the probability of starting at the sink of the group
        else:
            # Else, we have to assemble the array P
            # TODO: check that it is a proper subset
            P=np.zeros(start_og.size+1)  # The last position is for the sink of the group
            positions = [start_og.objects_ids_queue.index(start_og.objects_ids[name]) for name in start_object_subset]
            P[positions] = 1
            P=P/P.sum()
        return matrix.T.dot(P)

    def GetSetCollectiveTrueDiversity(self,relation_list,alpha,
                                      start_object_subset=None,
                                      renormalize=True,
                                      verbose=False):
        P=self.GetPathProportionalAbundance(relation_list,start_object_subset=start_object_subset)
        # Move mass from the sink node to the rest of the nodes
        if renormalize:
            P=P[:-1]
            if P.sum()<1e-8:
                raise ValueError('Proportional Abundance cannot be renormalized because all mass was in the sink.')
            P=P/P.sum()
        return TrueDiversity(P,alpha)

    def GetSetMeanIndTrueDiversity(self,relation_list,alpha,
                                   method='geo',
                                   start_object_subset=None,
                                   verbose=False):
        t=TCounter()
        if method not in ['wpm','ar','geo']:
            raise ValueError('Invalid mean method. Accepted methods are wpm (weighted power mean), ar (arithmetic), or geo (geometric).')
        # Compute stochastic matrix for the path
        matrix = self.GetPathStochasticMatrix(relation_list).tolil()
        # Deleting proportional abundance of the sink start object
        PAs=matrix.data[:-1]
        # Selecting the proportional abundances of the start object subset
        if start_object_subset is not None:
            # Get the start object group to resolve subset positions
            start_og = self.object_group_dic[self.GetLinkGroup(relation_list[0]).start_id]
            positions = [start_og.objects_ids_queue.index(start_og.objects_ids[name]) for name in start_object_subset]
            PAs=PAs[positions]
        # Computing the diversity of each proportional abundance
        diversities=[]
        for P in PAs:
            diversities.append(TrueDiversity(P,alpha))
        diversities=np.array(diversities)
        # Computing the mean
        if method=='ar':
            return diversities.mean()
        elif method=='geo':
            return gmean(diversities)
        elif method=='wpm':
            raise ValueError('Weighted Power Mean Method not implemented yet.')

    def GetObjectSetTrueDiversities(self,relation_list,alpha,
                                    start_object_subset=None,
                                    verbose=False):
        # Compute stochastic matrix for the path
        matrix = self.GetPathStochasticMatrix(relation_list).tolil()
        # Deleting proportional abundance of the sink start object
        PAs=matrix.data[:-1]
        # Selecting the proportional abundances of the start object subset
        if start_object_subset is not None:
            # Get the start object group to resolve subset positions
            start_og = self.object_group_dic[self.GetLinkGroup(relation_list[0]).start_id]
            positions = [start_og.objects_ids_queue.index(start_og.objects_ids[name]) for name in start_object_subset]
            PAs=PAs[positions]
        # Computing the diversity of each proportional abundance
        diversities=[]
        for P in PAs:
            diversities.append(TrueDiversity(P,alpha))
        return np.array(diversities)

    ##############################################
    #     Classic Diversity Measures for RS      #
    ##############################################

    def SurprisalDivMes(self,relation_name,popularity_relation_name,verbose=False):
        popularity_table = self.table[self.table.relation==popularity_relation_name].copy(deep=True)
        recommended_table = self.table[self.table.relation==relation_name].copy(deep=True)
        return Surprisal(popularity_table,recommended_table,verbose=verbose)

    def NoveltyDivMes(self,relation_name,similarity_relation,verbose=False):
        table = self.table[self.table.relation==relation_name].copy(deep=True)
        sim_matrix = cosine_similarity(self.GetLinkGroup(similarity_relation).stochastic_matrix)
        object_position = self.GetLinkGroupStartObjectGroup(similarity_relation).OjectPositionDicFromName()
        return Novelty(table,sim_matrix,object_position,verbose=verbose)

    def IntraListSimilarityDivMes(self,relation_name,similarity_relation,verbose=False):
        table = self.table[self.table.relation==relation_name].copy(deep=True)
        sim_matrix = cosine_similarity(self.GetLinkGroup(similarity_relation).stochastic_matrix)
        object_position = self.GetLinkGroupStartObjectGroup(similarity_relation).OjectPositionDicFromName()
        return IntraListSimilarity(table,sim_matrix,object_position,verbose=verbose)

    def PersonalisationDivMes(self,relation_name,verbose=False):
        table = self.table[self.table.relation==relation_name].copy(deep=True)
        return Personalisation(table,verbose=verbose)
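The diversity methods above all delegate to a TrueDiversity helper defined elsewhere in this module. As a rough sketch of what that helper presumably computes — the Hill number (true diversity) of order alpha, under the standard definition — the following illustrative function mirrors the calls above; the name, signature, and handling of alpha == 1 are assumptions, not the module's actual code.

import numpy as np

def true_diversity(p, alpha=1.0):
    """Hill number of order alpha for a proportional-abundance vector p.

    D_alpha = (sum_i p_i**alpha) ** (1 / (1 - alpha)); as alpha -> 1 this
    converges to exp(Shannon entropy). Assumes p sums to 1.
    """
    p = np.asarray(p, dtype=float)
    p = p[p > 0]  # zero-abundance categories contribute nothing
    if np.isclose(alpha, 1.0):
        return float(np.exp(-np.sum(p * np.log(p))))
    return float(np.sum(p ** alpha) ** (1.0 / (1.0 - alpha)))

# A uniform distribution over k categories has diversity k at every order:
print(true_diversity([0.25, 0.25, 0.25, 0.25], alpha=2.0))  # 4.0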
<gh_stars>1-10
#ifndef EXPONENTIALSCALESLIDER_H
#define EXPONENTIALSCALESLIDER_H

#include <QWidget>

namespace Ui {
class ExponentialSlider;
}

class ExponentialSlider : public QWidget
{
    Q_OBJECT

public:
    explicit ExponentialSlider(QWidget *parent = nullptr);
    ~ExponentialSlider();

    float value();
    void setValue(float value);
    void setEndcapBehavior(bool lowerEndcap, float lowerEndcapValue,
                           bool upperEndcap, float upperEndcapValue);
    void setExponentialMapping(float base, float exponentMin, float exponentMax);

signals:
    void onValueChanged(float value);

private slots:
    void on_horizontalSlider_valueChanged(int value);
    void on_signLabel_linkActivated(const QString &link);

private:
    Ui::ExponentialSlider *ui;

    void updateSignLabel();

    bool _enableLowerEndcap;
    bool _enableUpperEndcap;
    float _lowerEndcapValue;
    float _upperEndcapValue;
    float _base;
    float _exponentMin;
    float _exponentMax;
    bool _negative;
};

#endif // EXPONENTIALSCALESLIDER_H
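The header above only declares the widget's interface; the mapping implied by setExponentialMapping(base, exponentMin, exponentMax) lives in the unseen .cpp file. As a hedged sketch of how such a slider typically works — a linear slider position mapped to base raised to a linearly interpolated exponent — written in Python for brevity; all names and ranges here are illustrative assumptions, not the Qt implementation:

def slider_to_value(pos, pos_min, pos_max, base=10.0, exp_min=-3.0, exp_max=3.0):
    """Map an integer slider position onto base**exponent, with the exponent
    interpolated linearly between exp_min and exp_max."""
    t = (pos - pos_min) / float(pos_max - pos_min)  # normalize to [0, 1]
    exponent = exp_min + t * (exp_max - exp_min)
    return base ** exponent

# A slider with range 0..100 then spans 10**-3 .. 10**3:
print(slider_to_value(0, 0, 100))    # 0.001
print(slider_to_value(100, 0, 100))  # 1000.0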
<filename>src/ios_tools/chrome/browser/ui/bookmarks/bookmark_promo_cell.h
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef IOS_CHROME_BROWSER_UI_BOOKMARKS_BOOKMARK_PROMO_CELL_H_
#define IOS_CHROME_BROWSER_UI_BOOKMARKS_BOOKMARK_PROMO_CELL_H_

#import <UIKit/UIKit.h>

@class BookmarkPromoCell;

@protocol BookmarkPromoCellDelegate

// Called when the SIGN IN button is tapped.
- (void)bookmarkPromoCellDidTapSignIn:(BookmarkPromoCell*)bookmarkPromoCell;

// Called when the NO THANKS button is tapped.
- (void)bookmarkPromoCellDidTapDismiss:(BookmarkPromoCell*)bookmarkPromoCell;

@end

@interface BookmarkPromoCell : UICollectionViewCell

+ (NSString*)reuseIdentifier;

@property(nonatomic, weak) id<BookmarkPromoCellDelegate> delegate;

@end

#endif  // IOS_CHROME_BROWSER_UI_BOOKMARKS_BOOKMARK_PROMO_CELL_H_
/**
 * This function initializes the FM3 Easy Kit board.
 */
void rt_hw_board_init()
{
    // Configure the SysTick timer to fire once per OS tick.
    SysTick_Config(SystemFrequency / RT_TICK_PER_SECOND);
}
// Copyright (c) 2010, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Utility functions for spawning a helper process using a different
// CPU architecture.

#ifndef GOOGLE_BREAKPAD_CLIENT_MAC_TESTS_SPAWN_CHILD_PROCESS
#define GOOGLE_BREAKPAD_CLIENT_MAC_TESTS_SPAWN_CHILD_PROCESS

#include <AvailabilityMacros.h>
#ifndef MAC_OS_X_VERSION_10_6
#define MAC_OS_X_VERSION_10_6 1060
#endif
#include <crt_externs.h>
#include <mach-o/dyld.h>
#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_6
#include <spawn.h>
#endif

#include <string>
#include <vector>

#include "google_breakpad/common/minidump_format.h"

namespace google_breakpad_test {

using std::string;
using std::vector;

const MDCPUArchitecture kNativeArchitecture =
#if defined(__i386__)
    MD_CPU_ARCHITECTURE_X86
#elif defined(__x86_64__)
    MD_CPU_ARCHITECTURE_AMD64
#elif defined(__ppc__) || defined(__ppc64__)
    MD_CPU_ARCHITECTURE_PPC
#else
#error "This file has not been ported to this CPU architecture."
#endif
    ;

const uint32_t kNativeContext =
#if defined(__i386__)
    MD_CONTEXT_X86
#elif defined(__x86_64__)
    MD_CONTEXT_AMD64
#elif defined(__ppc__) || defined(__ppc64__)
    MD_CONTEXT_PPC
#else
#error "This file has not been ported to this CPU architecture."
#endif
    ;

string GetExecutablePath() {
  char self_path[PATH_MAX];
  uint32_t size = sizeof(self_path);
  if (_NSGetExecutablePath(self_path, &size) != 0)
    return "";
  return self_path;
}

string GetHelperPath() {
  string helper_path(GetExecutablePath());
  size_t pos = helper_path.rfind('/');
  if (pos == string::npos)
    return "";
  helper_path.erase(pos + 1);
  helper_path += "minidump_generator_test_helper";
  return helper_path;
}

#if MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_6

pid_t spawn_child_process(const char** argv) {
  posix_spawnattr_t spawnattr;
  if (posix_spawnattr_init(&spawnattr) != 0)
    return (pid_t)-1;

  // Prefer the non-native architecture first, so the helper is spawned
  // with a different CPU architecture than the parent process.
  cpu_type_t pref_cpu_types[2] = {
#if defined(__x86_64__)
    CPU_TYPE_X86,
#elif defined(__i386__)
    CPU_TYPE_X86_64,
#endif
    CPU_TYPE_ANY
  };

  // Set spawn attributes.
  size_t attr_count = sizeof(pref_cpu_types) / sizeof(pref_cpu_types[0]);
  size_t attr_ocount = 0;
  if (posix_spawnattr_setbinpref_np(&spawnattr,
                                    attr_count,
                                    pref_cpu_types,
                                    &attr_ocount) != 0 ||
      attr_ocount != attr_count) {
    posix_spawnattr_destroy(&spawnattr);
    return (pid_t)-1;
  }

  // Create an argv array.
  vector<char*> argv_v;
  while (*argv) {
    argv_v.push_back(strdup(*argv));
    argv++;
  }
  argv_v.push_back(NULL);

  pid_t new_pid = 0;
  int result = posix_spawnp(&new_pid, argv_v[0], NULL, &spawnattr,
                            &argv_v[0], *_NSGetEnviron());
  posix_spawnattr_destroy(&spawnattr);

  for (unsigned i = 0; i < argv_v.size(); i++) {
    free(argv_v[i]);
  }

  return result == 0 ? new_pid : -1;
}
#endif

}  // namespace google_breakpad_test

#endif  // GOOGLE_BREAKPAD_CLIENT_MAC_TESTS_SPAWN_CHILD_PROCESS
A girl celebrates equality, triumphantly jumping with a rainbow flag during a gay pride parade. Marriage equality came to Wisconsin Friday when a federal judge overturned the state's ban on same-sex marriage as unconstitutional. UPI/Mohammad Kheirkhah | License Photo

MADISON, Wis., June 6 (UPI) -- United States District Judge Barbara Crabb overturned Wisconsin's ban on same-sex marriage Friday.

"This case is not about whether marriages between same-sex couples are consistent or inconsistent with the teachings of a particular religion, whether such marriages are moral or immoral or whether they are something that should be encouraged or discouraged," Crabb said in her ruling. "Quite simply, this case is about liberty and equality, the two cornerstones of the rights protected by the United States Constitution."

Wisconsin voted to add a constitutional amendment banning same-sex marriage in 2006. The American Civil Liberties Union, the ACLU of Wisconsin, and the law firm of Mayer Brown filed a lawsuit on behalf of eight Wisconsin couples challenging the ban.

"It is DECLARED that art. XIII, § 13 of the Wisconsin Constitution violates plaintiffs' fundamental right to marry and their right to equal protection of laws under the Fourteenth Amendment to the United States Constitution," Crabb's ruling reads in part.

Judge Crabb went on to call marriage "a defining rite of passage and one of the most important events in the lives of millions of people, if not the most important for some."

Openly gay Representative Mark Pocan of Madison said, "The federal district court in Madison took another step toward ensuring full equality for every American. It is clear the growing momentum of support for marriage equality will put an end to discriminatory laws that treat LGBT couples as second-class citizens."

The current governor of Wisconsin, a Republican, has not commented, though his Democratic opponent Mary Burke said, "Today is a great day for Wisconsin and committed couples who love each other across the state. Every loving couple should have the freedom to marry whomever they choose, and the fact that this freedom is now available in Wisconsin is something we all can and should be proud of."

Wisconsin's current Attorney General, J.B. Van Hollen, also a Republican, decried the decision, promising to appeal Crabb's ruling. "As Attorney General, I have an obligation to uphold Wisconsin law and our Constitution," he said. "While today's decision is a setback, we will continue to defend the constitutionality of our traditional marriage laws and the constitutional amendment, which was overwhelmingly approved by voters. I will appeal."

Van Hollen's opponent in the next election, Democratic attorney general candidate Jon Richards, said, "I am overjoyed that our LGBT brothers and sisters will finally have the ability to marry in Wisconsin. Today is a joyous day for so many, and I am happy that all Wisconsin residents will be able to enjoy the right of marriage for the first time in our state's history."
Real-time measurement of glomerular filtration rate

Purpose of review: Measurement of glomerular filtration rate is an essential tool for determining the health or dysfunction of the kidney. The glomerular filtration rate is a dynamic function that can change almost instantaneously in response to stressors. Despite its central role in nephrology, there are no techniques available to the clinician for monitoring glomerular filtration rate in real time. Recent advances in technology to measure fluorescent compounds through the skin are providing a new approach for real-time monitoring of glomerular filtration rate. This review frames these technologies within how such measurements might be used in clinical medicine.

Recent findings: Fluorescent molecules that act as ideal filtration markers are now available. Using transdermal sensors, the plasma disappearance rate of these exogenous markers can be measured rather than their steady-state concentration. This eliminates the delay inherent in using an endogenous marker of filtration and permits continuous monitoring of GFR.

Summary: These new technologies provide enhanced opportunities for the diagnosis of kidney dysfunction and for therapeutic monitoring. Accurate assessment of measured GFR will eliminate the erroneous diagnosis of chronic kidney disease (CKD) in many patients. Assessment of renal reserve will provide a new risk factor for progression of CKD. Real-time monitoring of GFR in critically ill patients will allow for earlier diagnosis of acute kidney injury and a dynamic metric to guide therapeutics. These are but a few of the many opportunities that this new technology will provide in both the clinical and research arenas.
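The abstract centers on estimating GFR from the plasma disappearance rate of an exogenous fluorescent marker. A minimal sketch of the underlying arithmetic, assuming the simplest single-compartment model in which clearance equals the elimination rate constant times the distribution volume — real devices use richer kinetic models, and every name and number here is hypothetical:

import numpy as np

def gfr_from_decay(times_min, fluorescence, v_dist_ml):
    """Estimate clearance (GFR, mL/min) from a mono-exponential decay curve.

    Fits ln(F) = ln(F0) - k*t by least squares, then returns GFR = k * V_dist
    (single-compartment assumption).
    """
    k = -np.polyfit(times_min, np.log(fluorescence), 1)[0]  # 1/min
    return k * v_dist_ml

# Synthetic example: k = 0.005/min and V = 15 L gives GFR = 75 mL/min.
t = np.arange(0, 240, 10.0)
f = 100.0 * np.exp(-0.005 * t)
print(round(gfr_from_decay(t, f, 15000.0)))  # 75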
// findVolumeByRequest resolves a volume from a docker volume request,
// cross-checking the request name against the optional volume_name option.
func (d *Driver) findVolumeByRequest(r volume.Request) (string, error) {
	volumeName := r.Options["volume_name"]
	if len(volumeName) > 0 {
		if r.Name != volumeName {
			return "", fmt.Errorf("Volume name %s and volume_name parameter %s have to be the same", r.Name, volumeName)
		}
		return d.findVolumeByName(volumeName)
	}
	return "", nil
}
<filename>PythonChallenge/Ex19/19_01.py
#!/usr/bin/env python3
# coding:utf-8

from base64 import b64decode

# Decode the base64-encoded attachment line by line into a WAV file.
with open("please.txt", "rb") as f, open("indian.wav", "wb") as audio:
    for line in f:
        audio.write(b64decode(line.strip()))
// initSchema initializes the schema for the PostgreSQL database.
func initSchema(db *sqlx.DB) error {
	file, err := ioutil.ReadFile(functionsFilePath)
	if err != nil {
		log.WithError(err).Error("Failed to read in sql file defining postgresql functions.")
		return err
	}
	_, err = db.Exec(string(file))
	if err != nil {
		log.WithError(err).Error("Failed to execute postgresql functions.")
		return err
	}

	file, err = ioutil.ReadFile(schemaFilePath)
	if err != nil {
		log.WithError(err).Error("Failed to read in sql file defining postgresql schema.")
		return err
	}

	// Statements are split naively on ";", so the schema file must not
	// contain semicolons inside function bodies or string literals.
	requests := strings.Split(string(file), ";")
	for _, request := range requests {
		_, err = db.Exec(request)
		if err != nil {
			log.WithError(err).Error("Failed to execute sql to init schema.")
			return err
		}
	}
	return nil
}
#include <iostream>
using namespace std;

int mat[5][5];

int main()
{
    int i, j, x, sum, s;
    bool ok;

    // Read a 3x3 grid. The diagonal entries read here are placeholders:
    // mat[1][1] is brute-forced below and mat[2][2], mat[3][3] are derived.
    for (i = 1; i <= 3; i++)
        for (j = 1; j <= 3; j++)
            cin >> mat[i][j];

    // Try the top-left entry from large to small.
    for (x = 100000; x >= 0; x--) {
        ok = true;
        mat[1][1] = x;
        // The first row fixes the target sum; the remaining diagonal
        // entries then follow from their own row constraints.
        sum = x + mat[1][2] + mat[1][3];
        mat[2][2] = sum - mat[2][1] - mat[2][3];
        mat[3][3] = sum - mat[3][1] - mat[3][2];
        // Verify all rows and columns...
        for (i = 1; i <= 3; i++) {
            s = mat[i][1] + mat[i][2] + mat[i][3];
            if (s != sum) ok = false;
            s = mat[1][i] + mat[2][i] + mat[3][i];
            if (s != sum) ok = false;
        }
        // ...and both diagonals.
        s = mat[1][3] + mat[2][2] + mat[3][1];
        if (s != sum) ok = false;
        s = mat[1][1] + mat[2][2] + mat[3][3];
        if (s != sum) ok = false;
        if (ok) {
            for (i = 1; i <= 3; i++) {
                for (j = 1; j <= 3; j++)
                    cout << mat[i][j] << ' ';
                cout << "\n";
            }
            return 0;
        }
    }
    return 0;
}
//
// Attempts to resolve a command from a given argument string
// Returns NULL if there is no match
//
Command* Command::ResolveCommand(const char* str)
{
	for (Command* cmd = Command::g_cmds; cmd; cmd = cmd->NextElem()) {
		if (strcmp(cmd->Name(), str) == 0) {
			return cmd;
		}
	}

	return NULL;
}
<reponame>HSSNPdS/xilogoritmo
import React from 'react';
import { Text, View, StyleSheet, Image } from 'react-native';
import { RectButton } from 'react-native-gesture-handler';
import { useNavigation } from '@react-navigation/native';

const logoImg = require('../../assets/Logo.png');
const iebImg = require('../../assets/Ieb.png');
const readImg = require('../../assets/Read.png');
const cactusImg = require('../../assets/Cactus.png');
const createImg = require('../../assets/Create.png');
const sloganImg = require('../../assets/Slogan.png');

export default function Landing() {
  const { navigate } = useNavigation();

  function handleNavigateToNext() {
    navigate('Obras');
  }

  return (
    <View style={styles.container}>
      <View style={styles.logoGroup}>
        <Image style={styles.ieb} source={iebImg} />
        <Image style={styles.logo} source={logoImg} />
        <Image style={styles.slogan} source={sloganImg} />
      </View>

      <View style={styles.btnGroup}>
        <RectButton onPress={handleNavigateToNext} style={styles.button}>
          <Image style={styles.landingBtn} source={readImg} />
          <Text style={styles.landingBtnText}>Ler</Text>
        </RectButton>

        <RectButton style={styles.button}>
          <Image style={styles.landingBtn} source={createImg} />
          <Text style={styles.landingBtnText}>Criar cordel</Text>
        </RectButton>
      </View>

      <View style={styles.footerGroup}>
        <Image style={styles.cactus} source={cactusImg} />
      </View>
    </View>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    alignItems: 'center',
    alignContent: 'center',
    justifyContent: 'center',
    backgroundColor: '#F0EC82',
  },
  logoGroup: {
    flex: 1,
    marginBottom: 10,
    alignItems: 'center',
    alignContent: 'center',
    justifyContent: 'center',
  },
  logo: {
    width: 350,
    height: 50,
    resizeMode: 'contain',
  },
  slogan: {
    marginTop: '3%',
    width: 350,
    height: 60,
    resizeMode: 'contain',
  },
  btnGroup: {
    flex: 0.5,
    display: 'flex',
    flexDirection: 'row',
  },
  button: {
    marginTop: '5%',
    marginHorizontal: '5%',
  },
  landingBtn: {
    width: 120,
    height: 120,
    marginBottom: 2,
  },
  landingBtnText: {
    fontSize: 18,
    fontWeight: 'bold',
    color: '#000',
    textAlign: 'center',
  },
  footerGroup: {
    flex: 0.8,
    width: '100%',
    display: 'flex',
    flexDirection: 'row',
  },
  cactus: {
    width: 200,
    height: 200,
    marginTop: '10%',
  },
  ieb: {
    marginTop: '5%',
    marginBottom: '5%',
    width: 150,
    height: 150,
  },
});
Revitalising technical and vocational education and training in Africa: Issues outstanding

One of the stark realities of Africa today is the crisis of youth unemployment. Every year, about 10–12 million poorly skilled young people exiting the various levels of the education system enter the labour market (AfDB & OECD, 2012), where they end up in insecure and sometimes hazardous employment with no prospect of further education or training. Even graduates of higher education institutions are not spared the frustration of seeking and not immediately finding a job. In countries such as Zambia and Ethiopia, young graduates may take up to five years after training before finding a job in the formal sector (ILO, 2013). In Ghana, it is estimated that the economy needs to create 300 000 new jobs per year to absorb the growing number of unemployed (Honorati & Johansson de Silva, 2016). In Tanzania, approximately 800 000 people enter the labour market each year (ILO, 2012), in contrast to the absorptive capacity of the public sector of only 40 000.
// CreateSpaceIfNotExists creates a space in CloudFoundry using the V2 API.
// It uses an exponential backoff strategy, returning early if it successfully
// creates a space or the space already exists.
func CreateSpaceIfNotExists(logger lager.Logger, cfClient *cfclient.Client, spaceName string, orgGUID string) (*cfclient.Space, error) {
	logger.Debug("creating-space")

	spaceRequest := cfclient.SpaceRequest{
		Name:             spaceName,
		OrganizationGuid: orgGUID,
	}

	var (
		err   error
		space cfclient.Space
	)

	operation := func() error {
		space, err = cfClient.CreateSpace(spaceRequest)
		switch e := err.(type) {
		case nil:
			return nil
		case cfclient.CloudFoundryErrors:
			if len(e.Errors) == 0 {
				return err
			}
			// A "name taken" error means the space already exists; treat it
			// as success so the retry loop stops.
			for _, cfError := range e.Errors {
				if cfError.ErrorCode == internal.SpaceNameTaken {
					return nil
				}
			}
		case cfclient.CloudFoundryError:
			if e.ErrorCode == internal.SpaceNameTaken {
				return nil
			}
			return err
		default:
			return err
		}
		return err
	}

	err = backoff.RetryNotify(operation, backoff.NewExponentialBackOff(), func(err error, step time.Duration) {
		logger.Error("failed-to-create-space", err, lager.Data{
			"backoff.step": step.String(),
		})
	})
	if err != nil {
		logger.Error("finally-failed-to-create-space", err)
		return nil, err
	}

	return &space, nil
}
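The function above leaves the retry timing to the backoff package. Purely to make the named strategy concrete — waits that roughly double after each failure, capped and jittered — here is a minimal language-agnostic sketch in Python; the constants are illustrative and do not reflect the Go library's defaults:

import random
import time

def retry_with_backoff(operation, base=0.5, factor=2.0, max_wait=60.0, max_tries=8):
    """Call operation() until it succeeds, sleeping base * factor**n (jittered,
    capped at max_wait) between failures. Re-raises the last error when exhausted."""
    for attempt in range(max_tries):
        try:
            return operation()
        except Exception:
            if attempt == max_tries - 1:
                raise
            wait = min(max_wait, base * factor ** attempt)
            time.sleep(wait * random.uniform(0.5, 1.5))  # jitter avoids thundering herds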